ddeutil-workflow 0.0.54__py3-none-any.whl → 0.0.56__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +4 -2
- ddeutil/workflow/__main__.py +30 -0
- ddeutil/workflow/api/__init__.py +170 -1
- ddeutil/workflow/api/routes/job.py +22 -21
- ddeutil/workflow/api/routes/schedules.py +0 -2
- ddeutil/workflow/api/routes/workflows.py +3 -4
- ddeutil/workflow/conf.py +144 -94
- ddeutil/workflow/{cron.py → event.py} +36 -20
- ddeutil/workflow/exceptions.py +10 -1
- ddeutil/workflow/job.py +23 -14
- ddeutil/workflow/result.py +1 -0
- ddeutil/workflow/scheduler.py +33 -74
- ddeutil/workflow/stages.py +169 -116
- ddeutil/workflow/workflow.py +57 -106
- {ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.56.dist-info}/METADATA +5 -7
- ddeutil_workflow-0.0.56.dist-info/RECORD +31 -0
- ddeutil_workflow-0.0.56.dist-info/entry_points.txt +2 -0
- ddeutil/workflow/api/api.py +0 -170
- ddeutil_workflow-0.0.54.dist-info/RECORD +0 -31
- {ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.56.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.56.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.56.dist-info}/top_level.txt +0 -0
ddeutil/workflow/workflow.py
CHANGED
@@ -28,7 +28,7 @@ from pathlib import Path
 from queue import Queue
 from textwrap import dedent
 from threading import Event
-from typing import Optional
+from typing import Any, Optional, Union

 from pydantic import BaseModel, ConfigDict, Field, ValidationInfo
 from pydantic.dataclasses import dataclass
@@ -37,8 +37,8 @@ from typing_extensions import Self

 from .__cron import CronJob, CronRunner
 from .__types import DictData, TupleStr
-from .conf import
-from .
+from .conf import FileLoad, Loader, dynamic
+from .event import On
 from .exceptions import JobException, UtilException, WorkflowException
 from .job import Job
 from .logs import Audit, get_audit
@@ -144,7 +144,7 @@ class Release:
         return NotImplemented

     def __lt__(self, other: Release | datetime) -> bool:
-        """Override
+        """Override less-than property that will compare only the same type or
         datetime.

         :rtype: bool
@@ -166,12 +166,13 @@ class ReleaseQueue:
     extras: DictData = Field(
         default_factory=dict,
         description="An extra parameters that want to override config values.",
+        repr=False,
     )

     @classmethod
     def from_list(
         cls,
-        queue: list[datetime]
+        queue: Optional[Union[list[datetime], list[Release]]] = None,
         extras: Optional[DictData] = None,
     ) -> Self:
         """Construct ReleaseQueue object from an input queue value that passing
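Taken on its own, the widened `from_list` signature suggests usage like the minimal sketch below; the dates are placeholders, and the behaviour of the datetime branch and the `None` default is only what the surrounding hunks show.

```python
from datetime import datetime

from ddeutil.workflow.workflow import ReleaseQueue

# Plain datetimes are accepted and converted to Release objects inside
# from_list (see the isinstance(q, datetime) branch in the next hunk).
queue = ReleaseQueue.from_list(
    [datetime(2025, 4, 1), datetime(2025, 4, 2)],
)

# With the new Optional default, calling it with no queue appears to return
# an empty ReleaseQueue (the `return cls()` context line in the next hunk).
empty = ReleaseQueue.from_list()
```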
@@ -189,7 +190,6 @@ class ReleaseQueue:
             return cls()

         if isinstance(queue, list):
-
             if all(isinstance(q, datetime) for q in queue):
                 return cls(
                     queue=[
@@ -213,15 +213,6 @@
         """
         return len(self.queue) > 0

-    @property
-    def first_queue(self) -> Release:
-        """Check an input Release object is the first value of the
-        waiting queue.
-
-        :rtype: Release
-        """
-        return self.queue[0]
-
     def check_queue(self, value: Release | datetime) -> bool:
         """Check a Release value already exists in list of tracking
         queues.
@@ -240,18 +231,12 @@
             or (value in self.complete)
         )

-    def remove_running(self, value: Release) -> Self:
-        """Remove Release in the running queue if it exists.
-
-        :rtype: Self
-        """
-        if value in self.running:
-            self.running.remove(value)
-
-        return self
-
     def mark_complete(self, value: Release) -> Self:
-        """Push Release to the complete queue.
+        """Push Release to the complete queue. After push the release, it will
+        delete old release base on the `CORE_MAX_QUEUE_COMPLETE_HIST` value.
+
+        :param value: (Release) A Release value that want to push to the
+            complete field.

         :rtype: Self
         """
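The reworded docstring says `mark_complete` now also trims old releases according to the `CORE_MAX_QUEUE_COMPLETE_HIST` config value. A minimal sketch of that trimming behaviour, with a hypothetical `max_hist` argument standing in for the real config lookup (the actual implementation is not part of this hunk):

```python
import heapq
from typing import Any

def mark_complete_sketch(complete: list[Any], release: Any, max_hist: int = 16) -> list[Any]:
    """Push a release onto the complete list, then drop the oldest entries
    beyond the history limit (Release orders by date via its __lt__)."""
    heapq.heappush(complete, release)
    while len(complete) > max_hist:
        heapq.heappop(complete)  # smallest item == oldest release
    return complete
```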
@@ -280,7 +265,7 @@
         force_run: bool = False,
         extras: Optional[DictData] = None,
     ) -> Self:
-        """Generate Release model to queue.
+        """Generate a Release model to the queue field with an input CronRunner.

         Steps:
             - Create Release object from the current date that not reach the end
@@ -295,9 +280,10 @@
         :param runner: (CronRunner) A CronRunner object.
         :param name: (str) A target name that want to check at pointer of audit.
         :param offset: (float) An offset in second unit for time travel.
-        :param force_run: A flag that allow to release workflow if the
-            with that release was pointed.
-        :param extras: An extra parameter that want to override core
+        :param force_run: (bool) A flag that allow to release workflow if the
+            audit with that release was pointed. (Default is False).
+        :param extras: (DictDatA) An extra parameter that want to override core
+            config values.

         :rtype: ReleaseQueue

@@ -305,7 +291,7 @@
         if runner.date > end_date:
             return self

-
+        release = Release(
             date=runner.date,
             offset=offset,
             end_date=end_date,
@@ -313,13 +299,11 @@
             type=ReleaseType.POKE,
         )

-        while self.check_queue(
-            audit.is_pointed(
-                name=name, release=workflow_release.date, extras=extras
-            )
+        while self.check_queue(release) or (
+            audit.is_pointed(name=name, release=release.date, extras=extras)
             and not force_run
         ):
-
+            release = Release(
                 date=runner.next,
                 offset=offset,
                 end_date=end_date,
@@ -330,12 +314,12 @@
         if runner.date > end_date:
             return self

-        heappush(self.queue,
+        heappush(self.queue, release)
         return self


 class Workflow(BaseModel):
-    """Workflow
+    """Workflow model that use to keep the `Job` and `On` models.

     This is the main future of this project because it uses to be workflow
     data for running everywhere that you want or using it to scheduler task in
@@ -373,6 +357,7 @@ class Workflow(BaseModel):
         cls,
         name: str,
         *,
+        path: Optional[Path] = None,
         extras: DictData | None = None,
         loader: type[Loader] = None,
     ) -> Self:
@@ -380,45 +365,8 @@ class Workflow(BaseModel):
         an input workflow name. The loader object will use this workflow name to
         searching configuration data of this workflow model in conf path.

-        :param name: A workflow name that want to pass to Loader object.
-        :param extras: An extra parameters that want to pass to Loader
-            object.
-        :param loader: A loader class for override default loader object.
-
-        :raise ValueError: If the type does not match with current object.
-
-        :rtype: Self
-        """
-        loader: Loader = (loader or Loader)(name, externals=(extras or {}))
-
-        # NOTE: Validate the config type match with current connection model
-        if loader.type != cls.__name__:
-            raise ValueError(f"Type {loader.type} does not match with {cls}")
-
-        loader_data: DictData = copy.deepcopy(loader.data)
-        loader_data["name"] = name.replace(" ", "_")
-
-        if extras:
-            loader_data["extras"] = extras
-
-        cls.__bypass_on__(loader_data, path=loader.conf_path, extras=extras)
-        return cls.model_validate(obj=loader_data)
-
-    @classmethod
-    def from_path(
-        cls,
-        name: str,
-        path: Path,
-        *,
-        extras: DictData | None = None,
-        loader: type[Loader] = None,
-    ) -> Self:
-        """Create Workflow instance from the specific path. The loader object
-        will use this workflow name and path to searching configuration data of
-        this workflow model.
-
         :param name: (str) A workflow name that want to pass to Loader object.
-        :param path: (Path)
+        :param path: (Path) An override config path.
         :param extras: (DictData) An extra parameters that want to override core
             config values.
         :param loader: A loader class for override default loader object.
@@ -427,21 +375,21 @@ class Workflow(BaseModel):

         :rtype: Self
         """
-        loader:
-
-
+        loader: type[Loader] = loader or FileLoad
+        load: Loader = loader(name, path=path, extras=extras)
+
         # NOTE: Validate the config type match with current connection model
-        if
-            raise ValueError(f"Type {
+        if load.type != cls.__name__:
+            raise ValueError(f"Type {load.type} does not match with {cls}")

-
-
+        data: DictData = copy.deepcopy(load.data)
+        data["name"] = name

         if extras:
-
+            data["extras"] = extras

-        cls.__bypass_on__(
-        return cls.model_validate(obj=
+        cls.__bypass_on__(data, path=load.path, extras=extras, loader=loader)
+        return cls.model_validate(obj=data)

     @classmethod
     def __bypass_on__(
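With `from_path` removed, both loading styles now go through `from_conf`; a short sketch based on the signature shown above (the workflow name and config folder are hypothetical and must exist in your own configuration):

```python
from pathlib import Path

from ddeutil.workflow.workflow import Workflow

# Load a workflow model by name from the default config path.
wf = Workflow.from_conf("wf-example")

# The former from_path use case: point the loader at an override config path.
wf = Workflow.from_conf("wf-example", path=Path("./conf"))
```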
@@ -449,6 +397,7 @@ class Workflow(BaseModel):
         data: DictData,
         path: Path,
         extras: DictData | None = None,
+        loader: type[Loader] = None,
     ) -> DictData:
         """Bypass the on data to loaded config data.

@@ -469,7 +418,7 @@
         # field.
         data["on"] = [
             (
-
+                (loader or FileLoad)(n, path=path, extras=extras).data
                 if isinstance(n, str)
                 else n
             )
@@ -478,11 +427,10 @@
         return data

     @model_validator(mode="before")
-    def __prepare_model_before__(cls,
+    def __prepare_model_before__(cls, data: Any) -> Any:
         """Prepare the params key in the data model before validating."""
-
-
-            values["params"] = {
+        if isinstance(data, dict) and (params := data.pop("params", {})):
+            data["params"] = {
                 p: (
                     {"type": params[p]}
                     if isinstance(params[p], str)
@@ -490,7 +438,7 @@
                 )
                 for p in params
             }
-        return
+        return data

     @field_validator("desc", mode="after")
     def __dedent_desc__(cls, value: str) -> str:
@@ -702,10 +650,10 @@
         if isinstance(release, datetime):
             release: Release = Release.from_dt(release, extras=self.extras)

-        result.trace.
+        result.trace.info(
             f"[RELEASE]: Start {name!r} : {release.date:%Y-%m-%d %H:%M:%S}"
         )
-        self.execute(
+        rs: Result = self.execute(
             params=param2template(
                 params,
                 params={
@@ -724,7 +672,7 @@
             parent_run_id=result.parent_run_id,
             timeout=timeout,
         )
-        result.trace.
+        result.trace.info(
             f"[RELEASE]: End {name!r} : {release.date:%Y-%m-%d %H:%M:%S}"
         )

@@ -745,11 +693,12 @@
         )

         if queue:
-            queue.
+            if release in queue.running:
+                queue.running.remove(release)
             queue.mark_complete(release)

         return result.catch(
-            status=
+            status=rs.status,
             context={
                 "params": params,
                 "release": {
@@ -1054,16 +1003,19 @@
         For example with non-strategy job, when I want to use the output
         from previous stage, I can access it with syntax:

-
-
+            ... ${job-id}.stages.${stage-id}.outputs.${key}
+            ... ${job-id}.stages.${stage-id}.errors.${key}

         But example for strategy job:

-
-
+            ... ${job-id}.strategies.${strategy-id}.stages.${stage-id}.outputs.${key}
+            ... ${job-id}.strategies.${strategy-id}.stages.${stage-id}.errors.${key}
+
+        This method already handle all exception class that can raise from
+        the job execution. It will warp that error and keep it in the key `errors`
+        at the result context.

-        :param params:
-            will parameterize before using it.
+        :param params: A parameter data that will parameterize before execution.
         :param run_id: (str | None) A workflow running ID.
         :param parent_run_id: (str | None) A parent workflow running ID.
         :param result: (Result) A Result instance for return context and status.
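Reading that docstring, the execution context can be walked along the documented `${job-id}.stages.${stage-id}.outputs.${key}` path. A hedged sketch follows: it assumes the returned Result keeps its collected data under a `context` attribute and that per-job output sits under the context's `jobs` key (as `check_needs(context["jobs"])` in the next hunk suggests); the workflow name, param, job id, stage id, and output key are all placeholders.

```python
from ddeutil.workflow.workflow import Workflow

wf = Workflow.from_conf("wf-example")
rs = wf.execute(params={"run-date": "2025-04-01"})

# Non-strategy job: ${job-id}.stages.${stage-id}.outputs.${key}
records = rs.context["jobs"]["extract"]["stages"]["query"]["outputs"]["records"]

# Wrapped exceptions land under the `errors` key described above.
errors = rs.context["jobs"]["extract"]["stages"]["query"].get("errors", {})
```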
@@ -1116,7 +1068,6 @@
         ):
             job_id: str = job_queue.get()
             job: Job = self.job(name=job_id)
-
             if (check := job.check_needs(context["jobs"])) == WAIT:
                 job_queue.task_done()
                 job_queue.put(job_id)
@@ -1227,8 +1178,8 @@
     :param alias: (str) An alias name of Workflow model.
     :param workflow: (Workflow) A Workflow model instance.
     :param runner: (CronRunner)
-    :param values:
-    :param extras:
+    :param values: A value data that want to parameterize.
+    :param extras: An extra parameter that use to override core config values.
     """

     alias: str
{ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.56.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.54
+Version: 0.0.56
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -121,12 +121,10 @@ flowchart LR

 > [!WARNING]
 > _**Disclaimer**_: I inspire the dynamic YAML statement from the [**GitHub Action**](https://github.com/features/actions),
-> and
->
-
->
-> Other workflow orchestration tools that I interest and pick them to be inspiration
-> some for this package:
+> and my experience of data framework configs pattern. :grimacing:
+>
+> Other workflow orchestration services that I interest and pick them to be
+> this project inspiration:
 >
 > - [Google **Workflows**](https://cloud.google.com/workflows)
 > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
ddeutil_workflow-0.0.56.dist-info/RECORD
ADDED
@@ -0,0 +1,31 @@
+ddeutil/workflow/__about__.py,sha256=EXJHOyenQmmnoVgcw7W0m9HU1cg9EJV61611jGr4NDY,28
+ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
+ddeutil/workflow/__init__.py,sha256=NXEhjzKFdIGa-jtIq9HXChLCjSXNPd8VJ8ltggxbBO8,1371
+ddeutil/workflow/__main__.py,sha256=x-sYedl4T8p6054aySk-EQX6vhytvPR0HvaBNYxMzp0,364
+ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4316
+ddeutil/workflow/conf.py,sha256=JaTfglfdgGe7M-nb2NeRska95MDmYapwKuAlZfzVdr4,14425
+ddeutil/workflow/event.py,sha256=qiUrkkVxOvYEFfxTWBsLCgYTyOWMY125abOPieY5Xqc,10319
+ddeutil/workflow/exceptions.py,sha256=0MvjCRBUsHfOm1kzMiC4Y22vb1_sfvTU0wAW7xZwtAo,1587
+ddeutil/workflow/job.py,sha256=T0zxzK682kYaNmVHTzNWeBrZu8QJ6J2eYCFlKto0vA4,34381
+ddeutil/workflow/logs.py,sha256=rsoBrUGQrooou18fg2yvPsB8NOaXnUA5ThQpBr_WVMg,26598
+ddeutil/workflow/params.py,sha256=FKY4Oo1Ze4QZKRfAk7rqKsi44YaJQAbqAtXM6vlO2hI,11392
+ddeutil/workflow/result.py,sha256=rI0S8-HanFDk1l6_BsYRRamzSfzKUy7bkKJUae1w_aQ,5708
+ddeutil/workflow/reusables.py,sha256=iXcS7Gg-71qVX4ln0ILTDx03cTtUnj_rNoXHTVdVrxc,17636
+ddeutil/workflow/scheduler.py,sha256=oVSNwZ-iyXFOGXhsltzaDy7GDQejI9GalMHxa8JRcro,27063
+ddeutil/workflow/stages.py,sha256=E5XoMVijjcvm_YK8AbiA8xGAQUphCPTtGazW-oLAdeI,82543
+ddeutil/workflow/utils.py,sha256=NZPvPPP_5g4cigFcD7tHjIKLtKMeYAcb3oUhNyhTpJ0,7947
+ddeutil/workflow/workflow.py,sha256=vgVOwa79ZXWcOv2k6FG4I_FVuyswGwbglidWQJXSrsY,45739
+ddeutil/workflow/api/__init__.py,sha256=kY30dL8HPY8tY_GBmm7y_3OdoXzB1-EA2a96PLU0AQw,5278
+ddeutil/workflow/api/logs.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
+ddeutil/workflow/api/utils.py,sha256=uTtUFVLpiYYahXvCVx8sueRQ03K2Xw1id_gW3IMmX1U,5295
+ddeutil/workflow/api/routes/__init__.py,sha256=qoGtOMyVgQ5nTUc8J8wH27A8isaxl3IFCX8qoyibeCY,484
+ddeutil/workflow/api/routes/job.py,sha256=8X5VLDJH6PumyNIY6JGRNBsf2gWN0eG9DzxRPSh6n4I,2190
+ddeutil/workflow/api/routes/logs.py,sha256=U6vOni3wd-ZTOwd3yVdSOpgyRmNdcgfngU5KlLM3Cww,5383
+ddeutil/workflow/api/routes/schedules.py,sha256=14RnaJKEGMSJtncI1H_QQVZNBe_jDS40PPRO6qFc3i0,4805
+ddeutil/workflow/api/routes/workflows.py,sha256=GJu5PiXEylswrXylEImpncySjeU9chrvrtjhiMCw2RQ,4529
+ddeutil_workflow-0.0.56.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.56.dist-info/METADATA,sha256=JYyoDdlPBgJukz3yFPseqjqTOwK7i2LejPgLhqkBXwo,19008
+ddeutil_workflow-0.0.56.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ddeutil_workflow-0.0.56.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
+ddeutil_workflow-0.0.56.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.56.dist-info/RECORD,,
ddeutil/workflow/api/api.py
DELETED
@@ -1,170 +0,0 @@
-# ------------------------------------------------------------------------------
-# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
-# Licensed under the MIT License. See LICENSE in the project root for
-# license information.
-# ------------------------------------------------------------------------------
-from __future__ import annotations
-
-import contextlib
-from collections.abc import AsyncIterator
-from datetime import datetime, timedelta
-from typing import TypedDict
-
-from dotenv import load_dotenv
-from fastapi import FastAPI, Request
-from fastapi import status as st
-from fastapi.encoders import jsonable_encoder
-from fastapi.exceptions import RequestValidationError
-from fastapi.middleware.cors import CORSMiddleware
-from fastapi.middleware.gzip import GZipMiddleware
-from fastapi.responses import UJSONResponse
-
-from ..__about__ import __version__
-from ..conf import api_config, config
-from ..logs import get_logger
-from ..scheduler import ReleaseThread, ReleaseThreads
-from ..workflow import ReleaseQueue, WorkflowTask
-from .routes import job, log
-from .utils import repeat_at
-
-load_dotenv()
-logger = get_logger("uvicorn.error")
-
-
-class State(TypedDict):
-    """TypeDict for State of FastAPI application."""
-
-    scheduler: list[str]
-    workflow_threads: ReleaseThreads
-    workflow_tasks: list[WorkflowTask]
-    workflow_queue: dict[str, ReleaseQueue]
-
-
-@contextlib.asynccontextmanager
-async def lifespan(a: FastAPI) -> AsyncIterator[State]:
-    """Lifespan function for the FastAPI application."""
-    a.state.scheduler = []
-    a.state.workflow_threads = {}
-    a.state.workflow_tasks = []
-    a.state.workflow_queue = {}
-
-    yield {
-        # NOTE: Scheduler value should be contained a key of workflow and
-        # list of datetime of queue and running.
-        #
-        # ... {
-        # ... '<workflow-name>': (
-        # ... [<running-datetime>, ...], [<queue-datetime>, ...]
-        # ... )
-        # ... }
-        #
-        "scheduler": a.state.scheduler,
-        "workflow_queue": a.state.workflow_queue,
-        "workflow_threads": a.state.workflow_threads,
-        "workflow_tasks": a.state.workflow_tasks,
-    }
-
-
-app = FastAPI(
-    titile="Workflow",
-    description=(
-        "This is a workflow FastAPI application that use to manage manual "
-        "execute, logging, and schedule workflow via RestAPI."
-    ),
-    version=__version__,
-    lifespan=lifespan,
-    default_response_class=UJSONResponse,
-)
-app.add_middleware(GZipMiddleware, minimum_size=1000)
-origins: list[str] = [
-    "http://localhost",
-    "http://localhost:88",
-    "http://localhost:80",
-]
-app.add_middleware(
-    CORSMiddleware,
-    allow_origins=origins,
-    allow_credentials=True,
-    allow_methods=["*"],
-    allow_headers=["*"],
-)
-
-
-@app.get("/")
-async def health():
-    """Index view that not return any template without json status."""
-    return {"message": "Workflow already start up with healthy status."}
-
-
-# NOTE Add the jobs and logs routes by default.
-app.include_router(job, prefix=api_config.prefix_path)
-app.include_router(log, prefix=api_config.prefix_path)
-
-
-# NOTE: Enable the workflows route.
-if api_config.enable_route_workflow:
-    from .routes import workflow
-
-    app.include_router(workflow, prefix=api_config.prefix_path)
-
-
-# NOTE: Enable the schedules route.
-if api_config.enable_route_schedule:
-    from ..logs import get_audit
-    from ..scheduler import schedule_task
-    from .routes import schedule
-
-    app.include_router(schedule, prefix=api_config.prefix_path)
-
-    @schedule.on_event("startup")
-    @repeat_at(cron="* * * * *", delay=2)
-    def scheduler_listener():
-        """Schedule broker every minute at 02 second."""
-        logger.debug(
-            f"[SCHEDULER]: Start listening schedule from queue "
-            f"{app.state.scheduler}"
-        )
-        if app.state.workflow_tasks:
-            schedule_task(
-                app.state.workflow_tasks,
-                stop=datetime.now(config.tz) + timedelta(minutes=1),
-                queue=app.state.workflow_queue,
-                threads=app.state.workflow_threads,
-                audit=get_audit(),
-            )
-
-    @schedule.on_event("startup")
-    @repeat_at(cron="*/5 * * * *", delay=10)
-    def monitoring():
-        """Monitoring workflow thread that running in the background."""
-        logger.debug("[MONITOR]: Start monitoring threading.")
-        snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
-        for t_name in snapshot_threads:
-
-            thread_release: ReleaseThread = app.state.workflow_threads[t_name]
-
-            # NOTE: remove the thread that running success.
-            if not thread_release["thread"].is_alive():
-                app.state.workflow_threads.pop(t_name)
-
-
-@app.exception_handler(RequestValidationError)
-async def validation_exception_handler(
-    request: Request, exc: RequestValidationError
-):
-    _ = request
-    return UJSONResponse(
-        status_code=st.HTTP_422_UNPROCESSABLE_ENTITY,
-        content=jsonable_encoder({"detail": exc.errors(), "body": exc.body}),
-    )
-
-
-if __name__ == "__main__":
-    import uvicorn
-
-    uvicorn.run(
-        app,
-        host="0.0.0.0",
-        port=80,
-        log_level="DEBUG",
-    )
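Per the file list at the top, the 170 lines removed here reappear in `ddeutil/workflow/api/__init__.py` (+170/−1), so the FastAPI app seems to have moved rather than gone away. A hedged sketch of serving it from that new location; the `ddeutil.workflow.api:app` import string assumes the application object kept the name `app` after the move, which this diff does not show.

```python
import uvicorn

# Assumed new location of the FastAPI application after api/api.py was removed.
uvicorn.run("ddeutil.workflow.api:app", host="0.0.0.0", port=80)
```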
ddeutil_workflow-0.0.54.dist-info/RECORD
DELETED
@@ -1,31 +0,0 @@
-ddeutil/workflow/__about__.py,sha256=KTbXC0Wvt30Z6UCFcSzVAD697wqCc-kgyHEuDu_xEtY,28
-ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
-ddeutil/workflow/__init__.py,sha256=noE8LNRcgq32m9OnIFcQqh0P7PXWdp-SGmvBCYIXgf4,1338
-ddeutil/workflow/__main__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4316
-ddeutil/workflow/conf.py,sha256=80rgmJKFU7BlH5xTLnghGzGhE8C6LFAQykd9mjHSjo8,12528
-ddeutil/workflow/cron.py,sha256=WS2MInn0Sp5DKlZDZH5VFZ5AA0Q3_AnBnYEU4lZSv4I,9779
-ddeutil/workflow/exceptions.py,sha256=r4Jrf9qtVPALU4wh4bnb_OYqC-StqSQJEmFC-_QK934,1408
-ddeutil/workflow/job.py,sha256=uoj6grIq1f0iV5RFtQaysyel6z9keYr-urkkhFzyhcI,33831
-ddeutil/workflow/logs.py,sha256=rsoBrUGQrooou18fg2yvPsB8NOaXnUA5ThQpBr_WVMg,26598
-ddeutil/workflow/params.py,sha256=FKY4Oo1Ze4QZKRfAk7rqKsi44YaJQAbqAtXM6vlO2hI,11392
-ddeutil/workflow/result.py,sha256=27nPQq9CETLCVczv4vvFEF9w2TllHZ_ROfyDoLFxRWM,5647
-ddeutil/workflow/reusables.py,sha256=iXcS7Gg-71qVX4ln0ILTDx03cTtUnj_rNoXHTVdVrxc,17636
-ddeutil/workflow/scheduler.py,sha256=4G5AogkmnsTKe7jKYSfU35qjubR82WQ8CLtEe9kqPTE,28304
-ddeutil/workflow/stages.py,sha256=131pGqE5RhhDo9aLy6KTXGrvVLFT3UIwr9fS3tVqoZQ,80466
-ddeutil/workflow/utils.py,sha256=NZPvPPP_5g4cigFcD7tHjIKLtKMeYAcb3oUhNyhTpJ0,7947
-ddeutil/workflow/workflow.py,sha256=-7M3HdxOpuPpXeRF8oWhDh_S8anX3ivSoKlOsKnsh6c,46942
-ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
-ddeutil/workflow/api/api.py,sha256=xLrQ8yD7iOn-MkzaSxG-BADbdkqLikDna630oW3YEmc,5243
-ddeutil/workflow/api/logs.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
-ddeutil/workflow/api/utils.py,sha256=uTtUFVLpiYYahXvCVx8sueRQ03K2Xw1id_gW3IMmX1U,5295
-ddeutil/workflow/api/routes/__init__.py,sha256=qoGtOMyVgQ5nTUc8J8wH27A8isaxl3IFCX8qoyibeCY,484
-ddeutil/workflow/api/routes/job.py,sha256=GbDFmToksKsvPHn9KPXhoOCznzEPRTe79jUn7ew0snE,1953
-ddeutil/workflow/api/routes/logs.py,sha256=U6vOni3wd-ZTOwd3yVdSOpgyRmNdcgfngU5KlLM3Cww,5383
-ddeutil/workflow/api/routes/schedules.py,sha256=EgUjyRGhsm6UNaMj5luh6TcY6l571sCHcla-BL1iOfY,4829
-ddeutil/workflow/api/routes/workflows.py,sha256=JcDOrn1deK8ztFRcMTNATQejG6KMA7JxZLVc4QeBsP4,4527
-ddeutil_workflow-0.0.54.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
-ddeutil_workflow-0.0.54.dist-info/METADATA,sha256=uVgjeBlD6MEq6XVs4pPNEfhiTuf44TrUzpESmy_8ADw,19081
-ddeutil_workflow-0.0.54.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-ddeutil_workflow-0.0.54.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
-ddeutil_workflow-0.0.54.dist-info/RECORD,,
{ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.56.dist-info}/WHEEL
File without changes
{ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.56.dist-info}/licenses/LICENSE
File without changes
{ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.56.dist-info}/top_level.txt
File without changes