ddeutil-workflow 0.0.63-py3-none-any.whl → 0.0.65-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +1 -8
- ddeutil/workflow/api/__init__.py +5 -84
- ddeutil/workflow/api/routes/__init__.py +0 -1
- ddeutil/workflow/api/routes/job.py +2 -3
- ddeutil/workflow/api/routes/logs.py +0 -2
- ddeutil/workflow/api/routes/workflows.py +0 -3
- ddeutil/workflow/conf.py +6 -38
- ddeutil/workflow/{exceptions.py → errors.py} +47 -12
- ddeutil/workflow/job.py +249 -118
- ddeutil/workflow/params.py +11 -11
- ddeutil/workflow/result.py +86 -10
- ddeutil/workflow/reusables.py +54 -23
- ddeutil/workflow/stages.py +692 -464
- ddeutil/workflow/utils.py +37 -2
- ddeutil/workflow/workflow.py +163 -664
- {ddeutil_workflow-0.0.63.dist-info → ddeutil_workflow-0.0.65.dist-info}/METADATA +17 -67
- ddeutil_workflow-0.0.65.dist-info/RECORD +28 -0
- {ddeutil_workflow-0.0.63.dist-info → ddeutil_workflow-0.0.65.dist-info}/WHEEL +1 -1
- ddeutil/workflow/api/routes/schedules.py +0 -141
- ddeutil/workflow/api/utils.py +0 -174
- ddeutil/workflow/scheduler.py +0 -813
- ddeutil_workflow-0.0.63.dist-info/RECORD +0 -31
- {ddeutil_workflow-0.0.63.dist-info → ddeutil_workflow-0.0.65.dist-info}/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.63.dist-info → ddeutil_workflow-0.0.65.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.63.dist-info → ddeutil_workflow-0.0.65.dist-info}/top_level.txt +0 -0
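The file list above includes the rename of `ddeutil/workflow/exceptions.py` to `ddeutil/workflow/errors.py`. As a minimal sketch of what that means for caller code, the snippet below mirrors the import used in the README diff further down; the commented-out old import path is an assumption based on the previous module name and the `StageException` name that appears in the removed configuration table.

```python
# Before (0.0.63): error types presumably lived in ddeutil.workflow.exceptions,
# e.g. `from ddeutil.workflow.exceptions import StageException`.

# After (0.0.65): the module is ddeutil.workflow.errors, as shown in the
# README example later in this diff.
from ddeutil.workflow.errors import StageError


def fail_fast(method: str) -> None:
    # Raise the engine's stage error type for an unsupported action, matching
    # the pattern used in the updated README example below.
    if method != "post":
        raise StageError(f"RestAPI does not support for {method} action.")
```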
{ddeutil_workflow-0.0.63.dist-info → ddeutil_workflow-0.0.65.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.63
+Version: 0.0.65
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -23,7 +23,7 @@ Requires-Python: >=3.9.13
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil[checksum]>=0.4.8
-Requires-Dist: ddeutil-io[toml,yaml]>=0.2.
+Requires-Dist: ddeutil-io[toml,yaml]>=0.2.14
 Requires-Dist: pydantic==2.11.4
 Requires-Dist: pydantic-extra-types==2.10.4
 Requires-Dist: python-dotenv==1.1.0
@@ -221,33 +221,36 @@ value (This config can override by extra parameters with `registry_caller` key).
 > engine will auto use the `model_validate` method before run your caller function.
 
 ```python
-from ddeutil.workflow import Result,
-from ddeutil.workflow.
+from ddeutil.workflow import Result, CallerSecret, tag
+from ddeutil.workflow.errors import StageError
 from pydantic import BaseModel
 
+
 class AwsCredential(BaseModel):
     path: str
     access_client_id: str
-    access_client_secret:
+    access_client_secret: CallerSecret
+
 
 class RestAuth(BaseModel):
     type: str
-    keys:
+    keys: CallerSecret
+
 
 @tag("requests", alias="get-api-with-oauth-to-s3")
 def get_api_with_oauth_to_s3(
-
-
-
-
-
-
-
+    method: str,
+    url: str,
+    body: dict[str, str],
+    auth: RestAuth,
+    writing_node: str,
+    aws: AwsCredential,
+    result: Result,
 ) -> dict[str, int]:
     result.trace.info("[CALLER]: Start get data via RestAPI to S3.")
     result.trace.info(f"... {method}: {url}")
     if method != "post":
-
+        raise StageError(f"RestAPI does not support for {method} action.")
     # NOTE: If you want to use secret, you can use `auth.keys.get_secret_value()`.
     return {"records": 1000}
 ```
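The caller example in the hunk above now types secret fields as `CallerSecret`. As a minimal sketch of how such a field is read, assuming `CallerSecret` follows the masked-secret pattern implied by the `auth.keys.get_secret_value()` note in that example (the payload values here are hypothetical):

```python
from ddeutil.workflow import CallerSecret
from pydantic import BaseModel


class RestAuth(BaseModel):
    type: str
    keys: CallerSecret


# Hypothetical input payload; the engine normally builds this model with
# `model_validate` before invoking the caller function, per the README note.
auth = RestAuth.model_validate({"type": "bearer", "keys": "s3cr3t-token"})

print(auth.keys)                     # assumed to render masked, not the raw token
print(auth.keys.get_secret_value())  # returns the raw token for real use
```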
@@ -265,45 +268,6 @@ result: Result = workflow.execute(
 )
 ```
 
-> [!NOTE]
-> So, this package provide the `Schedule` template for this action, and you can
-> pass the parameters dynamically for changing align with that running time by
-> the `release` prefix.
->
-> ```yaml
-> schedule-run-local-wf:
->
->   # Validate model that use to parsing exists for template file
->   type: Schedule
->   workflows:
->
->     # Map existing workflow that want to deploy with scheduler application.
->     # It allows you to pass release parameter that dynamic change depend on the
->     # current context of this scheduler application releasing that time.
->     - name: run-py-local
->       params:
->         source-extract: "USD-THB"
->         run-date: "${{ release.logical_date }}"
-> ```
->
-> The main method of the `Schedule` model that use to running is `pending`. If you
-> do not pass the `stop` date on this method, it will use config with
-> `WORKFLOW_APP_STOP_BOUNDARY_DELTA` key for generate this stop date.
->
-> ```python
-> from ddeutil.workflow import Schedule
->
-> (
->     Schedule
->     .from_conf("schedule-run-local-wf")
->     .pending(stop=None)
-> )
-> ```
-
-> [!WARNING]
-> The scheduler feature is the expensive feature of this project. You should
-> avoid to use it and find a scheduler tool instead.
-
 ## :cookie: Configuration
 
 The main configuration that use to dynamic changing this workflow engine for your
@@ -321,7 +285,6 @@ it will use default value and do not raise any error to you.
 | **CONF_PATH** | Core | `./conf` | The config path that keep all template `.yaml` files. |
 | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
 | **STAGE_DEFAULT_ID** | Core | `false` | A flag that enable default stage ID that use for catch an execution output. |
-| **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
 | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
 | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
 | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
@@ -333,19 +296,6 @@ it will use default value and do not raise any error to you.
 | **TRACE_ENABLE_WRITE** | Log | `false` | |
 | **AUDIT_PATH** | Log | `./audits` | |
 | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
-| **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
-| **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
-| **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
-
-**API Application**:
-
-This config part use for the workflow application that build from the FastAPI
-only.
-
-| Environment | Component | Default | Description |
-|:---------------------------|:-----------:|---------|------------------------------------------------------------------------------------|
-| **ENABLE_ROUTE_WORKFLOW** | API | `true` | A flag that enable workflow route to manage execute manually and workflow logging. |
-| **ENABLE_ROUTE_SCHEDULE** | API | `true` | A flag that enable run scheduler. |
 
 ## :rocket: Deployment
 
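The configuration table in the hunk above keeps the Core and Log settings while dropping the scheduler-related App and API ones. A minimal sketch of overriding a couple of the remaining settings through environment variables before importing the package; the `WORKFLOW_CORE_*` naming used here is an assumption inferred from the Environment/Component columns, not something this diff confirms.

```python
import os

# Assumed naming convention WORKFLOW_<COMPONENT>_<NAME>; verify against the
# documentation of the installed version before relying on these names.
os.environ["WORKFLOW_CORE_CONF_PATH"] = "./conf"
os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"

# Import after setting the variables so the config object picks them up.
from ddeutil.workflow import Workflow  # noqa: E402
```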
ddeutil_workflow-0.0.65.dist-info/RECORD
@@ -0,0 +1,28 @@
+ddeutil/workflow/__about__.py,sha256=HQ_82TqpLyt4QUTSUNDK94y4NkrRcrX9-AKR-fDwuWU,28
+ddeutil/workflow/__cron.py,sha256=BOKQcreiex0SAigrK1gnLxpvOeF3aca_rQwyz9Kfve4,28751
+ddeutil/workflow/__init__.py,sha256=JfFZlPRDgR2J0rb0SRejt1OSrOrD3GGv9Um14z8MMfs,901
+ddeutil/workflow/__main__.py,sha256=x-sYedl4T8p6054aySk-EQX6vhytvPR0HvaBNYxMzp0,364
+ddeutil/workflow/__types.py,sha256=uNfoRbVmNK5O37UUMVnqcmoghD9oMS1q9fXC0APnjSI,4584
+ddeutil/workflow/conf.py,sha256=VKtnD-Duuf_tPOCUfM6oa86-CrqeCj8kiQbzPLPUXx0,14807
+ddeutil/workflow/errors.py,sha256=evZYwNrvAvY1jpCqqZFkBCdaZ7GN-JbYJMYL6tJmm-0,2980
+ddeutil/workflow/event.py,sha256=S2eJAZZx_V5TuQ0l417hFVCtjWXnfNPZBgSCICzxQ48,11041
+ddeutil/workflow/job.py,sha256=9toh8L9MlQqyy3U3WYierdA03ohf0LOhffaPIOY2IYU,39126
+ddeutil/workflow/logs.py,sha256=iVtyl8i69y7t07tAuWkihc54WlkHCcBy_Ur0WtzJ_lM,31367
+ddeutil/workflow/params.py,sha256=Pco3DyjptC5Jkx53dhLL9xlIQdJvNAZs4FLzMUfXpbQ,12402
+ddeutil/workflow/result.py,sha256=aUMIXw2nYbCDfFZqj9ABr_b7ZLo1GftTaaW8ATh618g,7855
+ddeutil/workflow/reusables.py,sha256=jPrOCbxagqRvRFGXJzIyDa1wKV5AZ4crZyJ10cldQP0,21620
+ddeutil/workflow/stages.py,sha256=rorKBjdyUAxALtelNJrvc5plJp1WCV35NMkii3XMw2A,102094
+ddeutil/workflow/utils.py,sha256=slhBbsBNl0yaSk9EOiCK6UL-o7smgHVsLT7svRqAWXU,10436
+ddeutil/workflow/workflow.py,sha256=YP1st2y3YCUscsuFpjf3fQgYMOnQbBhiY0s6PW1Lpng,27637
+ddeutil/workflow/api/__init__.py,sha256=0UIilYwW29RL6HrCRHACSWvnATJVLSJzXiCMny0bHQk,2627
+ddeutil/workflow/api/logs.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
+ddeutil/workflow/api/routes/__init__.py,sha256=jC1pM7q4_eo45IyO3hQbbe6RnL9B8ibRq_K6aCMP6Ag,434
+ddeutil/workflow/api/routes/job.py,sha256=32TkNm7QY9gt6fxIqEPjDqPgc8XqDiMPjUb7disSrCw,2143
+ddeutil/workflow/api/routes/logs.py,sha256=QJH8IF102897WLfCJ29-1g15wl29M9Yq6omroZfbahs,5305
+ddeutil/workflow/api/routes/workflows.py,sha256=Gmg3e-K5rfi95pbRtWI_aIr5C089sIde_vefZVvh3U0,4420
+ddeutil_workflow-0.0.65.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.65.dist-info/METADATA,sha256=ujjqpMK3NkLSGMjPJdg30Cs3oGMJB8pBQd-eGWllYX4,16685
+ddeutil_workflow-0.0.65.dist-info/WHEEL,sha256=Nw36Djuh_5VDukK0H78QzOX-_FQEo6V37m3nkm96gtU,91
+ddeutil_workflow-0.0.65.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
+ddeutil_workflow-0.0.65.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.65.dist-info/RECORD,,
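Each RECORD entry above pairs a path with an urlsafe-base64 SHA-256 digest (trailing padding stripped) and a byte size, following the standard wheel RECORD format. As a small sketch, the snippet below recomputes that digest form for one entry; the path is taken from the RECORD hunk and assumes the wheel has been unpacked into the current directory.

```python
import base64
import hashlib
from pathlib import Path


def record_digest(path: str) -> str:
    """Return the sha256=<urlsafe-b64, no padding> form used in RECORD files."""
    digest = hashlib.sha256(Path(path).read_bytes()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode()


# Compare against the corresponding entry in the RECORD hunk above.
print(record_digest("ddeutil/workflow/__about__.py"))
```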
ddeutil/workflow/api/routes/schedules.py
DELETED
@@ -1,141 +0,0 @@
-# ------------------------------------------------------------------------------
-# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
-# Licensed under the MIT License. See LICENSE in the project root for
-# license information.
-# ------------------------------------------------------------------------------
-from __future__ import annotations
-
-import copy
-from datetime import datetime, timedelta
-
-from fastapi import APIRouter, HTTPException, Request
-from fastapi import status as st
-from fastapi.responses import UJSONResponse
-
-from ...conf import config
-from ...logs import get_logger
-from ...scheduler import Schedule
-
-logger = get_logger("uvicorn.error")
-schedule_route = APIRouter(
-    prefix="/schedules",
-    tags=["schedules"],
-    default_response_class=UJSONResponse,
-)
-
-
-@schedule_route.get(path="/{name}", status_code=st.HTTP_200_OK)
-async def get_schedules(name: str):
-    """Get schedule object."""
-    try:
-        schedule: Schedule = Schedule.from_conf(name=name, extras={})
-    except ValueError:
-        raise HTTPException(
-            status_code=st.HTTP_404_NOT_FOUND,
-            detail=f"Schedule name: {name!r} does not found in /conf path",
-        ) from None
-    return schedule.model_dump(
-        by_alias=True,
-        exclude_none=True,
-        exclude_unset=True,
-        exclude_defaults=True,
-    )
-
-
-@schedule_route.get(path="/deploy/", status_code=st.HTTP_200_OK)
-async def get_deploy_schedulers(request: Request):
-    snapshot = copy.deepcopy(request.state.scheduler)
-    return {"schedule": snapshot}
-
-
-@schedule_route.get(path="/deploy/{name}", status_code=st.HTTP_200_OK)
-async def get_deploy_scheduler(request: Request, name: str):
-    if name in request.state.scheduler:
-        schedule = Schedule.from_conf(name)
-        getter: list[dict[str, dict[str, list[datetime]]]] = []
-        for workflow in schedule.workflows:
-            getter.append(
-                {
-                    workflow.name: {
-                        "queue": copy.deepcopy(
-                            request.state.workflow_queue[workflow.name]
-                        ),
-                        "running": copy.deepcopy(
-                            request.state.workflow_running[workflow.name]
-                        ),
-                    }
-                }
-            )
-        return {
-            "message": f"Getting {name!r} to schedule listener.",
-            "scheduler": getter,
-        }
-    raise HTTPException(
-        status_code=st.HTTP_404_NOT_FOUND,
-        detail=f"Does not found {name!r} in schedule listener",
-    )
-
-
-@schedule_route.post(path="/deploy/{name}", status_code=st.HTTP_202_ACCEPTED)
-async def add_deploy_scheduler(request: Request, name: str):
-    """Adding schedule name to application state store."""
-    if name in request.state.scheduler:
-        raise HTTPException(
-            status_code=st.HTTP_302_FOUND,
-            detail=f"This schedule {name!r} already exists in scheduler list.",
-        )
-
-    request.state.scheduler.append(name)
-
-    start_date: datetime = datetime.now(tz=config.tz)
-    start_date_waiting: datetime = (start_date + timedelta(minutes=1)).replace(
-        second=0, microsecond=0
-    )
-
-    # NOTE: Create a pair of workflow and on from schedule model.
-    try:
-        schedule: Schedule = Schedule.from_conf(name)
-    except ValueError as err:
-        request.state.scheduler.remove(name)
-        logger.exception(err)
-        raise HTTPException(
-            status_code=st.HTTP_404_NOT_FOUND,
-            detail=str(err),
-        ) from None
-
-    request.state.workflow_tasks.extend(
-        schedule.tasks(
-            start_date_waiting,
-            queue=request.state.workflow_queue,
-        ),
-    )
-    return {
-        "message": f"Adding {name!r} to schedule listener.",
-        "start_date": start_date_waiting,
-    }
-
-
-@schedule_route.delete(path="/deploy/{name}", status_code=st.HTTP_202_ACCEPTED)
-async def del_deploy_scheduler(request: Request, name: str):
-    """Delete workflow task on the schedule listener."""
-    if name in request.state.scheduler:
-
-        # NOTE: Remove current schedule name from the state.
-        request.state.scheduler.remove(name)
-
-        schedule: Schedule = Schedule.from_conf(name)
-
-        for task in schedule.tasks(datetime.now(tz=config.tz), queue={}):
-            if task in request.state.workflow_tasks:
-                request.state.workflow_tasks.remove(task)
-
-        for workflow in schedule.workflows:
-            if workflow.alias in request.state.workflow_queue:
-                request.state.workflow_queue.pop(workflow.alias)
-
-        return {"message": f"Deleted schedule {name!r} in listener."}
-
-    raise HTTPException(
-        status_code=st.HTTP_404_NOT_FOUND,
-        detail=f"Does not found schedule {name!r} in listener",
-    )
ddeutil/workflow/api/utils.py
DELETED
@@ -1,174 +0,0 @@
-# ------------------------------------------------------------------------------
-# Copyright (c) 2023 Priyanshu Panwar. All rights reserved.
-# Licensed under the MIT License.
-# This code refs from: https://github.com/priyanshu-panwar/fastapi-utilities
-# ------------------------------------------------------------------------------
-from __future__ import annotations
-
-import asyncio
-from asyncio import ensure_future
-from datetime import datetime
-from functools import wraps
-
-from starlette.concurrency import run_in_threadpool
-
-from ..__cron import CronJob
-from ..conf import config
-from ..logs import get_logger
-
-logger = get_logger("uvicorn.error")
-
-
-def get_cronjob_delta(cron: str) -> float:
-    """This function returns the time delta between now and the next cron
-    execution time.
-
-    :rtype: float
-    """
-    now: datetime = datetime.now(tz=config.tz)
-    cron = CronJob(cron)
-    return (cron.schedule(now).next - now).total_seconds()
-
-
-def cron_valid(cron: str, raise_error: bool = True) -> bool:
-    """Check this crontab string value is valid with its cron syntax.
-
-    :rtype: bool
-    """
-    try:
-        CronJob(cron)
-        return True
-    except Exception as err:
-        if raise_error:
-            raise ValueError(f"Crontab value does not valid, {cron}") from err
-        return False
-
-
-async def run_func(
-    is_coroutine,
-    func,
-    *args,
-    raise_exceptions: bool = False,
-    **kwargs,
-):
-    """Run function inside the repeat decorator functions."""
-    try:
-        if is_coroutine:
-            await func(*args, **kwargs)
-        else:
-            await run_in_threadpool(func, *args, **kwargs)
-    except Exception as e:
-        logger.exception(e)
-        if raise_exceptions:
-            raise e
-
-
-def repeat_at(
-    *,
-    cron: str,
-    delay: float = 0,
-    raise_exceptions: bool = False,
-    max_repetitions: int = None,
-):
-    """This function returns a decorator that makes a function execute
-    periodically as per the cron expression provided.
-
-    :param cron: (str) A Cron-style string for periodic execution, e.g.
-        '0 0 * * *' every midnight
-    :param delay: (float) A delay seconds value.
-    :param raise_exceptions: (bool) A raise exception flag. Whether to raise
-        exceptions or log them if raise was set be false.
-    :param max_repetitions: int (default None)
-        Maximum number of times to repeat the function. If None, repeat
-        indefinitely.
-    """
-    if max_repetitions and max_repetitions <= 0:
-        raise ValueError(
-            "max_repetitions should more than zero if it want to set"
-        )
-
-    def decorator(func):
-        is_coroutine: bool = asyncio.iscoroutinefunction(func)
-
-        @wraps(func)
-        def wrapper(*_args, **_kwargs):
-            repetitions: int = 0
-            cron_valid(cron)
-
-            async def loop(*args, **kwargs):
-                nonlocal repetitions
-                while max_repetitions is None or repetitions < max_repetitions:
-                    sleep_time = get_cronjob_delta(cron) + delay
-                    await asyncio.sleep(sleep_time)
-                    await run_func(
-                        is_coroutine,
-                        func,
-                        *args,
-                        raise_exceptions=raise_exceptions,
-                        **kwargs,
-                    )
-                    repetitions += 1
-
-            ensure_future(loop(*_args, **_kwargs))
-
-        return wrapper
-
-    return decorator
-
-
-def repeat_every(
-    *,
-    seconds: float,
-    wait_first: bool = False,
-    raise_exceptions: bool = False,
-    max_repetitions: int = None,
-):
-    """This function returns a decorator that schedules a function to execute
-    periodically after every `seconds` seconds.
-
-    :param seconds: float
-        The number of seconds to wait before executing the function again.
-    :param wait_first: bool (default False)
-        Whether to wait `seconds` seconds before executing the function for the
-        first time.
-    :param raise_exceptions: bool (default False)
-        Whether to raise exceptions instead of logging them.
-    :param max_repetitions: int (default None)
-        The maximum number of times to repeat the function. If None, the
-        function will repeat indefinitely.
-    """
-    if max_repetitions and max_repetitions <= 0:
-        raise ValueError(
-            "max_repetitions should more than zero if it want to set"
-        )
-
-    def decorator(func):
-        is_coroutine: bool = asyncio.iscoroutinefunction(func)
-
-        @wraps(func)
-        async def wrapper(*_args, **_kwargs):
-            repetitions = 0
-
-            async def loop(*args, **kwargs):
-                nonlocal repetitions
-
-                if wait_first:
-                    await asyncio.sleep(seconds)
-
-                while max_repetitions is None or repetitions < max_repetitions:
-                    await run_func(
-                        is_coroutine,
-                        func,
-                        *args,
-                        raise_exceptions=raise_exceptions,
-                        **kwargs,
-                    )
-
-                    repetitions += 1
-                    await asyncio.sleep(seconds)
-
-            ensure_future(loop(*_args, **_kwargs))
-
-        return wrapper
-
-    return decorator