ddeutil-workflow 0.0.35__tar.gz → 0.0.37__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/PKG-INFO +15 -13
  2. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/README.md +12 -12
  3. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/pyproject.toml +17 -4
  4. ddeutil_workflow-0.0.37/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/__init__.py +2 -0
  6. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/api.py +50 -9
  7. ddeutil_workflow-0.0.37/src/ddeutil/workflow/api/log.py +59 -0
  8. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/repeat.py +22 -12
  9. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/routes/__init__.py +1 -0
  10. ddeutil_workflow-0.0.37/src/ddeutil/workflow/api/routes/job.py +73 -0
  11. ddeutil_workflow-0.0.37/src/ddeutil/workflow/api/routes/logs.py +165 -0
  12. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/routes/schedules.py +7 -6
  13. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/routes/workflows.py +9 -7
  14. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/audit.py +6 -3
  15. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/caller.py +3 -1
  16. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/conf.py +0 -60
  17. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/job.py +293 -249
  18. ddeutil_workflow-0.0.37/src/ddeutil/workflow/logs.py +326 -0
  19. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/params.py +52 -15
  20. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/result.py +3 -5
  21. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/scheduler.py +42 -14
  22. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/stages.py +65 -4
  23. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/utils.py +40 -13
  24. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/workflow.py +1 -15
  25. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil_workflow.egg-info/PKG-INFO +15 -13
  26. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil_workflow.egg-info/SOURCES.txt +2 -0
  27. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil_workflow.egg-info/requires.txt +2 -0
  28. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_call_tag.py +52 -2
  29. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_job.py +9 -7
  30. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_job_exec.py +12 -0
  31. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_job_exec_strategy.py +7 -7
  32. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_logs.py +1 -1
  33. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_params.py +18 -0
  34. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_utils.py +16 -1
  35. ddeutil_workflow-0.0.35/src/ddeutil/workflow/__about__.py +0 -1
  36. ddeutil_workflow-0.0.35/src/ddeutil/workflow/api/routes/logs.py +0 -36
  37. ddeutil_workflow-0.0.35/src/ddeutil/workflow/logs.py +0 -214
  38. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/LICENSE +0 -0
  39. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/setup.cfg +0 -0
  40. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/__cron.py +0 -0
  41. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/__types.py +0 -0
  42. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/__init__.py +0 -0
  43. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/cron.py +0 -0
  44. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/exceptions.py +0 -0
  45. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/templates.py +0 -0
  46. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  47. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  48. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test__cron.py +0 -0
  49. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test__regex.py +0 -0
  50. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_audit.py +0 -0
  51. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_conf.py +0 -0
  52. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_cron_on.py +0 -0
  53. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_job_strategy.py +0 -0
  54. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_release.py +0 -0
  55. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_release_queue.py +0 -0
  56. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_result.py +0 -0
  57. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_schedule.py +0 -0
  58. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_schedule_pending.py +0 -0
  59. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_schedule_tasks.py +0 -0
  60. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_schedule_workflow.py +0 -0
  61. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_scheduler_control.py +0 -0
  62. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_stage.py +0 -0
  63. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_stage_handler_exec.py +0 -0
  64. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_templates.py +0 -0
  65. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_templates_filter.py +0 -0
  66. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_workflow.py +0 -0
  67. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_workflow_exec.py +0 -0
  68. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_workflow_exec_job.py +0 -0
  69. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_workflow_exec_poke.py +0 -0
  70. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_workflow_exec_release.py +0 -0
  71. {ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/tests/test_workflow_task.py +0 -0
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: ddeutil-workflow
- Version: 0.0.35
+ Version: 0.0.37
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -29,6 +29,8 @@ Requires-Dist: python-dotenv==1.0.1
  Requires-Dist: schedule<2.0.0,==1.2.2
  Provides-Extra: api
  Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "api"
+ Requires-Dist: httpx; extra == "api"
+ Requires-Dist: ujson; extra == "api"

  # Workflow Orchestration

@@ -59,10 +61,10 @@ configuration. It called **Metadata Driven Data Workflow**.

  **:pushpin: <u>Rules of This Workflow engine</u>**:

- 1. The Minimum frequency unit of scheduling is **1 minute** :warning:
- 2. Can not re-run only failed stage and its pending downstream :rotating_light:
- 3. All parallel tasks inside workflow engine use Multi-Threading
- (🐍 Python 3.13 unlock GIL :unlock:)
+ 1. The Minimum frequency unit of scheduling is **1 Minute** 🕘
+ 2. **Can not** re-run only failed stage and its pending downstream ↩️
+ 3. All parallel tasks inside workflow engine use **Multi-Threading**
+ (Python 3.13 unlock GIL 🐍🔓)

  ---

@@ -78,12 +80,12 @@ flowchart LR

  subgraph Docker Container
  direction TB
- G@{ shape: rounded, label: "Observe<br>Application" }
+ G@{ shape: rounded, label: "📡Observe<br>Application" }
  end

  subgraph Docker Container
  direction TB
- B@{ shape: rounded, label: "Workflow<br>Application" }
+ B@{ shape: rounded, label: "🏃Workflow<br>Application" }
  end

  A <-->|action &<br>response| B
@@ -95,7 +97,7 @@ flowchart LR
  E@{ shape: lin-cyl, label: "Audit<br>Logs" }
  end

- subgraph Git Context
+ subgraph Config Context
  F@{ shape: tag-rect, label: "YAML<br>files" }
  end

@@ -130,10 +132,10 @@ This project need `ddeutil` and `ddeutil-io` extension namespace packages.
  If you want to install this package with application add-ons, you should add
  `app` in installation;

- | Use-case | Install Optional | Support |
- |----------------|--------------------------|--------------------|
- | Python | `ddeutil-workflow` | :heavy_check_mark: |
- | FastAPI Server | `ddeutil-workflow[api]` | :heavy_check_mark: |
+ | Use-case | Install Optional | Support |
+ |----------------|--------------------------|:-------------------:|
+ | Python | `ddeutil-workflow` | :heavy_check_mark: |
+ | FastAPI Server | `ddeutil-workflow[api]` | :heavy_check_mark: |

  ## :beers: Usage

@@ -295,7 +297,7 @@ like crontab job but via Python API.
  ### API Server

  ```shell
- (venv) $ uvicorn src.ddeutil.workflow.api:app \
+ (venv) $ uvicorn ddeutil.workflow.api:app \
  --host 127.0.0.1 \
  --port 80 \
  --no-access-log
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/README.md

@@ -27,10 +27,10 @@ configuration. It called **Metadata Driven Data Workflow**.

  **:pushpin: <u>Rules of This Workflow engine</u>**:

- 1. The Minimum frequency unit of scheduling is **1 minute** :warning:
- 2. Can not re-run only failed stage and its pending downstream :rotating_light:
- 3. All parallel tasks inside workflow engine use Multi-Threading
- (🐍 Python 3.13 unlock GIL :unlock:)
+ 1. The Minimum frequency unit of scheduling is **1 Minute** 🕘
+ 2. **Can not** re-run only failed stage and its pending downstream ↩️
+ 3. All parallel tasks inside workflow engine use **Multi-Threading**
+ (Python 3.13 unlock GIL 🐍🔓)

  ---

@@ -46,12 +46,12 @@ flowchart LR

  subgraph Docker Container
  direction TB
- G@{ shape: rounded, label: "Observe<br>Application" }
+ G@{ shape: rounded, label: "📡Observe<br>Application" }
  end

  subgraph Docker Container
  direction TB
- B@{ shape: rounded, label: "Workflow<br>Application" }
+ B@{ shape: rounded, label: "🏃Workflow<br>Application" }
  end

  A <-->|action &<br>response| B
@@ -63,7 +63,7 @@ flowchart LR
  E@{ shape: lin-cyl, label: "Audit<br>Logs" }
  end

- subgraph Git Context
+ subgraph Config Context
  F@{ shape: tag-rect, label: "YAML<br>files" }
  end

@@ -98,10 +98,10 @@ This project need `ddeutil` and `ddeutil-io` extension namespace packages.
  If you want to install this package with application add-ons, you should add
  `app` in installation;

- | Use-case | Install Optional | Support |
- |----------------|--------------------------|--------------------|
- | Python | `ddeutil-workflow` | :heavy_check_mark: |
- | FastAPI Server | `ddeutil-workflow[api]` | :heavy_check_mark: |
+ | Use-case | Install Optional | Support |
+ |----------------|--------------------------|:-------------------:|
+ | Python | `ddeutil-workflow` | :heavy_check_mark: |
+ | FastAPI Server | `ddeutil-workflow[api]` | :heavy_check_mark: |

  ## :beers: Usage

@@ -263,7 +263,7 @@ like crontab job but via Python API.
  ### API Server

  ```shell
- (venv) $ uvicorn src.ddeutil.workflow.api:app \
+ (venv) $ uvicorn ddeutil.workflow.api:app \
  --host 127.0.0.1 \
  --port 80 \
  --no-access-log
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/pyproject.toml

@@ -35,7 +35,11 @@ dependencies = [
  dynamic = ["version"]

  [project.optional-dependencies]
- api = [ "fastapi>=0.115.0,<1.0.0" ]
+ api = [
+ "fastapi>=0.115.0,<1.0.0",
+ "httpx",
+ "ujson",
+ ]

  [project.urls]
  Homepage = "https://github.com/ddeutils/ddeutil-workflow/"
@@ -59,14 +63,19 @@ commit_prefix_force_fix = true
  branch = true
  relative_files = true
  concurrency = ["thread", "multiprocessing"]
- source = ["ddeutil.workflow"]
+ source = ["src.ddeutil.workflow"]
  omit = [
  "src/ddeutil/workflow/__about__.py",
  "src/ddeutil/workflow/__cron.py",
  "src/ddeutil/workflow/api/__init__.py",
  "src/ddeutil/workflow/api/api.py",
+ "src/ddeutil/workflow/api/log.py",
  "src/ddeutil/workflow/api/repeat.py",
- "src/ddeutil/workflow/api/route.py",
+ "src/ddeutil/workflow/api/routes/__init__.py",
+ "src/ddeutil/workflow/api/routes/job.py",
+ "src/ddeutil/workflow/api/routes/logs.py",
+ "src/ddeutil/workflow/api/routes/schedules.py",
+ "src/ddeutil/workflow/api/routes/workflows.py",
  "app.py",
  ]

@@ -89,7 +98,11 @@ addopts = [
  "--strict-config",
  "--strict-markers",
  ]
- filterwarnings = ["error"]
+ filterwarnings = [
+ "error",
+ "ignore::DeprecationWarning",
+ "ignore::pytest.PytestUnraisableExceptionWarning",
+ ]
  log_cli = true
  log_cli_level = "DEBUG"
  log_cli_format = "%(asctime)s [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)"
ddeutil_workflow-0.0.37/src/ddeutil/workflow/__about__.py

@@ -0,0 +1 @@
+ __version__: str = "0.0.37"
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/__init__.py

@@ -37,9 +37,11 @@ from .exceptions import (
  )
  from .job import (
  Job,
+ RunsOn,
  Strategy,
  )
  from .logs import (
+ TraceData,
  TraceLog,
  get_dt_tznow,
  get_trace,
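For orientation, the two names newly re-exported from the package root in 0.0.37 (`RunsOn` and `TraceData`) sit alongside the existing top-level imports; a minimal sketch:

```python
# The new top-level exports in 0.0.37, imported next to the existing names.
from ddeutil.workflow import Job, RunsOn, Strategy, TraceData, TraceLog
```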
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/api.py

@@ -11,7 +11,11 @@ from datetime import datetime, timedelta
  from typing import TypedDict

  from dotenv import load_dotenv
- from fastapi import FastAPI
+ from fastapi import FastAPI, Request
+ from fastapi import status as st
+ from fastapi.encoders import jsonable_encoder
+ from fastapi.exceptions import RequestValidationError
+ from fastapi.middleware.cors import CORSMiddleware
  from fastapi.middleware.gzip import GZipMiddleware
  from fastapi.responses import UJSONResponse

@@ -20,10 +24,10 @@ from ..conf import config, get_logger
  from ..scheduler import ReleaseThread, ReleaseThreads
  from ..workflow import ReleaseQueue, WorkflowTask
  from .repeat import repeat_at
- from .routes import log
+ from .routes import job, log

  load_dotenv()
- logger = get_logger("ddeutil.workflow")
+ logger = get_logger("uvicorn.error")


  class State(TypedDict):
@@ -61,24 +65,38 @@ async def lifespan(a: FastAPI) -> AsyncIterator[State]:


  app = FastAPI(
- titile="Workflow API",
+ titile="Workflow",
  description=(
- "This is workflow FastAPI web application that use to manage manual "
- "execute or schedule workflow via RestAPI."
+ "This is a workflow FastAPI application that use to manage manual "
+ "execute, logging, and schedule workflow via RestAPI."
  ),
  version=__version__,
  lifespan=lifespan,
  default_response_class=UJSONResponse,
  )
  app.add_middleware(GZipMiddleware, minimum_size=1000)
+ origins: list[str] = [
+ "http://localhost",
+ "http://localhost:88",
+ "http://localhost:80",
+ ]
+ app.add_middleware(
+ CORSMiddleware,
+ allow_origins=origins,
+ allow_credentials=True,
+ allow_methods=["*"],
+ allow_headers=["*"],
+ )


  @app.get("/")
  async def health():
- return {"message": "Workflow API already start up"}
+ """Index view that not return any template without json status."""
+ return {"message": "Workflow already start up with healthy status."}


- # NOTE Add the logs route by default.
+ # NOTE Add the jobs and logs routes by default.
+ app.include_router(job, prefix=config.prefix_path)
  app.include_router(log, prefix=config.prefix_path)


@@ -111,12 +129,13 @@ if config.enable_route_schedule:
  stop=datetime.now(config.tz) + timedelta(minutes=1),
  queue=app.state.workflow_queue,
  threads=app.state.workflow_threads,
- log=get_audit(),
+ audit=get_audit(),
  )

  @schedule.on_event("startup")
  @repeat_at(cron="*/5 * * * *", delay=10)
  def monitoring():
+ """Monitoring workflow thread that running in the background."""
  logger.debug("[MONITOR]: Start monitoring threading.")
  snapshot_threads: list[str] = list(app.state.workflow_threads.keys())
  for t_name in snapshot_threads:
@@ -126,3 +145,25 @@ if config.enable_route_schedule:
  # NOTE: remove the thread that running success.
  if not thread_release["thread"].is_alive():
  app.state.workflow_threads.pop(t_name)
+
+
+ @app.exception_handler(RequestValidationError)
+ async def validation_exception_handler(
+ request: Request, exc: RequestValidationError
+ ):
+ _ = request
+ return UJSONResponse(
+ status_code=st.HTTP_422_UNPROCESSABLE_ENTITY,
+ content=jsonable_encoder({"detail": exc.errors(), "body": exc.body}),
+ )
+
+
+ if __name__ == "__main__":
+ import uvicorn
+
+ uvicorn.run(
+ app,
+ host="0.0.0.0",
+ port=80,
+ log_level="DEBUG",
+ )
ddeutil_workflow-0.0.37/src/ddeutil/workflow/api/log.py

@@ -0,0 +1,59 @@
+ from ..conf import config
+
+ LOGGING_CONFIG = {  # pragma: no cov
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "standard": {
+ "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
+ },
+ "custom_formatter": {
+ "format": config.log_format,
+ "datefmt": config.log_datetime_format,
+ },
+ },
+ "root": {
+ "level": "DEBUG" if config.debug else "INFO",
+ },
+ "handlers": {
+ "default": {
+ "formatter": "standard",
+ "class": "logging.StreamHandler",
+ "stream": "ext://sys.stderr",
+ },
+ "stream_handler": {
+ "formatter": "custom_formatter",
+ "class": "logging.StreamHandler",
+ "stream": "ext://sys.stdout",
+ },
+ "file_handler": {
+ "formatter": "custom_formatter",
+ "class": "logging.handlers.RotatingFileHandler",
+ "filename": "logs/app.log",
+ "maxBytes": 1024 * 1024 * 1,
+ "backupCount": 3,
+ },
+ },
+ "loggers": {
+ "uvicorn": {
+ "handlers": ["default", "file_handler"],
+ "level": "DEBUG" if config.debug else "INFO",
+ "propagate": False,
+ },
+ "uvicorn.access": {
+ "handlers": ["stream_handler", "file_handler"],
+ "level": "DEBUG" if config.debug else "INFO",
+ "propagate": False,
+ },
+ "uvicorn.error": {
+ "handlers": ["stream_handler", "file_handler"],
+ "level": "DEBUG" if config.debug else "INFO",
+ "propagate": False,
+ },
+ # "uvicorn.asgi": {
+ # "handlers": ["stream_handler", "file_handler"],
+ # "level": "TRACE",
+ # "propagate": False,
+ # },
+ },
+ }
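The dictConfig above targets uvicorn's `uvicorn`, `uvicorn.access`, and `uvicorn.error` loggers. A minimal sketch of how such a mapping could be applied when serving the app, assuming a `logs/` directory exists because the rotating file handler writes to `logs/app.log` (this launcher is not shipped with the package):

```python
# Minimal sketch: pass LOGGING_CONFIG to uvicorn so its loggers use the
# handlers defined in ddeutil.workflow.api.log. Assumes logs/ already exists.
import uvicorn

from ddeutil.workflow.api.log import LOGGING_CONFIG

if __name__ == "__main__":
    uvicorn.run(
        "ddeutil.workflow.api:app",
        host="127.0.0.1",
        port=80,
        log_config=LOGGING_CONFIG,  # uvicorn accepts a logging dictConfig mapping
    )
```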
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/repeat.py

@@ -15,23 +15,32 @@ from starlette.concurrency import run_in_threadpool
  from ..__cron import CronJob
  from ..conf import config, get_logger

- logger = get_logger("ddeutil.workflow")
+ logger = get_logger("uvicorn.error")


  def get_cronjob_delta(cron: str) -> float:
  """This function returns the time delta between now and the next cron
  execution time.
+
+ :rtype: float
  """
  now: datetime = datetime.now(tz=config.tz)
  cron = CronJob(cron)
  return (cron.schedule(now).next - now).total_seconds()


- def cron_valid(cron: str):
+ def cron_valid(cron: str, raise_error: bool = True) -> bool:
+ """Check this crontab string value is valid with its cron syntax.
+
+ :rtype: bool
+ """
  try:
  CronJob(cron)
+ return True
  except Exception as err:
- raise ValueError(f"Crontab value does not valid, {cron}") from err
+ if raise_error:
+ raise ValueError(f"Crontab value does not valid, {cron}") from err
+ return False


  async def run_func(
@@ -41,6 +50,7 @@ async def run_func(
  raise_exceptions: bool = False,
  **kwargs,
  ):
+ """Run function inside the repeat decorator functions."""
  try:
  if is_coroutine:
  await func(*args, **kwargs)
@@ -62,11 +72,11 @@ def repeat_at(
  """This function returns a decorator that makes a function execute
  periodically as per the cron expression provided.

- :param cron: str
- Cron-style string for periodic execution, eg. '0 0 * * *' every midnight
- :param delay:
- :param raise_exceptions: bool (default False)
- Whether to raise exceptions or log them
+ :param cron: (str) A Cron-style string for periodic execution, e.g.
+ '0 0 * * *' every midnight
+ :param delay: (float) A delay seconds value.
+ :param raise_exceptions: (bool) A raise exception flag. Whether to raise
+ exceptions or log them if raise was set be false.
  :param max_repetitions: int (default None)
  Maximum number of times to repeat the function. If None, repeat
  indefinitely.
@@ -81,12 +91,12 @@

  @wraps(func)
  def wrapper(*_args, **_kwargs):
- repititions: int = 0
+ repetitions: int = 0
  cron_valid(cron)

  async def loop(*args, **kwargs):
- nonlocal repititions
- while max_repetitions is None or repititions < max_repetitions:
+ nonlocal repetitions
+ while max_repetitions is None or repetitions < max_repetitions:
  sleep_time = get_cronjob_delta(cron) + delay
  await asyncio.sleep(sleep_time)
  await run_func(
@@ -96,7 +106,7 @@ def repeat_at(
  raise_exceptions=raise_exceptions,
  **kwargs,
  )
- repititions += 1

  ensure_future(loop(*_args, **_kwargs))
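A minimal usage sketch for the `repeat_at` decorator, mirroring how `api.py` applies it to its `monitoring()` task; `heartbeat` is a hypothetical task name, and the schedule only starts once the decorated function is called inside a running event loop (the API does this from a startup event):

```python
# Sketch: re-run a function per the cron expression, plus a delay in seconds
# after each computed slot. repeat_at schedules the loop with ensure_future,
# so a running event loop is required when heartbeat() is first invoked.
from ddeutil.workflow.api.repeat import repeat_at


@repeat_at(cron="*/5 * * * *", delay=10, max_repetitions=None)
def heartbeat() -> None:
    print("heartbeat tick")  # fires roughly every five minutes
```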
 
{ddeutil_workflow-0.0.35 → ddeutil_workflow-0.0.37}/src/ddeutil/workflow/api/routes/__init__.py

@@ -3,6 +3,7 @@
  # Licensed under the MIT License. See LICENSE in the project root for
  # license information.
  # ------------------------------------------------------------------------------
+ from .job import job_route as job
  from .logs import log_route as log
  from .schedules import schedule_route as schedule
  from .workflows import workflow_route as workflow
ddeutil_workflow-0.0.37/src/ddeutil/workflow/api/routes/job.py

@@ -0,0 +1,73 @@
+ # ------------------------------------------------------------------------------
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+ # Licensed under the MIT License. See LICENSE in the project root for
+ # license information.
+ # ------------------------------------------------------------------------------
+ from __future__ import annotations
+
+ from typing import Any, Optional
+
+ from fastapi import APIRouter
+ from fastapi.responses import UJSONResponse
+ from pydantic import BaseModel
+
+ from ...__types import DictData
+ from ...conf import get_logger
+ from ...exceptions import JobException
+ from ...job import Job
+ from ...result import Result
+
+ logger = get_logger("uvicorn.error")
+
+
+ job_route = APIRouter(
+ prefix="/job",
+ tags=["job"],
+ default_response_class=UJSONResponse,
+ )
+
+
+ class ResultPost(BaseModel):
+ context: DictData
+ run_id: str
+ parent_run_id: Optional[str] = None
+
+
+ @job_route.post(path="/execute/")
+ async def job_execute(
+ result: ResultPost,
+ job: Job,
+ params: dict[str, Any],
+ ):
+ """Execute job via API."""
+ rs: Result = Result(
+ context=result.context,
+ run_id=result.run_id,
+ parent_run_id=result.parent_run_id,
+ )
+ try:
+ job.set_outputs(
+ job.execute(
+ params=params,
+ run_id=rs.run_id,
+ parent_run_id=rs.parent_run_id,
+ ).context,
+ to=params,
+ )
+ except JobException as err:
+ rs.trace.error(f"[WORKFLOW]: {err.__class__.__name__}: {err}")
+
+ return {
+ "message": "Start execute job via API.",
+ "result": {
+ "run_id": rs.run_id,
+ "parent_run_id": rs.parent_run_id,
+ },
+ "job": job.model_dump(
+ by_alias=True,
+ exclude_none=True,
+ exclude_unset=True,
+ exclude_defaults=True,
+ ),
+ "params": params,
+ }
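A hypothetical call to the new `/job/execute/` route using `httpx`, which 0.0.37 adds to the `api` extra. FastAPI bundles the three body parameters (`result`, `job`, `params`) into one JSON object; the `job` payload below is illustrative only, since the real fields come from the `Job` model, and `/api` stands in for whatever `config.prefix_path` resolves to in a given deployment:

```python
# Sketch, not from the package docs: POST a job definition plus run context to
# the job route mounted under the configured prefix path.
import httpx

response = httpx.post(
    "http://127.0.0.1:80/api/job/execute/",
    json={
        "result": {"context": {}, "run_id": "20250101000000000001"},
        "job": {"stages": [{"name": "Echo Stage", "echo": "hello"}]},
        "params": {"name": "demo"},
    },
    timeout=30.0,
)
print(response.status_code, response.json())
```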