ddeutil-workflow 0.0.63__tar.gz → 0.0.65__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/PKG-INFO +17 -67
  2. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/README.md +15 -65
  3. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/pyproject.toml +3 -1
  4. ddeutil_workflow-0.0.65/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/__init__.py +1 -8
  6. ddeutil_workflow-0.0.65/src/ddeutil/workflow/api/__init__.py +91 -0
  7. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/api/routes/__init__.py +0 -1
  8. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/api/routes/job.py +2 -3
  9. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/api/routes/logs.py +0 -2
  10. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/api/routes/workflows.py +0 -3
  11. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/conf.py +6 -38
  12. ddeutil_workflow-0.0.63/src/ddeutil/workflow/exceptions.py → ddeutil_workflow-0.0.65/src/ddeutil/workflow/errors.py +47 -12
  13. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/job.py +249 -118
  14. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/params.py +11 -11
  15. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/result.py +86 -10
  16. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/reusables.py +54 -23
  17. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/stages.py +692 -464
  18. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/utils.py +37 -2
  19. ddeutil_workflow-0.0.65/src/ddeutil/workflow/workflow.py +767 -0
  20. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil_workflow.egg-info/PKG-INFO +17 -67
  21. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil_workflow.egg-info/SOURCES.txt +4 -16
  22. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil_workflow.egg-info/requires.txt +1 -1
  23. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_conf.py +13 -65
  24. ddeutil_workflow-0.0.65/tests/test_errors.py +10 -0
  25. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_job.py +21 -11
  26. ddeutil_workflow-0.0.65/tests/test_job_exec.py +506 -0
  27. ddeutil_workflow-0.0.65/tests/test_job_exec_strategy.py +182 -0
  28. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_params.py +17 -11
  29. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_result.py +16 -10
  30. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_reusables_call_tag.py +7 -76
  31. ddeutil_workflow-0.0.65/tests/test_reusables_func_model.py +158 -0
  32. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_reusables_template.py +12 -7
  33. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_reusables_template_filter.py +10 -10
  34. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_utils.py +2 -2
  35. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_workflow.py +36 -13
  36. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_workflow_exec.py +339 -198
  37. ddeutil_workflow-0.0.65/tests/test_workflow_exec_job.py +78 -0
  38. ddeutil_workflow-0.0.65/tests/test_workflow_release.py +100 -0
  39. ddeutil_workflow-0.0.63/src/ddeutil/workflow/__about__.py +0 -1
  40. ddeutil_workflow-0.0.63/src/ddeutil/workflow/api/__init__.py +0 -170
  41. ddeutil_workflow-0.0.63/src/ddeutil/workflow/api/routes/schedules.py +0 -141
  42. ddeutil_workflow-0.0.63/src/ddeutil/workflow/api/utils.py +0 -174
  43. ddeutil_workflow-0.0.63/src/ddeutil/workflow/scheduler.py +0 -813
  44. ddeutil_workflow-0.0.63/src/ddeutil/workflow/workflow.py +0 -1268
  45. ddeutil_workflow-0.0.63/tests/test_job_exec.py +0 -315
  46. ddeutil_workflow-0.0.63/tests/test_job_exec_strategy.py +0 -143
  47. ddeutil_workflow-0.0.63/tests/test_release.py +0 -49
  48. ddeutil_workflow-0.0.63/tests/test_release_queue.py +0 -70
  49. ddeutil_workflow-0.0.63/tests/test_schedule.py +0 -173
  50. ddeutil_workflow-0.0.63/tests/test_schedule_pending.py +0 -13
  51. ddeutil_workflow-0.0.63/tests/test_schedule_tasks.py +0 -82
  52. ddeutil_workflow-0.0.63/tests/test_schedule_workflow.py +0 -124
  53. ddeutil_workflow-0.0.63/tests/test_scheduler_control.py +0 -49
  54. ddeutil_workflow-0.0.63/tests/test_stage.py +0 -112
  55. ddeutil_workflow-0.0.63/tests/test_stage_handler_exec.py +0 -979
  56. ddeutil_workflow-0.0.63/tests/test_workflow_exec_job.py +0 -61
  57. ddeutil_workflow-0.0.63/tests/test_workflow_poke.py +0 -168
  58. ddeutil_workflow-0.0.63/tests/test_workflow_release.py +0 -153
  59. ddeutil_workflow-0.0.63/tests/test_workflow_task.py +0 -223
  60. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/LICENSE +0 -0
  61. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/setup.cfg +0 -0
  62. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/__cron.py +0 -0
  63. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/__main__.py +0 -0
  64. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/__types.py +0 -0
  65. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/api/logs.py +0 -0
  66. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/event.py +0 -0
  67. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/logs.py +0 -0
  68. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  69. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
  70. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  71. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test__cron.py +0 -0
  72. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test__regex.py +0 -0
  73. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_event.py +0 -0
  74. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_logs_audit.py +0 -0
  75. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_logs_trace.py +0 -0
  76. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/tests/test_strategy.py +0 -0
{ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.63
+Version: 0.0.65
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -23,7 +23,7 @@ Requires-Python: >=3.9.13
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil[checksum]>=0.4.8
-Requires-Dist: ddeutil-io[toml,yaml]>=0.2.13
+Requires-Dist: ddeutil-io[toml,yaml]>=0.2.14
 Requires-Dist: pydantic==2.11.4
 Requires-Dist: pydantic-extra-types==2.10.4
 Requires-Dist: python-dotenv==1.1.0
@@ -221,33 +221,36 @@ value (This config can override by extra parameters with `registry_caller` key).
 > engine will auto use the `model_validate` method before run your caller function.
 
 ```python
-from ddeutil.workflow import Result, WorkflowSecret, tag
-from ddeutil.workflow.exceptions import StageException
+from ddeutil.workflow import Result, CallerSecret, tag
+from ddeutil.workflow.errors import StageError
 from pydantic import BaseModel
 
+
 class AwsCredential(BaseModel):
     path: str
     access_client_id: str
-    access_client_secret: WorkflowSecret
+    access_client_secret: CallerSecret
+
 
 class RestAuth(BaseModel):
     type: str
-    keys: WorkflowSecret
+    keys: CallerSecret
+
 
 @tag("requests", alias="get-api-with-oauth-to-s3")
 def get_api_with_oauth_to_s3(
-    method: str,
-    url: str,
-    body: dict[str, str],
-    auth: RestAuth,
-    writing_node: str,
-    aws: AwsCredential,
-    result: Result,
+    method: str,
+    url: str,
+    body: dict[str, str],
+    auth: RestAuth,
+    writing_node: str,
+    aws: AwsCredential,
+    result: Result,
 ) -> dict[str, int]:
     result.trace.info("[CALLER]: Start get data via RestAPI to S3.")
     result.trace.info(f"... {method}: {url}")
     if method != "post":
-        raise StageException(f"RestAPI does not support for {method} action.")
+        raise StageError(f"RestAPI does not support for {method} action.")
     # NOTE: If you want to use secret, you can use `auth.keys.get_secret_value()`.
     return {"records": 1000}
 ```
@@ -265,45 +268,6 @@ result: Result = workflow.execute(
 )
 ```
 
-> [!NOTE]
-> So, this package provide the `Schedule` template for this action, and you can
-> pass the parameters dynamically for changing align with that running time by
-> the `release` prefix.
->
-> ```yaml
-> schedule-run-local-wf:
->
->   # Validate model that use to parsing exists for template file
->   type: Schedule
->   workflows:
->
->     # Map existing workflow that want to deploy with scheduler application.
->     # It allows you to pass release parameter that dynamic change depend on the
->     # current context of this scheduler application releasing that time.
->     - name: run-py-local
->       params:
->         source-extract: "USD-THB"
->         run-date: "${{ release.logical_date }}"
-> ```
->
-> The main method of the `Schedule` model that use to running is `pending`. If you
-> do not pass the `stop` date on this method, it will use config with
-> `WORKFLOW_APP_STOP_BOUNDARY_DELTA` key for generate this stop date.
->
-> ```python
-> from ddeutil.workflow import Schedule
->
-> (
->     Schedule
->     .from_conf("schedule-run-local-wf")
->     .pending(stop=None)
-> )
-> ```
-
-> [!WARNING]
-> The scheduler feature is the expensive feature of this project. You should
-> avoid to use it and find a scheduler tool instead.
-
 ## :cookie: Configuration
 
 The main configuration that use to dynamic changing this workflow engine for your
@@ -321,7 +285,6 @@ it will use default value and do not raise any error to you.
 | **CONF_PATH** | Core | `./conf` | The config path that keep all template `.yaml` files. |
 | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
 | **STAGE_DEFAULT_ID** | Core | `false` | A flag that enable default stage ID that use for catch an execution output. |
-| **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
 | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
 | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
 | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
@@ -333,19 +296,6 @@ it will use default value and do not raise any error to you.
 | **TRACE_ENABLE_WRITE** | Log | `false` | |
 | **AUDIT_PATH** | Log | `./audits` | |
 | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
-| **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
-| **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
-| **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
-
-**API Application**:
-
-This config part use for the workflow application that build from the FastAPI
-only.
-
-| Environment | Component | Default | Description |
-|:---------------------------|:-----------:|---------|------------------------------------------------------------------------------------|
-| **ENABLE_ROUTE_WORKFLOW** | API | `true` | A flag that enable workflow route to manage execute manually and workflow logging. |
-| **ENABLE_ROUTE_SCHEDULE** | API | `true` | A flag that enable run scheduler. |
 
 ## :rocket: Deployment
 
{ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/README.md

@@ -170,33 +170,36 @@ value (This config can override by extra parameters with `registry_caller` key).
 > engine will auto use the `model_validate` method before run your caller function.
 
 ```python
-from ddeutil.workflow import Result, WorkflowSecret, tag
-from ddeutil.workflow.exceptions import StageException
+from ddeutil.workflow import Result, CallerSecret, tag
+from ddeutil.workflow.errors import StageError
 from pydantic import BaseModel
 
+
 class AwsCredential(BaseModel):
     path: str
     access_client_id: str
-    access_client_secret: WorkflowSecret
+    access_client_secret: CallerSecret
+
 
 class RestAuth(BaseModel):
     type: str
-    keys: WorkflowSecret
+    keys: CallerSecret
+
 
 @tag("requests", alias="get-api-with-oauth-to-s3")
 def get_api_with_oauth_to_s3(
-    method: str,
-    url: str,
-    body: dict[str, str],
-    auth: RestAuth,
-    writing_node: str,
-    aws: AwsCredential,
-    result: Result,
+    method: str,
+    url: str,
+    body: dict[str, str],
+    auth: RestAuth,
+    writing_node: str,
+    aws: AwsCredential,
+    result: Result,
 ) -> dict[str, int]:
     result.trace.info("[CALLER]: Start get data via RestAPI to S3.")
     result.trace.info(f"... {method}: {url}")
     if method != "post":
-        raise StageException(f"RestAPI does not support for {method} action.")
+        raise StageError(f"RestAPI does not support for {method} action.")
     # NOTE: If you want to use secret, you can use `auth.keys.get_secret_value()`.
     return {"records": 1000}
 ```
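For context, a minimal sketch (not part of this diff) of how the renamed `CallerSecret` fields above behave; it assumes nothing beyond the pydantic `SecretStr` semantics the class inherits and the `get_secret_value()` override shown in `conf.py` later in this diff:

```python
from pydantic import BaseModel

from ddeutil.workflow import CallerSecret


class RestAuth(BaseModel):
    type: str
    keys: CallerSecret


auth = RestAuth(type="bearer", keys="my-api-token")

# Like any pydantic SecretStr, the value stays masked when the model is
# printed or dumped.
print(auth)

# The raw value is only exposed via `get_secret_value()`, which in this
# release also routes through `pass_env` (see the conf.py hunk below).
print(auth.keys.get_secret_value())
```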
@@ -214,45 +217,6 @@ result: Result = workflow.execute(
 )
 ```
 
-> [!NOTE]
-> So, this package provide the `Schedule` template for this action, and you can
-> pass the parameters dynamically for changing align with that running time by
-> the `release` prefix.
->
-> ```yaml
-> schedule-run-local-wf:
->
->   # Validate model that use to parsing exists for template file
->   type: Schedule
->   workflows:
->
->     # Map existing workflow that want to deploy with scheduler application.
->     # It allows you to pass release parameter that dynamic change depend on the
->     # current context of this scheduler application releasing that time.
->     - name: run-py-local
->       params:
->         source-extract: "USD-THB"
->         run-date: "${{ release.logical_date }}"
-> ```
->
-> The main method of the `Schedule` model that use to running is `pending`. If you
-> do not pass the `stop` date on this method, it will use config with
-> `WORKFLOW_APP_STOP_BOUNDARY_DELTA` key for generate this stop date.
->
-> ```python
-> from ddeutil.workflow import Schedule
->
-> (
->     Schedule
->     .from_conf("schedule-run-local-wf")
->     .pending(stop=None)
-> )
-> ```
-
-> [!WARNING]
-> The scheduler feature is the expensive feature of this project. You should
-> avoid to use it and find a scheduler tool instead.
-
 ## :cookie: Configuration
 
 The main configuration that use to dynamic changing this workflow engine for your
@@ -270,7 +234,6 @@ it will use default value and do not raise any error to you.
 | **CONF_PATH** | Core | `./conf` | The config path that keep all template `.yaml` files. |
 | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
 | **STAGE_DEFAULT_ID** | Core | `false` | A flag that enable default stage ID that use for catch an execution output. |
-| **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
 | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
 | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
 | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
@@ -282,19 +245,6 @@ it will use default value and do not raise any error to you.
 | **TRACE_ENABLE_WRITE** | Log | `false` | |
 | **AUDIT_PATH** | Log | `./audits` | |
 | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
-| **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
-| **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
-| **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
-
-**API Application**:
-
-This config part use for the workflow application that build from the FastAPI
-only.
-
-| Environment | Component | Default | Description |
-|:---------------------------|:-----------:|---------|------------------------------------------------------------------------------------|
-| **ENABLE_ROUTE_WORKFLOW** | API | `true` | A flag that enable workflow route to manage execute manually and workflow logging. |
-| **ENABLE_ROUTE_SCHEDULE** | API | `true` | A flag that enable run scheduler. |
 
 ## :rocket: Deployment
 
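For orientation, the remaining table keys map onto environment variables read lazily by `Config` in `conf.py`. A minimal sketch, assuming the `WORKFLOW_` prefix implied by the removed `WORKFLOW_APP_STOP_BOUNDARY_DELTA` key and that `Config` constructs without arguments:

```python
import os

# Assumed naming: WORKFLOW_<COMPONENT>_<KEY>, matching the env("CORE_...")
# lookups visible in the conf.py hunks below.
os.environ["WORKFLOW_CORE_STAGE_DEFAULT_ID"] = "true"
os.environ["WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST"] = "32"

from ddeutil.workflow.conf import Config

config = Config()
print(config.stage_default_id)         # True, read from the env var above
print(config.max_queue_complete_hist)  # 32
```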
{ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/pyproject.toml

@@ -26,7 +26,7 @@ classifiers = [
 requires-python = ">=3.9.13"
 dependencies = [
     "ddeutil[checksum]>=0.4.8",
-    "ddeutil-io[yaml,toml]>=0.2.13",
+    "ddeutil-io[yaml,toml]>=0.2.14",
     "pydantic==2.11.4",
     "pydantic-extra-types==2.10.4",
     "python-dotenv==1.1.0",
@@ -121,6 +121,8 @@ console_output_style = "count"
 addopts = [
     "--strict-config",
     "--strict-markers",
+    # "-p no:launch",
+    # "-p no:launch_ros",
 ]
 filterwarnings = [
     "error",
ddeutil_workflow-0.0.65/src/ddeutil/workflow/__about__.py

@@ -0,0 +1 @@
+__version__: str = "0.0.65"
{ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/__init__.py

@@ -6,8 +6,8 @@
 from .__cron import CronJob, CronRunner
 from .__types import DictData, DictStr, Matrix, Re, TupleStr
 from .conf import *
+from .errors import *
 from .event import *
-from .exceptions import *
 from .job import *
 from .logs import (
     Audit,
@@ -33,13 +33,6 @@ from .result import (
     Status,
 )
 from .reusables import *
-from .scheduler import (
-    Schedule,
-    ScheduleWorkflow,
-    schedule_control,
-    schedule_runner,
-    schedule_task,
-)
 from .stages import *
 from .utils import *
 from .workflow import *
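The practical effect on imports, sketched below as hypothetical caller code (not part of the diff): error types now come from `ddeutil.workflow.errors`, and the `Schedule` API is no longer re-exported from the top-level package.

```python
# 0.0.65 import surface.
from ddeutil.workflow import Result, Workflow
from ddeutil.workflow.errors import JobError, StageError, WorkflowError

# 0.0.63-era imports that no longer resolve after this release:
# from ddeutil.workflow.exceptions import StageException
# from ddeutil.workflow import Schedule, ScheduleWorkflow
```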
ddeutil_workflow-0.0.65/src/ddeutil/workflow/api/__init__.py

@@ -0,0 +1,91 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+import contextlib
+from collections.abc import AsyncIterator
+
+from dotenv import load_dotenv
+from fastapi import FastAPI, Request
+from fastapi import status as st
+from fastapi.encoders import jsonable_encoder
+from fastapi.exceptions import RequestValidationError
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.middleware.gzip import GZipMiddleware
+from fastapi.responses import UJSONResponse
+
+from ..__about__ import __version__
+from ..conf import api_config
+from ..logs import get_logger
+from .routes import job, log, workflow
+
+load_dotenv()
+logger = get_logger("uvicorn.error")
+
+
+@contextlib.asynccontextmanager
+async def lifespan(_: FastAPI) -> AsyncIterator[dict[str, list]]:
+    """Lifespan function for the FastAPI application."""
+    yield {}
+
+
+app = FastAPI(
+    titile="Workflow",
+    description=(
+        "This is a workflow FastAPI application that use to manage manual "
+        "execute, logging, and schedule workflow via RestAPI."
+    ),
+    version=__version__,
+    lifespan=lifespan,
+    default_response_class=UJSONResponse,
+)
+app.add_middleware(GZipMiddleware, minimum_size=1000)
+origins: list[str] = [
+    "http://localhost",
+    "http://localhost:88",
+    "http://localhost:80",
+]
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=origins,
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+
+@app.get(path="/", response_class=UJSONResponse)
+async def health():
+    """Index view that not return any template without json status."""
+    return {"message": "Workflow already start up with healthy status."}
+
+
+# NOTE Add the jobs and logs routes by default.
+app.include_router(job, prefix=api_config.prefix_path)
+app.include_router(log, prefix=api_config.prefix_path)
+app.include_router(workflow, prefix=api_config.prefix_path)
+
+
+@app.exception_handler(RequestValidationError)
+async def validation_exception_handler(
+    request: Request, exc: RequestValidationError
+):
+    _ = request
+    return UJSONResponse(
+        status_code=st.HTTP_422_UNPROCESSABLE_ENTITY,
+        content=jsonable_encoder({"detail": exc.errors(), "body": exc.body}),
+    )
+
+
+if __name__ == "__main__":
+    import uvicorn
+
+    uvicorn.run(
+        app,
+        host="0.0.0.0",
+        port=80,
+        log_level="DEBUG",
+    )
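A quick smoke test of the new module, sketched with FastAPI's `TestClient` (assumes `httpx` is installed, which the test client requires):

```python
from fastapi.testclient import TestClient

from ddeutil.workflow.api import app

client = TestClient(app)

# The `/` health route defined above answers without touching any route
# mounted under `api_config.prefix_path`.
response = client.get("/")
assert response.status_code == 200
assert response.json() == {
    "message": "Workflow already start up with healthy status."
}
```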
{ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/api/routes/__init__.py

@@ -5,5 +5,4 @@
 # ------------------------------------------------------------------------------
 from .job import job_route as job
 from .logs import log_route as log
-from .schedules import schedule_route as schedule
 from .workflows import workflow_route as workflow
{ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/api/routes/job.py

@@ -12,7 +12,7 @@ from fastapi.responses import UJSONResponse
 from pydantic import BaseModel, Field
 
 from ...__types import DictData
-from ...exceptions import JobException
+from ...errors import JobError
 from ...job import Job
 from ...logs import get_logger
 from ...result import Result
@@ -59,7 +59,7 @@ async def job_execute(
            ).context,
            to=context,
        )
-    except JobException as err:
+    except JobError as err:
        rs.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
 
    return {
@@ -69,7 +69,6 @@ async def job_execute(
            by_alias=True,
            exclude_none=False,
            exclude_unset=True,
-            exclude_defaults=True,
        ),
        "params": params,
        "context": context,
{ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/api/routes/logs.py

@@ -44,7 +44,6 @@ async def get_traces(
                by_alias=True,
                exclude_none=True,
                exclude_unset=True,
-                exclude_defaults=True,
            )
            for trace in result.trace.find_traces()
        ],
@@ -73,7 +72,6 @@ async def get_trace_with_id(run_id: str):
                by_alias=True,
                exclude_none=True,
                exclude_unset=True,
-                exclude_defaults=True,
            )
        ),
    }
{ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/api/routes/workflows.py

@@ -56,7 +56,6 @@ async def get_workflow_by_name(name: str) -> DictData:
        by_alias=True,
        exclude_none=False,
        exclude_unset=True,
-        exclude_defaults=True,
    )
 
 
@@ -99,7 +98,6 @@ async def get_workflow_audits(name: str):
                by_alias=True,
                exclude_none=False,
                exclude_unset=True,
-                exclude_defaults=True,
            )
            for audit in get_audit().find_audits(name=name)
        ],
@@ -133,6 +131,5 @@ async def get_workflow_release_audit(name: str, release: str):
            by_alias=True,
            exclude_none=False,
            exclude_unset=True,
-            exclude_defaults=True,
        ),
    }
{ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.65}/src/ddeutil/workflow/conf.py

@@ -6,11 +6,9 @@
 from __future__ import annotations
 
 import copy
-import json
 import os
 from abc import ABC, abstractmethod
 from collections.abc import Iterator
-from datetime import timedelta
 from functools import cached_property
 from inspect import isclass
 from pathlib import Path
@@ -20,7 +18,7 @@ from zoneinfo import ZoneInfo
 from ddeutil.core import str2bool
 from ddeutil.io import YamlFlResolve, search_env_replace
 from ddeutil.io.paths import glob_files, is_ignored, read_ignore
-from pydantic import SecretStr
+from pydantic import SecretStr, TypeAdapter
 
 from .__types import DictData
 
@@ -143,10 +141,6 @@ class Config: # pragma: no cov
     def log_datetime_format(self) -> str:
         return env("LOG_DATETIME_FORMAT", "%Y-%m-%d %H:%M:%S")
 
-    @property
-    def stage_raise_error(self) -> bool:
-        return str2bool(env("CORE_STAGE_RAISE_ERROR", "false"))
-
     @property
     def stage_default_id(self) -> bool:
         return str2bool(env("CORE_STAGE_DEFAULT_ID", "false"))
@@ -163,28 +157,6 @@ class Config: # pragma: no cov
     def max_queue_complete_hist(self) -> int:
         return int(env("CORE_MAX_QUEUE_COMPLETE_HIST", "16"))
 
-    # NOTE: App
-    @property
-    def max_schedule_process(self) -> int:
-        return int(env("APP_MAX_PROCESS", "2"))
-
-    @property
-    def max_schedule_per_process(self) -> int:
-        return int(env("APP_MAX_SCHEDULE_PER_PROCESS", "100"))
-
-    @property
-    def stop_boundary_delta(self) -> timedelta:
-        stop_boundary_delta_str: str = env(
-            "APP_STOP_BOUNDARY_DELTA", '{"minutes": 5, "seconds": 20}'
-        )
-        try:
-            return timedelta(**json.loads(stop_boundary_delta_str))
-        except Exception as err:
-            raise ValueError(
-                "Config `WORKFLOW_APP_STOP_BOUNDARY_DELTA` can not parsing to"
-                f"timedelta with {stop_boundary_delta_str}."
-            ) from err
-
 
 class APIConfig:
     """API Config object."""
@@ -193,14 +165,6 @@ class APIConfig:
     def prefix_path(self) -> str:
         return env("API_PREFIX_PATH", "/api/v1")
 
-    @property
-    def enable_route_workflow(self) -> bool:
-        return str2bool(env("API_ENABLE_ROUTE_WORKFLOW", "true"))
-
-    @property
-    def enable_route_schedule(self) -> bool:
-        return str2bool(env("API_ENABLE_ROUTE_SCHEDULE", "true"))
-
 
 class BaseLoad(ABC): # pragma: no cov
     """Base Load object is the abstraction object for any Load object that
@@ -496,7 +460,7 @@ def pass_env(value: T) -> T: # pragma: no cov
         return None if rs == "null" else rs
 
 
-class WorkflowSecret(SecretStr): # pragma: no cov
+class CallerSecret(SecretStr): # pragma: no cov
     """Workflow Secret String model."""
 
     def get_secret_value(self) -> str:
@@ -506,3 +470,7 @@ class WorkflowSecret(SecretStr): # pragma: no cov
         :rtype: str
         """
         return pass_env(super().get_secret_value())
+
+
+# NOTE: Define the caller secret type for use it directly in the caller func.
+CallerSecretType = TypeAdapter(CallerSecret)
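A small sketch of what the new adapter enables: validating a raw value straight into `CallerSecret` without defining a model (hypothetical usage, not taken from the package):

```python
from ddeutil.workflow.conf import CallerSecret, CallerSecretType

# TypeAdapter.validate_python coerces a plain string into the secret type.
secret: CallerSecret = CallerSecretType.validate_python("my-raw-token")

print(secret)                     # masked, e.g. **********
print(secret.get_secret_value())  # "my-raw-token", routed through pass_env
```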
ddeutil_workflow-0.0.63/src/ddeutil/workflow/exceptions.py → ddeutil_workflow-0.0.65/src/ddeutil/workflow/errors.py

@@ -11,6 +11,8 @@ from __future__ import annotations
 
 from typing import Literal, Optional, TypedDict, Union, overload
 
+from .__types import DictData, StrOrInt
+
 
 class ErrorData(TypedDict):
     """Error data type dict for typing necessary keys of return of to_dict func
@@ -21,7 +23,7 @@ class ErrorData(TypedDict):
     message: str
 
 
-def to_dict(exception: Exception) -> ErrorData: # pragma: no cov
+def to_dict(exception: Exception, **kwargs) -> ErrorData: # pragma: no cov
     """Create dict data from exception instance.
 
     :param exception: An exception object.
@@ -31,17 +33,27 @@ def to_dict(exception: Exception) -> ErrorData: # pragma: no cov
     return {
         "name": exception.__class__.__name__,
         "message": str(exception),
+        **kwargs,
     }
 
 
-class BaseWorkflowException(Exception):
+class BaseError(Exception):
     """Base Workflow exception class will implement the `refs` argument for
     making an error context to the result context.
     """
 
-    def __init__(self, message: str, *, refs: Optional[str] = None):
+    def __init__(
+        self,
+        message: str,
+        *,
+        refs: Optional[StrOrInt] = None,
+        context: Optional[DictData] = None,
+        params: Optional[DictData] = None,
+    ) -> None:
        super().__init__(message)
        self.refs: Optional[str] = refs
+        self.context: DictData = context or {}
+        self.params: DictData = params or {}
 
     @overload
     def to_dict(
@@ -54,7 +66,9 @@
     ) -> ErrorData: ... # pragma: no cov
 
     def to_dict(
-        self, with_refs: bool = False
+        self,
+        with_refs: bool = False,
+        **kwargs,
     ) -> Union[ErrorData, dict[str, ErrorData]]:
         """Return ErrorData data from the current exception object. If with_refs
         flag was set, it will return mapping of refs and itself data.
@@ -64,25 +78,46 @@ class BaseWorkflowException(Exception):
         data: ErrorData = to_dict(self)
         if with_refs and (self.refs is not None and self.refs != "EMPTY"):
             return {self.refs: data}
-        return data
+        return data | kwargs
+
+
+class UtilError(BaseError): ...
+
+
+class ResultError(UtilError): ...
+
+
+class StageError(BaseError): ...
+
+
+class StageRetryError(StageError): ...
+
+
+class StageCancelError(StageError): ...
+
+
+class StageSkipError(StageError): ...
+
+
+class JobError(BaseError): ...
 
 
-class UtilException(BaseWorkflowException): ...
+class JobCancelError(JobError): ...
 
 
-class ResultException(UtilException): ...
+class JobSkipError(JobError): ...
 
 
-class StageException(BaseWorkflowException): ...
+class WorkflowError(BaseError): ...
 
 
-class JobException(BaseWorkflowException): ...
+class WorkflowCancelError(WorkflowError): ...
 
 
-class WorkflowException(BaseWorkflowException): ...
+class WorkflowSkipError(WorkflowError): ...
 
 
-class ParamValueException(WorkflowException): ...
+class WorkflowTimeoutError(WorkflowError): ...
 
 
-class ScheduleException(BaseWorkflowException): ...
+class ParamError(WorkflowError): ...
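A sketch of how downstream code adapts to the renamed hierarchy (hypothetical usage; the constructor keywords follow the `BaseError.__init__` signature shown above):

```python
from ddeutil.workflow.errors import StageError

try:
    raise StageError(
        "RestAPI does not support for get action.",
        refs="get-api-stage",
        context={"method": "get"},
    )
except StageError as err:  # was `StageException` in 0.0.63
    # With `with_refs=True` the payload is keyed by the error's `refs` value.
    print(err.to_dict(with_refs=True))
    # {'get-api-stage': {'name': 'StageError', 'message': 'RestAPI does not ...'}}
```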