ddeutil-workflow 0.0.42__tar.gz → 0.0.44__tar.gz

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the registry.
Files changed (67)
  1. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/PKG-INFO +29 -29
  2. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/README.md +27 -27
  3. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/pyproject.toml +2 -1
  4. ddeutil_workflow-0.0.44/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/__init__.py +5 -1
  6. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/exceptions.py +13 -3
  7. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/job.py +85 -80
  8. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/params.py +77 -18
  9. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/result.py +36 -8
  10. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/scheduler.py +6 -9
  11. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/stages.py +60 -86
  12. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/workflow.py +82 -104
  13. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil_workflow.egg-info/PKG-INFO +29 -29
  14. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil_workflow.egg-info/requires.txt +1 -1
  15. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_job.py +3 -3
  16. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_job_exec_strategy.py +8 -3
  17. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_params.py +27 -0
  18. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_stage.py +0 -10
  19. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_stage_handler_exec.py +78 -11
  20. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_workflow_exec.py +168 -2
  21. ddeutil_workflow-0.0.42/src/ddeutil/workflow/__about__.py +0 -1
  22. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/LICENSE +0 -0
  23. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/setup.cfg +0 -0
  24. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/__cron.py +0 -0
  25. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/__types.py +0 -0
  26. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/api/__init__.py +0 -0
  27. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/api/api.py +0 -0
  28. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/api/log.py +0 -0
  29. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/api/repeat.py +0 -0
  30. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/api/routes/__init__.py +0 -0
  31. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/api/routes/job.py +0 -0
  32. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/api/routes/logs.py +0 -0
  33. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/api/routes/schedules.py +0 -0
  34. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/api/routes/workflows.py +0 -0
  35. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/conf.py +0 -0
  36. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/cron.py +0 -0
  37. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/logs.py +0 -0
  38. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/reusables.py +0 -0
  39. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil/workflow/utils.py +0 -0
  40. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil_workflow.egg-info/SOURCES.txt +0 -0
  41. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  42. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  43. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test__cron.py +0 -0
  44. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test__regex.py +0 -0
  45. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_conf.py +0 -0
  46. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_cron_on.py +0 -0
  47. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_job_exec.py +0 -0
  48. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_job_strategy.py +0 -0
  49. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_logs_audit.py +0 -0
  50. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_logs_trace.py +0 -0
  51. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_release.py +0 -0
  52. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_release_queue.py +0 -0
  53. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_result.py +0 -0
  54. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_reusables_call_tag.py +0 -0
  55. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_reusables_template.py +0 -0
  56. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_reusables_template_filter.py +0 -0
  57. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_schedule.py +0 -0
  58. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_schedule_pending.py +0 -0
  59. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_schedule_tasks.py +0 -0
  60. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_schedule_workflow.py +0 -0
  61. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_scheduler_control.py +0 -0
  62. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_utils.py +0 -0
  63. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_workflow.py +0 -0
  64. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_workflow_exec_job.py +0 -0
  65. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_workflow_exec_poke.py +0 -0
  66. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_workflow_exec_release.py +0 -0
  67. {ddeutil_workflow-0.0.42 → ddeutil_workflow-0.0.44}/tests/test_workflow_task.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.42
+ Version: 0.0.44
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -22,7 +22,7 @@ Classifier: Programming Language :: Python :: 3.13
  Requires-Python: >=3.9.13
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: ddeutil>=0.4.6
+ Requires-Dist: ddeutil[checksum]>=0.4.6
  Requires-Dist: ddeutil-io[toml,yaml]>=0.2.10
  Requires-Dist: pydantic==2.11.1
  Requires-Dist: python-dotenv==1.1.0
@@ -212,7 +212,7 @@ execution time such as `run-date` should change base on that workflow running da
  ```python
  from ddeutil.workflow import Workflow, Result

- workflow: Workflow = Workflow.from_loader('run-py-local')
+ workflow: Workflow = Workflow.from_conf('run-py-local')
  result: Result = workflow.execute(
      params={"source-extract": "USD-THB", "asat-dt": "2024-01-01"}
  )
@@ -246,7 +246,7 @@ from ddeutil.workflow import Schedule

  (
      Schedule
-     .from_loader("schedule-run-local-wf")
+     .from_conf("schedule-run-local-wf")
      .pending(stop=None)
  )
  ```
@@ -261,31 +261,31 @@ it will use default value and do not raise any error to you.
  > The config value that you will set on the environment should combine with
  > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).

- | Name | Component | Default | Description |
- |:---|:---:|:---|:---|
- | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
- | **REGISTRY_CALLER** | Core | `.` | List of importable string for the call stage. |
- | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
- | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
- | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
- | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
- | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
- | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
- | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
- | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
- | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
- | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
- | **TRACE_PATH** | Log | `./logs` | The log path of the workflow saving log. |
- | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
- | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
- | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
- | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
- | **TRACE_ENABLE_WRITE** | Log | `false` | |
- | **AUDIT_PATH** | Log | `./audits` | |
- | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
- | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
- | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
- | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
+ | Name | Component | Default | Override | Description |
+ |:---|:---:|:---|:---:|:---|
+ | **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
+ | **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
+ | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
+ | **CONF_PATH** | Core | `conf` | No | The config path that keep all template `.yaml` files. |
+ | **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
+ | **STAGE_DEFAULT_ID** | Core | `true` | No | A flag that enable default stage ID that use for catch an execution output. |
+ | **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
+ | **JOB_DEFAULT_ID** | Core | `false` | No | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+ | **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
+ | **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
+ | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
+ | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | No | A flog that enable generating ID with `md5` algorithm. |
+ | **DEBUG_MODE** | Log | `true` | No | A flag that enable logging with debug level mode. |
+ | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | No | |
+ | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | No | |
+ | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | No | |
+ | **TRACE_PATH** | Log | `./logs` | No | The log path of the workflow saving log. |
+ | **TRACE_ENABLE_WRITE** | Log | `false` | No | |
+ | **AUDIT_PATH** | Log | `./audits` | No | |
+ | **AUDIT_ENABLE_WRITE** | Log | `true` | No | A flag that enable logging object saving log to its destination. |
+ | **MAX_PROCESS** | App | `2` | No | The maximum process worker number that run in scheduler app module. |
+ | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | No | A schedule per process that run parallel. |
+ | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | No | A time delta value that use to stop scheduler app in json string format. |

  **API Application**:

@@ -168,7 +168,7 @@ execution time such as `run-date` should change base on that workflow running da
  ```python
  from ddeutil.workflow import Workflow, Result

- workflow: Workflow = Workflow.from_loader('run-py-local')
+ workflow: Workflow = Workflow.from_conf('run-py-local')
  result: Result = workflow.execute(
      params={"source-extract": "USD-THB", "asat-dt": "2024-01-01"}
  )
@@ -202,7 +202,7 @@ from ddeutil.workflow import Schedule

  (
      Schedule
-     .from_loader("schedule-run-local-wf")
+     .from_conf("schedule-run-local-wf")
      .pending(stop=None)
  )
  ```
@@ -217,31 +217,31 @@ it will use default value and do not raise any error to you.
  > The config value that you will set on the environment should combine with
  > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).

- | Name | Component | Default | Description |
- |:---|:---:|:---|:---|
- | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
- | **REGISTRY_CALLER** | Core | `.` | List of importable string for the call stage. |
- | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
- | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
- | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
- | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
- | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
- | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
- | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
- | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
- | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
- | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
- | **TRACE_PATH** | Log | `./logs` | The log path of the workflow saving log. |
- | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
- | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
- | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
- | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
- | **TRACE_ENABLE_WRITE** | Log | `false` | |
- | **AUDIT_PATH** | Log | `./audits` | |
- | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
- | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
- | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
- | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
+ | Name | Component | Default | Override | Description |
+ |:---|:---:|:---|:---:|:---|
+ | **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
+ | **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
+ | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
+ | **CONF_PATH** | Core | `conf` | No | The config path that keep all template `.yaml` files. |
+ | **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
+ | **STAGE_DEFAULT_ID** | Core | `true` | No | A flag that enable default stage ID that use for catch an execution output. |
+ | **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
+ | **JOB_DEFAULT_ID** | Core | `false` | No | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+ | **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
+ | **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
+ | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
+ | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | No | A flog that enable generating ID with `md5` algorithm. |
+ | **DEBUG_MODE** | Log | `true` | No | A flag that enable logging with debug level mode. |
+ | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | No | |
+ | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | No | |
+ | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | No | |
+ | **TRACE_PATH** | Log | `./logs` | No | The log path of the workflow saving log. |
+ | **TRACE_ENABLE_WRITE** | Log | `false` | No | |
+ | **AUDIT_PATH** | Log | `./audits` | No | |
+ | **AUDIT_ENABLE_WRITE** | Log | `true` | No | A flag that enable logging object saving log to its destination. |
+ | **MAX_PROCESS** | App | `2` | No | The maximum process worker number that run in scheduler app module. |
+ | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | No | A schedule per process that run parallel. |
+ | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | No | A time delta value that use to stop scheduler app in json string format. |

  **API Application**:

@@ -26,7 +26,7 @@ classifiers = [
  ]
  requires-python = ">=3.9.13"
  dependencies = [
-     "ddeutil>=0.4.6",
+     "ddeutil[checksum]>=0.4.6",
      "ddeutil-io[yaml,toml]>=0.2.10",
      "pydantic==2.11.1",
      "python-dotenv==1.1.0",
@@ -99,6 +99,7 @@ exclude_lines = [

  [tool.pytest.ini_options]
  pythonpath = ["src"]
+ asyncio_default_fixture_loop_scope = "fuction"
  # NOTE: You can deslect multiple markers by '-m "not (poke or api)"'
  markers = [
      "poke: marks tests as slow by poking (deselect with '-m \"not poke\"')",
@@ -0,0 +1 @@
+ __version__: str = "0.0.44"
@@ -4,7 +4,7 @@
  # license information.
  # ------------------------------------------------------------------------------
  from .__cron import CronJob, CronRunner
- from .__types import Re
+ from .__types import DictData, DictStr, Matrix, Re, TupleStr
  from .conf import (
      Config,
      Loader,
@@ -47,6 +47,10 @@ from .params import (
      StrParam,
  )
  from .result import (
+     FAILED,
+     SKIP,
+     SUCCESS,
+     WAIT,
      Result,
      Status,
  )
@@ -9,10 +9,20 @@ annotate for handle error only.
  """
  from __future__ import annotations

- from typing import Any
+ from typing import TypedDict

+ ErrorData = TypedDict(
+     "ErrorData",
+     {
+         "class": Exception,
+         "name": str,
+         "message": str,
+     },
+ )

- def to_dict(exception: Exception) -> dict[str, Any]:  # pragma: no cov
+
+ def to_dict(exception: Exception) -> ErrorData:  # pragma: no cov
+     """Create dict data from exception instance."""
      return {
          "class": exception,
          "name": exception.__class__.__name__,
@@ -22,7 +32,7 @@ def to_dict(exception: Exception) -> dict[str, Any]: # pragma: no cov

  class BaseWorkflowException(Exception):

-     def to_dict(self) -> dict[str, Any]:
+     def to_dict(self) -> ErrorData:
          return to_dict(self)


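The `ErrorData` TypedDict above pins down the shape of the `errors` entries that `job.py` builds below. A small sketch of reading one back; the exact `message` value is not shown in the hunk, so it is only printed here:

```python
from ddeutil.workflow.exceptions import JobException

try:
    raise JobException("Job strategy was canceled.")
except JobException as err:
    data = err.to_dict()
    # "class" keeps the exception instance itself, while "name" holds
    # the class name and "message" the stringified error text.
    assert data["name"] == "JobException"
    assert isinstance(data["class"], JobException)
    print(data["message"])
```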
@@ -38,8 +38,9 @@ from .exceptions import (
      JobException,
      StageException,
      UtilException,
+     to_dict,
  )
- from .result import Result, Status
+ from .result import FAILED, SKIP, SUCCESS, WAIT, Result, Status
  from .reusables import has_template, param2template
  from .stages import Stage
  from .utils import cross_product, filter_func, gen_id
@@ -51,7 +52,6 @@ __all__: TupleStr = (
      "Strategy",
      "Job",
      "TriggerRules",
-     "TriggerState",
      "RunsOn",
      "RunsOnLocal",
      "RunsOnSelfHosted",
@@ -206,16 +206,6 @@ class TriggerRules(str, Enum):
      none_skipped: str = "none_skipped"


- class TriggerState(str, Enum):
-     waiting: str = "waiting"
-     passed: str = "passed"
-     skipped: str = "skipped"
-     failed: str = "failed"
-
-     def is_waiting(self):
-         return self.value == "waiting"
-
-
  class RunsOnType(str, Enum):
      """Runs-On enum object."""

@@ -407,30 +397,32 @@ class Job(BaseModel):
      def check_needs(
          self,
          jobs: dict[str, Any],
-     ) -> TriggerState:  # pragma: no cov
-         """Return True if job's need exists in an input list of job's ID.
+     ) -> Status:  # pragma: no cov
+         """Return Status enum for checking job's need trigger logic in an
+         input list of job's ID.

          :param jobs: A mapping of job ID and result context.

          :raise NotImplementedError: If the job trigger rule out of scope.

-         :rtype: TriggerState
+         :rtype: Status
          """
          if not self.needs:
-             return TriggerState.passed
+             return SUCCESS

-         def make_return(result: bool) -> TriggerState:
-             return TriggerState.passed if result else TriggerState.failed
+         def make_return(result: bool) -> Status:
+             return SUCCESS if result else FAILED

          need_exist: dict[str, Any] = {
              need: jobs[need] for need in self.needs if need in jobs
          }
+
          if len(need_exist) != len(self.needs):
-             return TriggerState.waiting
+             return WAIT
          elif all("skipped" in need_exist[job] for job in need_exist):
-             return TriggerState.skipped
+             return SKIP
          elif self.trigger_rule == TriggerRules.all_done:
-             return TriggerState.passed
+             return SUCCESS
          elif self.trigger_rule == TriggerRules.all_success:
              rs = all(
                  k not in need_exist[job]
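Read against the hunk above, the need check now speaks the same `Status` vocabulary as execution results. A rough sketch of the contract; the bare `Job` construction and the need-context shapes are assumptions, and `check_needs` itself is marked `pragma: no cov`:

```python
from ddeutil.workflow import SUCCESS, WAIT
from ddeutil.workflow.job import Job

job = Job(needs=["extract", "transform"])

# WAIT: a needed job has not reported its context yet.
assert job.check_needs({"extract": {"stages": {}}}) == WAIT

# SUCCESS: every need is present and none carries a "skipped" marker;
# the default all_success rule then resolves through make_return().
ctx = {"extract": {"stages": {}}, "transform": {"stages": {}}}
assert job.check_needs(ctx) == SUCCESS
```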
@@ -640,19 +632,6 @@ def local_execute_strategy(
      result: Result = Result(run_id=gen_id(job.id or "not-set", unique=True))

      strategy_id: str = gen_id(strategy)
-
-     # PARAGRAPH:
-     #
-     #     Create strategy execution context and update a matrix and copied
-     # of params. So, the context value will have structure like;
-     #
-     #     {
-     #         "params": { ... },      <== Current input params
-     #         "jobs": { ... },        <== Current input params
-     #         "matrix": { ... }       <== Current strategy value
-     #         "stages": { ... }       <== Catching stage outputs
-     #     }
-     #
      context: DictData = copy.deepcopy(params)
      context.update({"matrix": strategy, "stages": {}})

@@ -660,7 +639,6 @@ def local_execute_strategy(
      result.trace.info(f"[JOB]: Execute Strategy ID: {strategy_id}")
      result.trace.info(f"[JOB]: ... Matrix: {strategy_id}")

-     # IMPORTANT: The stage execution only run sequentially one-by-one.
      for stage in job.stages:

          if stage.is_skipped(params=context):
@@ -674,7 +652,7 @@ def local_execute_strategy(
                  "strategy execution."
              )
              return result.catch(
-                 status=Status.FAILED,
+                 status=FAILED,
                  context={
                      strategy_id: {
                          "matrix": strategy,
@@ -684,34 +662,30 @@ def local_execute_strategy(
                      },
                  },
              )

-         # PARAGRAPH:
-         #
-         #     This step will add the stage result to `stages` key in that
-         # stage id. It will have structure like;
-         #
-         #     {
-         #         "params": { ... },
-         #         "jobs": { ... },
-         #         "matrix": { ... },
-         #         "stages": { { "stage-id-01": { "outputs": { ... } } }, ... }
-         #     }
-         #
-         # IMPORTANT:
-         #     This execution change all stage running IDs to the current job
-         # running ID, but it still trac log to the same parent running ID
-         # (with passing `run_id` and `parent_run_id` to the stage
-         # execution arguments).
-         #
          try:
-             stage.set_outputs(
-                 stage.handler_execute(
-                     params=context,
-                     run_id=result.run_id,
-                     parent_run_id=result.parent_run_id,
-                     event=event,
-                 ).context,
-                 to=context,
+             rs: Result = stage.handler_execute(
+                 params=context,
+                 run_id=result.run_id,
+                 parent_run_id=result.parent_run_id,
+                 event=event,
              )
+             stage.set_outputs(rs.context, to=context)
+             if rs.status == FAILED:
+                 error_msg: str = (
+                     f"Job strategy was break because it has a stage, "
+                     f"{stage.iden}, failed without raise error."
+                 )
+                 return result.catch(
+                     status=FAILED,
+                     context={
+                         strategy_id: {
+                             "matrix": strategy,
+                             "stages": context.pop("stages", {}),
+                             "errors": JobException(error_msg).to_dict(),
+                         },
+                     },
+                 )
+
          except (StageException, UtilException) as err:
              result.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
              do_raise: bool = dynamic(
@@ -724,7 +698,7 @@ def local_execute_strategy(
                  ) from None

          return result.catch(
-             status=Status.FAILED,
+             status=FAILED,
              context={
                  strategy_id: {
                      "matrix": strategy,
@@ -735,7 +709,7 @@ def local_execute_strategy(
          )

      return result.catch(
-         status=Status.SUCCESS,
+         status=SUCCESS,
          context={
              strategy_id: {
                  "matrix": strategy,
@@ -756,8 +730,8 @@ def local_execute(
      raise_error: bool | None = None,
  ) -> Result:
      """Local job execution with passing dynamic parameters from the workflow
-     execution. It will generate matrix values at the first step and run
-     multithread on this metrics to the `stages` field of this job.
+     execution or itself execution. It will generate matrix values at the first
+     step and run multithread on this metrics to the `stages` field of this job.

      This method does not raise any JobException if it runs with
      multi-threading strategy.
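The reworded docstring covers the "or itself execution" path, i.e. driving `local_execute` directly instead of through `Workflow.execute`. A hedged sketch under assumptions: the template follows the README example, `first-job` is a hypothetical job ID inside it, and the positional `(job, params)` call matches the signature, which the hunk only shows in part:

```python
from ddeutil.workflow import Workflow
from ddeutil.workflow.job import local_execute

workflow = Workflow.from_conf("run-py-local")
job = workflow.jobs["first-job"]  # hypothetical job ID in that template

# Run this job's strategy matrix locally, outside workflow.execute().
rs = local_execute(job, params={"source-extract": "USD-THB"})
print(rs.status, rs.context)
```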
@@ -790,7 +764,7 @@ def local_execute(

      if event and event.is_set():  # pragma: no cov
          return result.catch(
-             status=Status.FAILED,
+             status=FAILED,
              context={
                  "errors": JobException(
                      "Job strategy was canceled from event that had set "
@@ -808,7 +782,7 @@ def local_execute(
              raise_error=raise_error,
          )

-         return result.catch(status=Status.SUCCESS)
+         return result.catch(status=result.status)

      fail_fast_flag: bool = job.strategy.fail_fast
      ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
@@ -819,7 +793,7 @@ def local_execute(

          if event and event.is_set():  # pragma: no cov
              return result.catch(
-                 status=Status.FAILED,
+                 status=FAILED,
                  context={
                      "errors": JobException(
                          "Job strategy was canceled from event that had set "
@@ -828,8 +802,6 @@ def local_execute(
                  },
              )

-     # IMPORTANT: Start running strategy execution by multithreading because
-     # it will run by strategy values without waiting previous execution.
      with ThreadPoolExecutor(
          max_workers=job.strategy.max_parallel,
          thread_name_prefix="job_strategy_exec_",
@@ -849,7 +821,7 @@ def local_execute(
          ]

          context: DictData = {}
-         status: Status = Status.SUCCESS
+         status: Status = SUCCESS

          if not fail_fast_flag:
              done = as_completed(futures, timeout=1800)
@@ -875,7 +847,7 @@ def local_execute(
              try:
                  future.result()
              except JobException as err:
-                 status = Status.FAILED
+                 status = FAILED
                  result.trace.error(
                      f"[JOB]: {ls} Catch:\n\t{err.__class__.__name__}:"
                      f"\n\t{err}"
@@ -895,6 +867,22 @@ def self_hosted_execute(
      event: Event | None = None,
      raise_error: bool | None = None,
  ) -> Result:  # pragma: no cov
+     """Self-Hosted job execution with passing dynamic parameters from the
+     workflow execution or itself execution. It will make request to the
+     self-hosted host url.
+
+     :param job: (Job) A job model that want to execute.
+     :param params: (DictData) An input parameters that use on job execution.
+     :param run_id: (str) A job running ID for this execution.
+     :param parent_run_id: (str) A parent workflow running ID for this release.
+     :param result: (Result) A result object for keeping context and status
+         data.
+     :param event: (Event) An event manager that pass to the PoolThreadExecutor.
+     :param raise_error: (bool) A flag that all this method raise error to the
+         strategy execution.
+
+     :rtype: Result
+     """
      result: Result = Result.construct_with_rs_or_id(
          result,
          run_id=run_id,
@@ -903,14 +891,31 @@ def self_hosted_execute(
      )

      if event and event.is_set():
-         return result.catch(status=Status.FAILED)
+         return result.catch(
+             status=FAILED,
+             context={
+                 "errors": JobException(
+                     "Job self-hosted execution was canceled from event that "
+                     "had set before start execution."
+                 ).to_dict()
+             },
+         )

      import requests

-     resp = requests.post(
-         job.runs_on.args.host,
-         data={"job": job.model_dump(), "params": params},
-     )
+     try:
+         resp = requests.post(
+             job.runs_on.args.host,
+             headers={"Auth": f"Barer {job.runs_on.args.token}"},
+             data={
+                 "job": job.model_dump(),
+                 "params": params,
+                 "result": result.__dict__,
+                 "raise_error": raise_error,
+             },
+         )
+     except requests.exceptions.RequestException as e:
+         return result.catch(status=FAILED, context={"errors": to_dict(e)})

      if resp.status_code != 200:
          do_raise: bool = dynamic(
@@ -922,5 +927,5 @@ def self_hosted_execute(
              f"{job.runs_on.args.host!r}"
          )

-         return result.catch(status=Status.FAILED)
-     return result.catch(status=Status.SUCCESS)
+         return result.catch(status=FAILED)
+     return result.catch(status=SUCCESS)