ddeutil-workflow 0.0.44.tar.gz → 0.0.46.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/PKG-INFO +7 -8
  2. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/README.md +5 -5
  3. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/pyproject.toml +1 -3
  4. ddeutil_workflow-0.0.46/src/ddeutil/workflow/__about__.py +1 -0
  5. ddeutil_workflow-0.0.46/src/ddeutil/workflow/__main__.py +0 -0
  6. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/conf.py +39 -51
  7. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/job.py +3 -0
  8. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/logs.py +44 -5
  9. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/result.py +10 -1
  10. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/reusables.py +1 -0
  11. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/stages.py +16 -0
  12. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/workflow.py +7 -4
  13. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil_workflow.egg-info/PKG-INFO +7 -8
  14. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil_workflow.egg-info/SOURCES.txt +1 -0
  15. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil_workflow.egg-info/requires.txt +0 -1
  16. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_workflow.py +51 -0
  17. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_workflow_exec.py +52 -1
  18. ddeutil_workflow-0.0.44/src/ddeutil/workflow/__about__.py +0 -1
  19. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/LICENSE +0 -0
  20. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/setup.cfg +0 -0
  21. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/__cron.py +0 -0
  22. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/__init__.py +0 -0
  23. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/__types.py +0 -0
  24. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/api/__init__.py +0 -0
  25. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/api/api.py +0 -0
  26. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/api/log.py +0 -0
  27. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/api/repeat.py +0 -0
  28. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/api/routes/__init__.py +0 -0
  29. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/api/routes/job.py +0 -0
  30. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/api/routes/logs.py +0 -0
  31. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/api/routes/schedules.py +0 -0
  32. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/api/routes/workflows.py +0 -0
  33. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/cron.py +0 -0
  34. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/exceptions.py +0 -0
  35. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/params.py +0 -0
  36. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/scheduler.py +0 -0
  37. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/utils.py +0 -0
  38. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  39. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  40. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test__cron.py +0 -0
  41. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test__regex.py +0 -0
  42. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_conf.py +0 -0
  43. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_cron_on.py +0 -0
  44. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_job.py +0 -0
  45. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_job_exec.py +0 -0
  46. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_job_exec_strategy.py +0 -0
  47. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_job_strategy.py +0 -0
  48. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_logs_audit.py +0 -0
  49. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_logs_trace.py +0 -0
  50. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_params.py +0 -0
  51. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_release.py +0 -0
  52. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_release_queue.py +0 -0
  53. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_result.py +0 -0
  54. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_reusables_call_tag.py +0 -0
  55. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_reusables_template.py +0 -0
  56. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_reusables_template_filter.py +0 -0
  57. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_schedule.py +0 -0
  58. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_schedule_pending.py +0 -0
  59. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_schedule_tasks.py +0 -0
  60. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_schedule_workflow.py +0 -0
  61. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_scheduler_control.py +0 -0
  62. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_stage.py +0 -0
  63. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_stage_handler_exec.py +0 -0
  64. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_utils.py +0 -0
  65. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_workflow_exec_job.py +0 -0
  66. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_workflow_exec_poke.py +0 -0
  67. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_workflow_exec_release.py +0 -0
  68. {ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_workflow_task.py +0 -0
{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.44
+Version: 0.0.46
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -9,7 +9,7 @@ Project-URL: Source Code, https://github.com/ddeutils/ddeutil-workflow/
 Keywords: orchestration,workflow
 Classifier: Topic :: Utilities
 Classifier: Natural Language :: English
-Classifier: Development Status :: 4 - Beta
+Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
@@ -30,7 +30,6 @@ Requires-Dist: schedule<2.0.0,==1.2.2
 Provides-Extra: all
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "all"
 Requires-Dist: httpx; extra == "all"
-Requires-Dist: ujson; extra == "all"
 Requires-Dist: aiofiles; extra == "all"
 Requires-Dist: aiohttp; extra == "all"
 Provides-Extra: api
@@ -71,9 +70,9 @@ configuration. It called **Metadata Driven Data Workflow**.

 **:pushpin: <u>Rules of This Workflow engine</u>**:

-1. The Minimum frequency unit of scheduling is **1 Minute** 🕘
+1. The Minimum frequency unit of built-in scheduling is **1 Minute** 🕘
 2. **Can not** re-run only failed stage and its pending downstream ↩️
-3. All parallel tasks inside workflow engine use **Multi-Threading**
+3. All parallel tasks inside workflow core engine use **Multi-Threading** pool
    (Python 3.13 unlock GIL 🐍🔓)

 ---
@@ -266,11 +265,11 @@ it will use default value and do not raise any error to you.
 | **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
 | **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
 | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
-| **CONF_PATH** | Core | `conf` | No | The config path that keep all template `.yaml` files. |
+| **CONF_PATH** | Core | `conf` | Yes | The config path that keep all template `.yaml` files. |
 | **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
-| **STAGE_DEFAULT_ID** | Core | `true` | No | A flag that enable default stage ID that use for catch an execution output. |
+| **STAGE_DEFAULT_ID** | Core | `true` | Yes | A flag that enable default stage ID that use for catch an execution output. |
 | **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
-| **JOB_DEFAULT_ID** | Core | `false` | No | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+| **JOB_DEFAULT_ID** | Core | `false` | Yes | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
 | **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
 | **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
 | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/README.md

@@ -27,9 +27,9 @@ configuration. It called **Metadata Driven Data Workflow**.

 **:pushpin: <u>Rules of This Workflow engine</u>**:

-1. The Minimum frequency unit of scheduling is **1 Minute** 🕘
+1. The Minimum frequency unit of built-in scheduling is **1 Minute** 🕘
 2. **Can not** re-run only failed stage and its pending downstream ↩️
-3. All parallel tasks inside workflow engine use **Multi-Threading**
+3. All parallel tasks inside workflow core engine use **Multi-Threading** pool
    (Python 3.13 unlock GIL 🐍🔓)

 ---
@@ -222,11 +222,11 @@ it will use default value and do not raise any error to you.
 | **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
 | **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
 | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
-| **CONF_PATH** | Core | `conf` | No | The config path that keep all template `.yaml` files. |
+| **CONF_PATH** | Core | `conf` | Yes | The config path that keep all template `.yaml` files. |
 | **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
-| **STAGE_DEFAULT_ID** | Core | `true` | No | A flag that enable default stage ID that use for catch an execution output. |
+| **STAGE_DEFAULT_ID** | Core | `true` | Yes | A flag that enable default stage ID that use for catch an execution output. |
 | **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
-| **JOB_DEFAULT_ID** | Core | `false` | No | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+| **JOB_DEFAULT_ID** | Core | `false` | Yes | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
 | **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
 | **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
 | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
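For orientation: the flags flipped to `Yes` for `CONF_PATH`, `STAGE_DEFAULT_ID`, and `JOB_DEFAULT_ID` appear to mark keys that this release lets callers override at run-time through `extras`, in line with the new `dynamic` helper in conf.py below. A minimal sketch, grounded in the new tests at the bottom of this diff (the workflow name is hypothetical):

```python
from pathlib import Path

from ddeutil.workflow import Workflow

# Override the CONF_PATH config for a single load instead of via environment.
workflow = Workflow.from_conf(
    name="my-workflow",  # hypothetical template name
    extras={"conf_path": Path("mock_conf")},
)
```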
{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/pyproject.toml

@@ -12,8 +12,7 @@ keywords = ['orchestration', 'workflow']
 classifiers = [
     "Topic :: Utilities",
     "Natural Language :: English",
-    "Development Status :: 4 - Beta",
-    # "Development Status :: 5 - Production/Stable",
+    "Development Status :: 5 - Production/Stable",
     "Intended Audience :: Developers",
     "Operating System :: OS Independent",
     "Programming Language :: Python",
@@ -38,7 +37,6 @@ dynamic = ["version"]
 all = [
     "fastapi>=0.115.0,<1.0.0",
     "httpx",
-    "ujson",
     "aiofiles",
     "aiohttp",
 ]
ddeutil_workflow-0.0.46/src/ddeutil/workflow/__about__.py

@@ -0,0 +1 @@
+__version__: str = "0.0.46"
{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/conf.py

@@ -26,7 +26,10 @@ PREFIX: str = "WORKFLOW"


 def env(var: str, default: str | None = None) -> str | None:  # pragma: no cov
-    """Get environment variable with uppercase and adding prefix string."""
+    """Get environment variable with uppercase and adding prefix string.
+
+    :rtype: str | None
+    """
     return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)


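For orientation: `env` just uppercases the key, swaps spaces for underscores, and prepends `WORKFLOW_`, so the lookup behaves like this sketch:

```python
import os

from ddeutil.workflow.conf import env

os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"

# env("core timezone") reads f"WORKFLOW_{'core timezone'.upper().replace(' ', '_')}",
# i.e. the WORKFLOW_CORE_TIMEZONE environment variable.
assert env("core timezone") == "Asia/Bangkok"
assert env("missing key", default="fallback") == "fallback"
```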
@@ -42,29 +45,7 @@ __all__: TupleStr = (
 )


-class BaseConfig:  # pragma: no cov
-    """BaseConfig object inheritable."""
-
-    __slots__ = ()
-
-    @property
-    def root_path(self) -> Path:
-        """Root path or the project path.
-
-        :rtype: Path
-        """
-        return Path(os.getenv("ROOT_PATH", "."))
-
-    @property
-    def conf_path(self) -> Path:
-        """Config path that use root_path class argument for this construction.
-
-        :rtype: Path
-        """
-        return self.root_path / os.getenv("CONF_PATH", "conf")
-
-
-class Config(BaseConfig):  # pragma: no cov
+class Config:  # pragma: no cov
     """Config object for keeping core configurations on the current session
     without changing when if the application still running.

@@ -217,6 +198,7 @@ class Config(BaseConfig):  # pragma: no cov


 class APIConfig:
+    """API Config object."""

     @property
     def prefix_path(self) -> str:
@@ -341,6 +323,32 @@ class SimLoad:
         )


+config: Config = Config()
+api_config: APIConfig = APIConfig()
+
+
+def dynamic(
+    key: Optional[str] = None,
+    *,
+    f: Optional[T] = None,
+    extras: Optional[DictData] = None,
+) -> Optional[T]:
+    """Dynamic get config if extra value was passed at run-time.
+
+    :param key: (str) A config key that get from Config object.
+    :param f: An inner config function scope.
+    :param extras: An extra values that pass at run-time.
+    """
+    rsx: Optional[T] = extras[key] if extras and key in extras else None
+    rs: Optional[T] = f or getattr(config, key, None)
+    if rsx is not None and not isinstance(rsx, type(rs)):
+        raise TypeError(
+            f"Type of config {key!r} from extras: {rsx!r} does not valid "
+            f"as config {type(rs)}."
+        )
+    return rsx or rs
+
+
 class Loader(SimLoad):
     """Loader Object that get the config `yaml` file from current path.

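For orientation: `config`, `api_config`, and `dynamic` moved above `Loader` because `Loader.__init__` now calls `dynamic` directly. Resolution order is the `extras` mapping first, then the `f` override, then the session `Config` attribute, with a type check guarding the extras value; a minimal usage sketch, assuming the default `conf_path` config is a `Path`:

```python
from pathlib import Path

from ddeutil.workflow.conf import dynamic

# No extras: falls back to config.conf_path from the session Config object.
default_path = dynamic("conf_path")

# An extras mapping passed at run-time wins over the session value.
override = dynamic("conf_path", extras={"conf_path": Path("mock_conf")})
assert override == Path("mock_conf")

# A mistyped override is rejected instead of silently used.
try:
    dynamic("conf_path", extras={"conf_path": "mock_conf"})  # str, not Path
except TypeError as err:
    print(err)
```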
@@ -355,6 +363,7 @@ class Loader(SimLoad):
         *,
         included: list[str] | None = None,
         excluded: list[str] | None = None,
+        path: Path | None = None,
         **kwargs,
     ) -> Iterator[tuple[str, DictData]]:
         """Override the find class method from the Simple Loader object.
@@ -362,44 +371,23 @@ class Loader(SimLoad):
         :param obj: An object that want to validate matching before return.
         :param included:
         :param excluded:
+        :param path:

         :rtype: Iterator[tuple[str, DictData]]
         """
         return super().finds(
             obj=obj,
-            conf_path=config.conf_path,
+            conf_path=(path or config.conf_path),
             included=included,
             excluded=excluded,
         )

     def __init__(self, name: str, externals: DictData) -> None:
-        super().__init__(name, conf_path=config.conf_path, externals=externals)
-
-
-config: Config = Config()
-api_config: APIConfig = APIConfig()
-
-
-def dynamic(
-    key: Optional[str] = None,
-    *,
-    f: Optional[T] = None,
-    extras: Optional[DictData] = None,
-) -> Optional[T]:
-    """Dynamic get config if extra value was passed at run-time.
-
-    :param key: (str) A config key that get from Config object.
-    :param f: An inner config function scope.
-    :param extras: An extra values that pass at run-time.
-    """
-    rsx: Optional[T] = extras[key] if extras and key in extras else None
-    rs: Optional[T] = f or getattr(config, key, None)
-    if rsx is not None and not isinstance(rsx, type(rs)):
-        raise TypeError(
-            f"Type of config {key!r} from extras: {rsx!r} does not valid "
-            f"as config {type(rs)}."
+        super().__init__(
+            name,
+            conf_path=dynamic("conf_path", extras=externals),
+            externals=externals,
         )
-    return rsx or rs


 @lru_cache
{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/job.py

@@ -641,6 +641,9 @@ def local_execute_strategy(

     for stage in job.stages:

+        if job.extras:
+            stage.extras = job.extras
+
         if stage.is_skipped(params=context):
             result.trace.info(f"[STAGE]: Skip stage: {stage.iden!r}")
             stage.set_outputs(output={"skipped": True}, to=context)
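This is the first of several identical propagation points in this release (ParallelStage, ForEachStage, and CaseStage in stages.py below get the same treatment): the parent's run-time `extras` are stamped onto each child stage just before execution, so overrides such as `conf_path` or `regis_call` survive nesting. A hypothetical miniature of the pattern, outside the package:

```python
class Node:
    """Hypothetical stand-in for Job/Stage to show the propagation shape."""

    def __init__(self, children: list["Node"] | None = None) -> None:
        self.extras: dict = {}
        self.children: list["Node"] = children or []

    def execute(self) -> None:
        for child in self.children:
            if self.extras:
                child.extras = self.extras  # push run-time overrides downward
            child.execute()
```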
{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/logs.py

@@ -80,6 +80,8 @@ class TraceMeda(BaseModel):  # pragma: no cov


 class TraceData(BaseModel):  # pragma: no cov
+    """Trace Data model for keeping data for any Trace models."""
+
     stdout: str = Field(description="A standard output trace data.")
     stderr: str = Field(description="A standard error trace data.")
     meta: list[TraceMeda] = Field(
@@ -92,6 +94,12 @@ class TraceData(BaseModel):  # pragma: no cov

     @classmethod
     def from_path(cls, file: Path) -> Self:
+        """Construct this trace data model with a trace path.
+
+        :param file: (Path) A trace path.
+
+        :rtype: Self
+        """
         data: DictStr = {"stdout": "", "stderr": "", "meta": []}

         if (file / "stdout.txt").exists():
@@ -207,27 +215,52 @@ class BaseTraceLog(ABC):  # pragma: no cov
         logger.exception(msg, stacklevel=2)

     async def adebug(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the DEBUG level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         if config.debug:
             await self.awriter(msg)
         logger.info(msg, stacklevel=2)

     async def ainfo(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the INFO level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         await self.awriter(msg)
         logger.info(msg, stacklevel=2)

     async def awarning(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the WARNING level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         await self.awriter(msg)
         logger.warning(msg, stacklevel=2)

     async def aerror(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the ERROR level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         await self.awriter(msg, is_err=True)
         logger.error(msg, stacklevel=2)

     async def aexception(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the EXCEPTION level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         await self.awriter(msg, is_err=True)
         logger.exception(msg, stacklevel=2)
@@ -237,23 +270,29 @@ class FileTraceLog(BaseTraceLog):  # pragma: no cov
     """Trace Log object that write file to the local storage."""

     @classmethod
-    def find_logs(cls) -> Iterator[TraceData]:  # pragma: no cov
+    def find_logs(
+        cls, path: Path | None = None
+    ) -> Iterator[TraceData]:  # pragma: no cov
+        """Find trace logs."""
         for file in sorted(
-            config.log_path.glob("./run_id=*"),
+            (path or config.log_path).glob("./run_id=*"),
             key=lambda f: f.lstat().st_mtime,
         ):
             yield TraceData.from_path(file)

     @classmethod
     def find_log_with_id(
-        cls, run_id: str, force_raise: bool = True
+        cls, run_id: str, force_raise: bool = True, *, path: Path | None = None
     ) -> TraceData:
-        file: Path = config.log_path / f"run_id={run_id}"
+        """Find trace log with an input specific run ID."""
+        base_path: Path = path or config.log_path
+        file: Path = base_path / f"run_id={run_id}"
         if file.exists():
             return TraceData.from_path(file)
         elif force_raise:
             raise FileNotFoundError(
-                f"Trace log on path 'run_id={run_id}' does not found."
+                f"Trace log on path {base_path}, does not found trace "
+                f"'run_id={run_id}'."
             )
         return {}

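For orientation: both finders now accept an optional `path` that falls back to `config.log_path`, which lets tests and tools read traces from an isolated log folder. A minimal usage sketch with a hypothetical directory and run ID:

```python
from pathlib import Path

from ddeutil.workflow.logs import FileTraceLog

log_dir = Path("./tmp_logs")  # hypothetical folder containing run_id=* subfolders

# Iterate all traces in that folder, oldest first (sorted by mtime).
for trace in FileTraceLog.find_logs(path=log_dir):
    print(trace.stdout)

# Look up one run; with force_raise=False a missing trace returns {} instead
# of raising FileNotFoundError.
trace = FileTraceLog.find_log_with_id("demo-run", force_raise=False, path=log_dir)
```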
{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/result.py

@@ -72,6 +72,7 @@ class Result:
     ts: datetime = field(default_factory=get_dt_tznow, compare=False)

     trace: Optional[TraceLog] = field(default=None, compare=False, repr=False)
+    extras: DictData = field(default_factory=dict)

     @classmethod
     def construct_with_rs_or_id(
@@ -80,6 +81,8 @@ class Result:
         run_id: str | None = None,
         parent_run_id: str | None = None,
         id_logic: str | None = None,
+        *,
+        extras: DictData | None = None,
     ) -> Self:
         """Create the Result object or set parent running id if passing Result
         object.
@@ -88,16 +91,22 @@ class Result:
         :param run_id:
         :param parent_run_id:
         :param id_logic:
+        :param extras:

         :rtype: Self
         """
         if result is None:
-            result: Result = cls(
+            return cls(
                 run_id=(run_id or gen_id(id_logic or "", unique=True)),
                 parent_run_id=parent_run_id,
+                extras=(extras or {}),
             )
         elif parent_run_id:
             result.set_parent_run_id(parent_run_id)
+
+        if extras is not None:
+            result.extras.update(extras)
+
         return result

     @model_validator(mode="after")
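For orientation: a fresh `Result` is now created with the extras baked in, while a passed-in `Result` gets them merged in place. A small sketch with hypothetical extras keys:

```python
from ddeutil.workflow.result import Result

# No existing result: a new Result is returned carrying the extras.
rs = Result.construct_with_rs_or_id(id_logic="demo", extras={"conf_path": "conf"})
assert rs.extras == {"conf_path": "conf"}

# Existing result: parent run ID is set and extras are merged into the same object.
merged = Result.construct_with_rs_or_id(
    rs, parent_run_id=rs.run_id, extras={"registry": ["my_tasks"]}
)
assert merged is rs and "registry" in rs.extras
```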
{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/reusables.py

@@ -499,6 +499,7 @@ class CallSearchData:

 def extract_call(
     call: str,
+    *,
     registries: Optional[list[str]] = None,
 ) -> Callable[[], TagFunc]:
     """Extract Call function from string value to call partial function that
{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/stages.py

@@ -814,6 +814,8 @@ class CallStage(BaseStage):
             run_id=gen_id(self.name + (self.id or ""), unique=True)
         )

+        print("Extras in CallStage", self.extras)
+
         t_func: TagFunc = extract_call(
             param2template(self.uses, params, extras=self.extras),
             registries=self.extras.get("regis_call"),
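For orientation: `extract_call` here is the function made keyword-only in the reusables.py hunk above, so `registries` can no longer be passed positionally; the new test at the bottom of this diff exercises exactly this. A one-line illustration with a hypothetical tag that would need to exist on `sys.path`:

```python
from ddeutil.workflow import extract_call

func = extract_call("my_tasks/say-hello@v1", registries=["my_package"])  # OK
# extract_call("my_tasks/say-hello@v1", ["my_package"]) now raises TypeError.
```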
@@ -976,6 +978,8 @@ class ParallelStage(BaseStage):  # pragma: no cov
         params: DictData,
         result: Result,
         stages: list[Stage],
+        *,
+        extras: DictData | None = None,
     ) -> DictData:
         """Task execution method for passing a branch to each thread.

@@ -984,12 +988,16 @@ class ParallelStage(BaseStage):  # pragma: no cov
         :param result: (Result) A result object for keeping context and status
             data.
         :param stages:
+        :param extras

         :rtype: DictData
         """
         context = {"branch": branch, "stages": {}}
         result.trace.debug(f"[STAGE]: Execute parallel branch: {branch!r}")
         for stage in stages:
+            if extras:
+                stage.extras = extras
+
             try:
                 stage.set_outputs(
                     stage.handler_execute(
@@ -1048,6 +1056,7 @@ class ParallelStage(BaseStage):  # pragma: no cov
                             params=params,
                             result=result,
                             stages=self.parallel[branch],
+                            extras=self.extras,
                         )
                     )

@@ -1144,6 +1153,10 @@ class ForEachStage(BaseStage):
         context = {"stages": {}}

         for stage in self.stages:
+
+            if self.extras:
+                stage.extras = self.extras
+
             try:
                 stage.set_outputs(
                     stage.handler_execute(
@@ -1284,6 +1297,9 @@ class CaseStage(BaseStage):  # pragma: no cov

             if match == _condition:
                 stage: Stage = match.stage
+                if self.extras:
+                    stage.extras = self.extras
+
                 try:
                     stage.set_outputs(
                         stage.handler_execute(
{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil/workflow/workflow.py

@@ -314,7 +314,8 @@ class Workflow(BaseModel):

         loader_data: DictData = copy.deepcopy(loader.data)
         loader_data["name"] = name.replace(" ", "_")
-        if extras:  # pragma: no cov
+
+        if extras:
             loader_data["extras"] = extras

         cls.__bypass_on__(loader_data, path=loader.conf_path, extras=extras)
@@ -325,6 +326,7 @@ class Workflow(BaseModel):
         cls,
         name: str,
         path: Path,
+        *,
         extras: DictData | None = None,
     ) -> Self:
         """Create Workflow instance from the specific path. The loader object
@@ -349,7 +351,8 @@ class Workflow(BaseModel):

         loader_data: DictData = copy.deepcopy(loader.data)
         loader_data["name"] = name.replace(" ", "_")
-        if extras:  # pragma: no cov
+
+        if extras:
             loader_data["extras"] = extras

         cls.__bypass_on__(loader_data, path=path, extras=extras)
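For orientation: the `extras` branch is now exercised by the new tests below, which is why the `# pragma: no cov` markers were dropped. Combined with the keyword-only `extras` on `from_path`, loading with run-time overrides looks like this sketch (names and paths hypothetical):

```python
from pathlib import Path

from ddeutil.workflow import Workflow

# Resolve the YAML template from an overridden config folder.
wf = Workflow.from_conf(name="my-workflow", extras={"conf_path": Path("mock_conf")})

# Or load from an explicit path; extras must be passed by keyword here.
wf = Workflow.from_path(name="my-workflow", path=Path("mock_conf"), extras={})
```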
@@ -1136,7 +1139,7 @@
             not_timeout_flag := ((time.monotonic() - ts) < timeout)
         ):
             job_id: str = job_queue.get()
-            job: Job = self.jobs[job_id]
+            job: Job = self.job(name=job_id)

             if (check := job.check_needs(context["jobs"])) == WAIT:
                 job_queue.task_done()
@@ -1228,7 +1231,7 @@
             not_timeout_flag := ((time.monotonic() - ts) < timeout)
         ):
             job_id: str = job_queue.get()
-            job: Job = self.jobs[job_id]
+            job: Job = self.job(name=job_id)

             if (check := job.check_needs(context["jobs"])) == WAIT:
                 job_queue.task_done()
{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil_workflow.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.44
+Version: 0.0.46
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -9,7 +9,7 @@ Project-URL: Source Code, https://github.com/ddeutils/ddeutil-workflow/
 Keywords: orchestration,workflow
 Classifier: Topic :: Utilities
 Classifier: Natural Language :: English
-Classifier: Development Status :: 4 - Beta
+Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
@@ -30,7 +30,6 @@ Requires-Dist: schedule<2.0.0,==1.2.2
 Provides-Extra: all
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "all"
 Requires-Dist: httpx; extra == "all"
-Requires-Dist: ujson; extra == "all"
 Requires-Dist: aiofiles; extra == "all"
 Requires-Dist: aiohttp; extra == "all"
 Provides-Extra: api
@@ -71,9 +70,9 @@ configuration. It called **Metadata Driven Data Workflow**.

 **:pushpin: <u>Rules of This Workflow engine</u>**:

-1. The Minimum frequency unit of scheduling is **1 Minute** 🕘
+1. The Minimum frequency unit of built-in scheduling is **1 Minute** 🕘
 2. **Can not** re-run only failed stage and its pending downstream ↩️
-3. All parallel tasks inside workflow engine use **Multi-Threading**
+3. All parallel tasks inside workflow core engine use **Multi-Threading** pool
    (Python 3.13 unlock GIL 🐍🔓)

 ---
@@ -266,11 +265,11 @@ it will use default value and do not raise any error to you.
 | **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
 | **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
 | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
-| **CONF_PATH** | Core | `conf` | No | The config path that keep all template `.yaml` files. |
+| **CONF_PATH** | Core | `conf` | Yes | The config path that keep all template `.yaml` files. |
 | **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
-| **STAGE_DEFAULT_ID** | Core | `true` | No | A flag that enable default stage ID that use for catch an execution output. |
+| **STAGE_DEFAULT_ID** | Core | `true` | Yes | A flag that enable default stage ID that use for catch an execution output. |
 | **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
-| **JOB_DEFAULT_ID** | Core | `false` | No | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+| **JOB_DEFAULT_ID** | Core | `false` | Yes | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
 | **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
 | **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
 | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil_workflow.egg-info/SOURCES.txt

@@ -4,6 +4,7 @@ pyproject.toml
 src/ddeutil/workflow/__about__.py
 src/ddeutil/workflow/__cron.py
 src/ddeutil/workflow/__init__.py
+src/ddeutil/workflow/__main__.py
 src/ddeutil/workflow/__types.py
 src/ddeutil/workflow/conf.py
 src/ddeutil/workflow/cron.py
{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/src/ddeutil_workflow.egg-info/requires.txt

@@ -7,7 +7,6 @@ schedule<2.0.0,==1.2.2
 [all]
 fastapi<1.0.0,>=0.115.0
 httpx
-ujson
 aiofiles
 aiohttp

{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_workflow.py

@@ -1,3 +1,6 @@
+import shutil
+from pathlib import Path
+
 import pytest
 from ddeutil.workflow import Workflow
 from ddeutil.workflow.exceptions import WorkflowException
@@ -151,6 +154,54 @@ def test_workflow_from_path(test_path):
     assert rs.context == {}


+def test_workflow_from_loader_override(test_path):
+    conf_path: Path = test_path / "mock_conf"
+    conf_path.mkdir(exist_ok=True)
+    (conf_path / "demo").mkdir(exist_ok=True)
+
+    with dump_yaml_context(
+        conf_path / "demo/01_99_wf_test_override_config.yml",
+        data="""
+        tmp-wf-override-conf:
+          type: Workflow
+          param: {name: str}
+          jobs:
+            first-job:
+              stages:
+                - name: "Hello"
+                  echo: "Hello ${{ params.name }}"
+
+        tmp-wf-override-conf-trigger:
+          type: Workflow
+          params: {name: str}
+          jobs:
+            trigger-job:
+              stages:
+                - name: "Trigger override"
+                  id: trigger-stage
+                  trigger: tmp-wf-override-conf
+                  params:
+                    name: ${{ params.name }}
+        """,
+    ):
+        workflow = Workflow.from_conf(
+            name="tmp-wf-override-conf", extras={"conf_path": conf_path}
+        )
+        rs: Result = workflow.execute(params={"name": "foo"})
+        print(rs.context)
+
+        workflow = Workflow.from_conf(
+            name="tmp-wf-override-conf-trigger", extras={"conf_path": conf_path}
+        )
+        stage = workflow.job(name="trigger-job").stage("trigger-stage")
+        assert stage.extras == {"conf_path": conf_path}
+
+        rs: Result = workflow.execute(params={"name": "bar"})
+        print(rs.context)
+
+    shutil.rmtree(conf_path)
+
+
 def test_workflow_from_loader_raise(test_path):
     test_file = test_path / "conf/demo/01_01_wf_run_raise.yml"

{ddeutil_workflow-0.0.44 → ddeutil_workflow-0.0.46}/tests/test_workflow_exec.py

@@ -1,8 +1,10 @@
+import shutil
 from datetime import datetime
+from textwrap import dedent
 from unittest import mock

 from ddeutil.core import getdot
-from ddeutil.workflow import SUCCESS, Workflow
+from ddeutil.workflow import SUCCESS, Workflow, extract_call
 from ddeutil.workflow.conf import Config
 from ddeutil.workflow.job import Job
 from ddeutil.workflow.result import FAILED, Result
@@ -488,6 +490,55 @@ def test_workflow_exec_call(test_path):
     } == rs.context


+def test_workflow_exec_call_override_registry(test_path):
+    task_path = test_path.parent / "mock_tests"
+    task_path.mkdir(exist_ok=True)
+    (task_path / "__init__.py").open(mode="w")
+    (task_path / "mock_tasks").mkdir(exist_ok=True)
+
+    with (task_path / "mock_tasks/__init__.py").open(mode="w") as f:
+        f.write(
+            dedent(
+                """
+                from ddeutil.workflow import tag, Result
+
+                @tag("v1", alias="get-info")
+                def get_info(result: Result):
+                    result.trace.info("... [CALLER]: Info from mock tasks")
+                    return {"get-info": "success"}
+
+                """.strip(
+                    "\n"
+                )
+            )
+        )
+
+    with dump_yaml_context(
+        test_path / "conf/demo/01_99_wf_test_wf_exec_call_override.yml",
+        data="""
+        tmp-wf-exec-call-override:
+          type: Workflow
+          jobs:
+            first-job:
+              stages:
+                - name: "Call from mock tasks"
+                  uses: mock_tasks/get-info@v1
+        """,
+    ):
+        func = extract_call("mock_tasks/get-info@v1", registries=["mock_tests"])
+        assert func().name == "get-info"
+
+        workflow = Workflow.from_conf(
+            name="tmp-wf-exec-call-override",
+            extras={"regis_call": ["mock_tests"]},
+        )
+        rs = workflow.execute(params={})
+        assert rs.status == SUCCESS
+        print(rs.context)
+
+    shutil.rmtree(task_path)
+
+
 def test_workflow_exec_call_with_prefix(test_path):
     with dump_yaml_context(
         test_path / "conf/demo/01_99_wf_test_wf_call_mssql_proc.yml",
ddeutil_workflow-0.0.44/src/ddeutil/workflow/__about__.py

@@ -1 +0,0 @@
-__version__: str = "0.0.44"