ddeutil-workflow 0.0.28__tar.gz → 0.0.29__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65) hide show
  1. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/PKG-INFO +12 -4
  2. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/README.md +10 -2
  3. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/pyproject.toml +7 -1
  4. ddeutil_workflow-0.0.29/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/__init__.py +1 -0
  6. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/__types.py +11 -9
  7. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/conf.py +38 -10
  8. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/hook.py +4 -2
  9. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/templates.py +1 -4
  10. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/workflow.py +4 -1
  11. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil_workflow.egg-info/PKG-INFO +12 -4
  12. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil_workflow.egg-info/SOURCES.txt +1 -5
  13. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil_workflow.egg-info/requires.txt +1 -1
  14. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_conf.py +4 -2
  15. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_hook_tag.py +3 -2
  16. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_stage.py +1 -0
  17. ddeutil_workflow-0.0.29/tests/test_stage_handler_exec.py +182 -0
  18. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_workflow_exec.py +82 -2
  19. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_workflow_poke.py +5 -0
  20. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_workflow_schedule.py +1 -3
  21. ddeutil_workflow-0.0.28/src/ddeutil/workflow/__about__.py +0 -1
  22. ddeutil_workflow-0.0.28/tests/test_stage_exec_bash.py +0 -34
  23. ddeutil_workflow-0.0.28/tests/test_stage_exec_hook.py +0 -46
  24. ddeutil_workflow-0.0.28/tests/test_stage_exec_py.py +0 -87
  25. ddeutil_workflow-0.0.28/tests/test_stage_exec_trigger.py +0 -30
  26. ddeutil_workflow-0.0.28/tests/test_workflow_exec_needs.py +0 -74
  27. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/LICENSE +0 -0
  28. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/setup.cfg +0 -0
  29. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/__cron.py +0 -0
  30. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/api/__init__.py +0 -0
  31. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/api/api.py +0 -0
  32. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/api/repeat.py +0 -0
  33. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/api/route.py +0 -0
  34. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/cron.py +0 -0
  35. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/exceptions.py +0 -0
  36. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/job.py +0 -0
  37. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/params.py +0 -0
  38. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/result.py +0 -0
  39. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/scheduler.py +0 -0
  40. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/stage.py +0 -0
  41. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil/workflow/utils.py +0 -0
  42. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  43. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  44. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test__cron.py +0 -0
  45. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test__regex.py +0 -0
  46. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_conf_log.py +0 -0
  47. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_cron_on.py +0 -0
  48. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_job.py +0 -0
  49. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_job_exec_py.py +0 -0
  50. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_job_exec_strategy.py +0 -0
  51. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_job_strategy.py +0 -0
  52. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_params.py +0 -0
  53. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_result.py +0 -0
  54. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_schedule.py +0 -0
  55. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_schedule_control.py +0 -0
  56. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_schedule_tasks.py +0 -0
  57. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_templates.py +0 -0
  58. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_templates_filter.py +0 -0
  59. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_utils.py +0 -0
  60. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_workflow.py +0 -0
  61. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_workflow_exec_hook.py +0 -0
  62. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_workflow_job_exec.py +0 -0
  63. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_workflow_release.py +0 -0
  64. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_workflow_release_and_queue.py +0 -0
  65. {ddeutil_workflow-0.0.28 → ddeutil_workflow-0.0.29}/tests/test_workflow_task.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: ddeutil-workflow
3
- Version: 0.0.28
3
+ Version: 0.0.29
4
4
  Summary: Lightweight workflow orchestration
5
5
  Author-email: ddeutils <korawich.anu@gmail.com>
6
6
  License: MIT
@@ -22,7 +22,7 @@ Classifier: Programming Language :: Python :: 3.13
22
22
  Requires-Python: >=3.9.13
23
23
  Description-Content-Type: text/markdown
24
24
  License-File: LICENSE
25
- Requires-Dist: ddeutil==0.4.6
25
+ Requires-Dist: ddeutil>=0.4.6
26
26
  Requires-Dist: ddeutil-io[toml,yaml]>=0.2.3
27
27
  Requires-Dist: pydantic==2.10.6
28
28
  Requires-Dist: python-dotenv==1.0.1
@@ -55,6 +55,8 @@ the input parameters per use-case instead.
55
55
  This way I can handle a lot of logical workflows in our orgs with only metadata
56
56
  configuration. It called **Metadata Driven Data Workflow**.
57
57
 
58
+ ---
59
+
58
60
  **:pushpin: <u>Rules of This Workflow engine</u>**:
59
61
 
60
62
  1. The Minimum frequency unit of scheduling is **1 minute** :warning:
@@ -62,8 +64,14 @@ configuration. It called **Metadata Driven Data Workflow**.
62
64
  3. All parallel tasks inside workflow engine use Multi-Threading
63
65
  (Python 3.13 unlock GIL :unlock:)
64
66
 
67
+ ---
68
+
65
69
  **:memo: <u>Workflow Diagrams</u>**:
66
70
 
71
+ This diagram shows where this application runs on the production infrastructure.
72
+ You will see that this application only runs code, stress-free, which means
73
+ you should set up the data layer separately from this core program before running this application.
74
+
67
75
  ```mermaid
68
76
  flowchart LR
69
77
  subgraph Interface
@@ -215,8 +223,8 @@ and do not raise any error to you.
215
223
  | Name | Component | Default | Description |
216
224
  |:-----------------------------|:---------:|:----------------------------------|:-------------------------------------------------------------------------------------------------------------------|
217
225
  | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
218
- | **REGISTRY** | Core | `src` | List of importable string for the hook stage. |
219
- | **REGISTRY_FILTER** | Core | `ddeutil.workflow.utils` | List of importable string for the filter template. |
226
+ | **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
227
+ | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
220
228
  | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
221
229
  | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
222
230
  | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
@@ -23,6 +23,8 @@ the input parameters per use-case instead.
23
23
  This way I can handle a lot of logical workflows in our orgs with only metadata
24
24
  configuration. It called **Metadata Driven Data Workflow**.
25
25
 
26
+ ---
27
+
26
28
  **:pushpin: <u>Rules of This Workflow engine</u>**:
27
29
 
28
30
  1. The Minimum frequency unit of scheduling is **1 minute** :warning:
@@ -30,8 +32,14 @@ configuration. It called **Metadata Driven Data Workflow**.
30
32
  3. All parallel tasks inside workflow engine use Multi-Threading
31
33
  (Python 3.13 unlock GIL :unlock:)
32
34
 
35
+ ---
36
+
33
37
  **:memo: <u>Workflow Diagrams</u>**:
34
38
 
39
+ This diagram shows where this application runs on the production infrastructure.
40
+ You will see that this application only runs code, stress-free, which means
41
+ you should set up the data layer separately from this core program before running this application.
42
+
35
43
  ```mermaid
36
44
  flowchart LR
37
45
  subgraph Interface
@@ -183,8 +191,8 @@ and do not raise any error to you.
183
191
  | Name | Component | Default | Description |
184
192
  |:-----------------------------|:---------:|:----------------------------------|:-------------------------------------------------------------------------------------------------------------------|
185
193
  | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
186
- | **REGISTRY** | Core | `src` | List of importable string for the hook stage. |
187
- | **REGISTRY_FILTER** | Core | `ddeutil.workflow.utils` | List of importable string for the filter template. |
194
+ | **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
195
+ | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
188
196
  | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
189
197
  | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
190
198
  | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
@@ -26,7 +26,7 @@ classifiers = [
26
26
  ]
27
27
  requires-python = ">=3.9.13"
28
28
  dependencies = [
29
- "ddeutil==0.4.6",
29
+ "ddeutil>=0.4.6",
30
30
  "ddeutil-io[yaml,toml]>=0.2.3",
31
31
  "pydantic==2.10.6",
32
32
  "python-dotenv==1.0.1",
@@ -77,6 +77,12 @@ exclude_lines = [
77
77
 
78
78
  [tool.pytest.ini_options]
79
79
  pythonpath = ["src"]
80
+ # NOTE: You can deselect multiple markers by '-m "not (poke or api)"'
81
+ markers = [
82
+ "poke: marks tests as slow by poking (deselect with '-m \"not poke\"')",
83
+ "schedule: marks tests as schedule (deselect with '-m \"not schedule\"')",
84
+ "api: marks tests as api (deselect with '-m \"not api\"')",
85
+ ]
80
86
  console_output_style = "count"
81
87
  addopts = [
82
88
  "--strict-config",
@@ -0,0 +1 @@
1
+ __version__: str = "0.0.29"
@@ -4,6 +4,7 @@
4
4
  # license information.
5
5
  # ------------------------------------------------------------------------------
6
6
  from .__cron import CronJob, CronRunner
7
+ from .__types import Re
7
8
  from .conf import (
8
9
  Config,
9
10
  Loader,
@@ -27,6 +27,8 @@ Matrix = dict[str, Union[list[str], list[int]]]
27
27
 
28
28
 
29
29
  class Context(TypedDict):
30
+ """TypeDict support the Context."""
31
+
30
32
  params: dict[str, Any]
31
33
  jobs: dict[str, Any]
32
34
 
@@ -71,14 +73,14 @@ class Re:
71
73
  # - ${{ params.source?.schema }}
72
74
  #
73
75
  __re_caller: str = r"""
74
- \$
75
- {{
76
- \s*
76
+ \$ # start with $
77
+ {{ # value open with {{
78
+ \s* # whitespace or not
77
79
  (?P<caller>
78
80
  (?P<caller_prefix>(?:[a-zA-Z_-]+\??\.)*)
79
81
  (?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+\??)
80
82
  )
81
- \s*
83
+ \s* # whitespace or not
82
84
  (?P<post_filters>
83
85
  (?:
84
86
  \|\s*
@@ -88,7 +90,7 @@ class Re:
88
90
  )\s*
89
91
  )*
90
92
  )
91
- }}
93
+ }} # value close with }}
92
94
  """
93
95
  RE_CALLER: Pattern = re.compile(
94
96
  __re_caller, MULTILINE | IGNORECASE | UNICODE | VERBOSE
@@ -103,13 +105,13 @@ class Re:
103
105
  # - tasks/function@dummy
104
106
  #
105
107
  __re_task_fmt: str = r"""
106
- ^
108
+ ^ # start task format
107
109
  (?P<path>[^/@]+)
108
- /
110
+ / # start get function with /
109
111
  (?P<func>[^@]+)
110
- @
112
+ @ # start tag with @
111
113
  (?P<tag>.+)
112
- $
114
+ $ # end task format
113
115
  """
114
116
  RE_TASK_FMT: Pattern = re.compile(
115
117
  __re_task_fmt, MULTILINE | IGNORECASE | UNICODE | VERBOSE
@@ -13,7 +13,7 @@ from collections.abc import Iterator
13
13
  from datetime import datetime, timedelta
14
14
  from functools import cached_property, lru_cache
15
15
  from pathlib import Path
16
- from typing import ClassVar, Optional, Union
16
+ from typing import ClassVar, Optional, TypeVar, Union
17
17
  from zoneinfo import ZoneInfo
18
18
 
19
19
  from ddeutil.core import str2bool
@@ -39,6 +39,7 @@ __all__: TupleStr = (
39
39
  "env",
40
40
  "get_logger",
41
41
  "get_log",
42
+ "C",
42
43
  "Config",
43
44
  "SimLoad",
44
45
  "Loader",
@@ -81,18 +82,42 @@ def get_logger(name: str):
81
82
  return lg
82
83
 
83
84
 
84
- class Config: # pragma: no cov
85
+ class BaseConfig: # pragma: no cov
86
+ """BaseConfig object inheritable."""
87
+
88
+ __slots__ = ()
89
+
90
+ @property
91
+ def root_path(self) -> Path:
92
+ """Root path or the project path.
93
+
94
+ :rtype: Path
95
+ """
96
+ return Path(os.getenv("ROOT_PATH", "."))
97
+
98
+ @property
99
+ def conf_path(self) -> Path:
100
+ """Config path that use root_path class argument for this construction.
101
+
102
+ :rtype: Path
103
+ """
104
+ return self.root_path / os.getenv("CONF_PATH", "conf")
105
+
106
+
107
+ class Config(BaseConfig): # pragma: no cov
85
108
  """Config object for keeping core configurations on the current session
86
109
  without changing when if the application still running.
87
110
 
88
111
  The config value can change when you call that config property again.
89
112
  """
90
113
 
91
- __slots__ = ()
92
-
93
114
  # NOTE: Core
94
115
  @property
95
116
  def root_path(self) -> Path:
117
+ """Root path or the project path.
118
+
119
+ :rtype: Path
120
+ """
96
121
  return Path(env("CORE_ROOT_PATH", "."))
97
122
 
98
123
  @property
@@ -114,7 +139,7 @@ class Config: # pragma: no cov
114
139
  # NOTE: Register
115
140
  @property
116
141
  def regis_hook(self) -> list[str]:
117
- regis_hook_str: str = env("CORE_REGISTRY", "src")
142
+ regis_hook_str: str = env("CORE_REGISTRY", ".")
118
143
  return [r.strip() for r in regis_hook_str.split(",")]
119
144
 
120
145
  @property
@@ -220,6 +245,9 @@ class Config: # pragma: no cov
220
245
  return str2bool(env("API_ENABLE_ROUTE_SCHEDULE", "true"))
221
246
 
222
247
 
248
+ C = TypeVar("C", bound=BaseConfig)
249
+
250
+
223
251
  class SimLoad:
224
252
  """Simple Load Object that will search config data by given some identity
225
253
  value like name of workflow or on.
@@ -243,7 +271,7 @@ class SimLoad:
243
271
  def __init__(
244
272
  self,
245
273
  name: str,
246
- conf: Config,
274
+ conf: C,
247
275
  externals: DictData | None = None,
248
276
  ) -> None:
249
277
  self.data: DictData = {}
@@ -256,7 +284,7 @@ class SimLoad:
256
284
  if not self.data:
257
285
  raise ValueError(f"Config {name!r} does not found on conf path")
258
286
 
259
- self.conf: Config = conf
287
+ self.conf: C = conf
260
288
  self.externals: DictData = externals or {}
261
289
  self.data.update(self.externals)
262
290
 
@@ -264,7 +292,7 @@ class SimLoad:
264
292
  def finds(
265
293
  cls,
266
294
  obj: object,
267
- conf: Config,
295
+ conf: C,
268
296
  *,
269
297
  included: list[str] | None = None,
270
298
  excluded: list[str] | None = None,
@@ -273,7 +301,7 @@ class SimLoad:
273
301
  method can use include and exclude list of identity name for filter and
274
302
  adds-on.
275
303
 
276
- :param obj: A object that want to validate matching before return.
304
+ :param obj: An object that want to validate matching before return.
277
305
  :param conf: A config object.
278
306
  :param included:
279
307
  :param excluded:
@@ -338,7 +366,7 @@ class Loader(SimLoad):
338
366
  ) -> Iterator[tuple[str, DictData]]:
339
367
  """Override the find class method from the Simple Loader object.
340
368
 
341
- :param obj: A object that want to validate matching before return.
369
+ :param obj: An object that want to validate matching before return.
342
370
  :param included:
343
371
  :param excluded:
344
372
 
@@ -50,6 +50,7 @@ def tag(
50
50
  :param: name: A tag name for make different use-case of a function.
51
51
  :param: alias: A alias function name that keeping in registries. If this
52
52
  value does not supply, it will use original function name from __name__.
53
+
53
54
  :rtype: Callable[P, TagFunc]
54
55
  """
55
56
 
@@ -58,7 +59,7 @@ def tag(
58
59
  func.name = alias or func.__name__.replace("_", "-")
59
60
 
60
61
  @wraps(func)
61
- def wrapped(*args, **kwargs):
62
+ def wrapped(*args: P.args, **kwargs: P.kwargs) -> TagFunc:
62
63
  # NOTE: Able to do anything before calling hook function.
63
64
  return func(*args, **kwargs)
64
65
 
@@ -74,10 +75,11 @@ def make_registry(submodule: str) -> dict[str, Registry]:
74
75
  """Return registries of all functions that able to called with task.
75
76
 
76
77
  :param submodule: A module prefix that want to import registry.
78
+
77
79
  :rtype: dict[str, Registry]
78
80
  """
79
81
  rs: dict[str, Registry] = {}
80
- for module in config.regis_hook:
82
+ for module in config.regis_hook | ["ddeutil.vendors"]:
81
83
  # NOTE: try to sequential import task functions
82
84
  try:
83
85
  importer = import_module(f"{module}.{submodule}")
@@ -295,10 +295,7 @@ def str2template(
295
295
  return search_env_replace(value)
296
296
 
297
297
 
298
- def param2template(
299
- value: Any,
300
- params: DictData,
301
- ) -> Any:
298
+ def param2template(value: Any, params: DictData) -> Any:
302
299
  """Pass param to template string that can search by ``RE_CALLER`` regular
303
300
  expression.
304
301
 
@@ -884,7 +884,9 @@ class Workflow(BaseModel):
884
884
  :param run_id: A workflow running ID for this job execution.
885
885
  :type run_id: str | None (default: None)
886
886
  :param timeout: A workflow execution time out in second unit that use
887
- for limit time of execution and waiting job dependency.
887
+ for limit time of execution and waiting job dependency. This value
888
+ does not force stop the task that still running more than this limit
889
+ time.
888
890
  :type timeout: int (default: 0)
889
891
 
890
892
  :rtype: Result
@@ -1220,6 +1222,7 @@ class WorkflowTask:
1220
1222
  return queue
1221
1223
 
1222
1224
  def __repr__(self) -> str:
1225
+ """Override the __repr__ method."""
1223
1226
  return (
1224
1227
  f"{self.__class__.__name__}(alias={self.alias!r}, "
1225
1228
  f"workflow={self.workflow.name!r}, runner={self.runner!r}, "
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: ddeutil-workflow
3
- Version: 0.0.28
3
+ Version: 0.0.29
4
4
  Summary: Lightweight workflow orchestration
5
5
  Author-email: ddeutils <korawich.anu@gmail.com>
6
6
  License: MIT
@@ -22,7 +22,7 @@ Classifier: Programming Language :: Python :: 3.13
22
22
  Requires-Python: >=3.9.13
23
23
  Description-Content-Type: text/markdown
24
24
  License-File: LICENSE
25
- Requires-Dist: ddeutil==0.4.6
25
+ Requires-Dist: ddeutil>=0.4.6
26
26
  Requires-Dist: ddeutil-io[toml,yaml]>=0.2.3
27
27
  Requires-Dist: pydantic==2.10.6
28
28
  Requires-Dist: python-dotenv==1.0.1
@@ -55,6 +55,8 @@ the input parameters per use-case instead.
55
55
  This way I can handle a lot of logical workflows in our orgs with only metadata
56
56
  configuration. It called **Metadata Driven Data Workflow**.
57
57
 
58
+ ---
59
+
58
60
  **:pushpin: <u>Rules of This Workflow engine</u>**:
59
61
 
60
62
  1. The Minimum frequency unit of scheduling is **1 minute** :warning:
@@ -62,8 +64,14 @@ configuration. It called **Metadata Driven Data Workflow**.
62
64
  3. All parallel tasks inside workflow engine use Multi-Threading
63
65
  (Python 3.13 unlock GIL :unlock:)
64
66
 
67
+ ---
68
+
65
69
  **:memo: <u>Workflow Diagrams</u>**:
66
70
 
71
+ This diagram shows where this application runs on the production infrastructure.
72
+ You will see that this application only runs code, stress-free, which means
73
+ you should set up the data layer separately from this core program before running this application.
74
+
67
75
  ```mermaid
68
76
  flowchart LR
69
77
  subgraph Interface
@@ -215,8 +223,8 @@ and do not raise any error to you.
215
223
  | Name | Component | Default | Description |
216
224
  |:-----------------------------|:---------:|:----------------------------------|:-------------------------------------------------------------------------------------------------------------------|
217
225
  | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
218
- | **REGISTRY** | Core | `src` | List of importable string for the hook stage. |
219
- | **REGISTRY_FILTER** | Core | `ddeutil.workflow.utils` | List of importable string for the filter template. |
226
+ | **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
227
+ | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
220
228
  | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
221
229
  | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
222
230
  | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
@@ -42,17 +42,13 @@ tests/test_schedule.py
42
42
  tests/test_schedule_control.py
43
43
  tests/test_schedule_tasks.py
44
44
  tests/test_stage.py
45
- tests/test_stage_exec_bash.py
46
- tests/test_stage_exec_hook.py
47
- tests/test_stage_exec_py.py
48
- tests/test_stage_exec_trigger.py
45
+ tests/test_stage_handler_exec.py
49
46
  tests/test_templates.py
50
47
  tests/test_templates_filter.py
51
48
  tests/test_utils.py
52
49
  tests/test_workflow.py
53
50
  tests/test_workflow_exec.py
54
51
  tests/test_workflow_exec_hook.py
55
- tests/test_workflow_exec_needs.py
56
52
  tests/test_workflow_job_exec.py
57
53
  tests/test_workflow_poke.py
58
54
  tests/test_workflow_release.py
@@ -1,4 +1,4 @@
1
- ddeutil==0.4.6
1
+ ddeutil>=0.4.6
2
2
  ddeutil-io[toml,yaml]>=0.2.3
3
3
  pydantic==2.10.6
4
4
  python-dotenv==1.0.1
@@ -87,9 +87,11 @@ def test_simple_load_finds(target_path: Path):
87
87
  "test_simple_load_config",
88
88
  {"type": "Config"},
89
89
  )
90
- ] == list(SimLoad.finds(Config, Config(), included="type"))
90
+ ] == list(SimLoad.finds(Config, Config(), included=["type"]))
91
91
  assert [] == list(
92
- SimLoad.finds(Config, Config(), excluded="test_simple_load_config")
92
+ SimLoad.finds(
93
+ Config, Config(), excluded=["test_simple_load_config"]
94
+ )
93
95
  )
94
96
 
95
97
  dummy_file.unlink()
@@ -3,7 +3,7 @@ from pathlib import Path
3
3
  from textwrap import dedent
4
4
 
5
5
  import pytest
6
- from ddeutil.workflow.hook import make_registry
6
+ from ddeutil.workflow.hook import Registry, make_registry
7
7
 
8
8
 
9
9
  @pytest.fixture(scope="module")
@@ -39,11 +39,12 @@ def hook_function(test_path: Path):
39
39
 
40
40
 
41
41
  def test_make_registry_not_found():
42
- rs = make_registry("not_found")
42
+ rs: dict[str, Registry] = make_registry("not_found")
43
43
  assert rs == {}
44
44
 
45
45
 
46
46
  def test_make_registry_raise(hook_function):
47
47
 
48
+ # NOTE: Raise an error for the duplicate tag name, polars-dir, that is set in this module.
48
49
  with pytest.raises(ValueError):
49
50
  make_registry("new_tasks")
@@ -14,6 +14,7 @@ def test_stage():
14
14
  )
15
15
  assert stage.iden == "Empty Stage"
16
16
  assert stage.name == "Empty Stage"
17
+ assert stage == EmptyStage(name="Empty Stage", echo="hello world")
17
18
 
18
19
  # NOTE: Copy the stage model with adding the id field.
19
20
  new_stage: Stage = stage.model_copy(update={"id": "stage-empty"})
@@ -0,0 +1,182 @@
1
+ from datetime import datetime
2
+ from inspect import isfunction
3
+ from unittest import mock
4
+
5
+ import pytest
6
+ from ddeutil.core import getdot
7
+ from ddeutil.workflow import Workflow
8
+ from ddeutil.workflow.conf import Config
9
+ from ddeutil.workflow.exceptions import StageException
10
+ from ddeutil.workflow.result import Result
11
+ from ddeutil.workflow.stage import Stage
12
+
13
+
14
+ def test_stage_exec_bash():
15
+ workflow: Workflow = Workflow.from_loader(name="wf-run-common")
16
+ stage: Stage = workflow.job("bash-run").stage("echo")
17
+ rs: Result = stage.handler_execute({})
18
+ assert {
19
+ "return_code": 0,
20
+ "stdout": "Hello World\nVariable Foo",
21
+ "stderr": None,
22
+ } == rs.context
23
+
24
+
25
+ def test_stage_exec_bash_env():
26
+ workflow: Workflow = Workflow.from_loader(name="wf-run-common")
27
+ stage: Stage = workflow.job("bash-run-env").stage("echo-env")
28
+ rs: Result = stage.handler_execute({})
29
+ assert {
30
+ "return_code": 0,
31
+ "stdout": "Hello World\nVariable Foo\nENV Bar",
32
+ "stderr": None,
33
+ } == rs.context
34
+
35
+
36
+ def test_stage_exec_bash_env_raise():
37
+ workflow: Workflow = Workflow.from_loader(name="wf-run-common")
38
+ stage: Stage = workflow.job("bash-run-env").stage("raise-error")
39
+
40
+ # NOTE: Raise error from bash that force exit 1.
41
+ with pytest.raises(StageException):
42
+ stage.handler_execute({})
43
+
44
+
45
+ def test_stage_exec_hook():
46
+ workflow: Workflow = Workflow.from_loader(name="wf-hook-return-type")
47
+ stage: Stage = workflow.job("second-job").stage("extract-load")
48
+ rs: Result = stage.handler_execute({})
49
+
50
+ assert 0 == rs.status
51
+ assert {"records": 1} == rs.context
52
+
53
+
54
+ def test_stage_exec_hook_raise_return_type():
55
+ workflow: Workflow = Workflow.from_loader(name="wf-hook-return-type")
56
+ stage: Stage = workflow.job("first-job").stage("valid-type")
57
+
58
+ with pytest.raises(StageException):
59
+ stage.handler_execute({})
60
+
61
+
62
+ def test_stage_exec_hook_raise_args():
63
+ workflow: Workflow = Workflow.from_loader(name="wf-hook-return-type")
64
+ stage: Stage = workflow.job("first-job").stage("args-necessary")
65
+
66
+ with pytest.raises(StageException):
67
+ stage.handler_execute({})
68
+
69
+
70
+ def test_stage_exec_hook_not_valid():
71
+ workflow: Workflow = Workflow.from_loader(name="wf-hook-return-type")
72
+ stage: Stage = workflow.job("first-job").stage("hook-not-valid")
73
+
74
+ with pytest.raises(StageException):
75
+ stage.handler_execute({})
76
+
77
+
78
+ def test_stage_exec_hook_not_register():
79
+ workflow: Workflow = Workflow.from_loader(name="wf-hook-return-type")
80
+ stage: Stage = workflow.job("first-job").stage("hook-not-register")
81
+
82
+ with pytest.raises(StageException):
83
+ stage.handler_execute({})
84
+
85
+
86
+ def test_stage_exec_py_raise():
87
+ with mock.patch.object(Config, "stage_raise_error", True):
88
+ workflow: Workflow = Workflow.from_loader(name="wf-run-common")
89
+ stage: Stage = workflow.job("raise-run").stage(stage_id="raise-error")
90
+ with pytest.raises(StageException):
91
+ stage.handler_execute(params={"x": "Foo"})
92
+
93
+
94
+ def test_stage_exec_py_not_raise():
95
+ with mock.patch.object(Config, "stage_raise_error", False):
96
+ workflow: Workflow = Workflow.from_loader(name="wf-run-common")
97
+ stage: Stage = workflow.job("raise-run").stage(stage_id="raise-error")
98
+
99
+ rs = stage.handler_execute(params={"x": "Foo"})
100
+
101
+ assert rs.status == 1
102
+
103
+ # NOTE:
104
+ # Context that return from error will be:
105
+ # {
106
+ # 'error': ValueError("Testing ... PyStage!!!"),
107
+ # 'error_message': "ValueError: Testing ... PyStage!!!",
108
+ # }
109
+ assert isinstance(rs.context["error"], ValueError)
110
+ assert rs.context["error_message"] == (
111
+ "ValueError: Testing raise error inside PyStage!!!"
112
+ )
113
+
114
+ rs_out = stage.set_outputs(rs.context, {})
115
+ assert rs_out == {
116
+ "stages": {
117
+ "raise-error": {
118
+ "outputs": {
119
+ "error": getdot(
120
+ "stages.raise-error.outputs.error", rs_out
121
+ ),
122
+ "error_message": (
123
+ "ValueError: Testing raise error inside PyStage!!!"
124
+ ),
125
+ },
126
+ },
127
+ },
128
+ }
129
+
130
+
131
+ def test_stage_exec_py_with_vars():
132
+ workflow: Workflow = Workflow.from_loader(name="wf-run-common")
133
+ stage: Stage = workflow.job("demo-run").stage(stage_id="run-var")
134
+ assert stage.id == "run-var"
135
+
136
+ params = {
137
+ "params": {"name": "Author"},
138
+ "stages": {"hello-world": {"outputs": {"x": "Foo"}}},
139
+ }
140
+ rs_out = stage.set_outputs(
141
+ stage.handler_execute(params=params).context, to=params
142
+ )
143
+ assert {
144
+ "params": {"name": "Author"},
145
+ "stages": {
146
+ "hello-world": {"outputs": {"x": "Foo"}},
147
+ "run-var": {"outputs": {"x": 1}},
148
+ },
149
+ } == rs_out
150
+
151
+
152
+ def test_stage_exec_py_func():
153
+ workflow: Workflow = Workflow.from_loader(name="wf-run-python")
154
+ stage: Stage = workflow.job("second-job").stage(stage_id="create-func")
155
+ rs: Result = stage.handler_execute(params={})
156
+ rs_out = stage.set_outputs(rs.context, to={})
157
+ assert ("var_inside", "echo") == tuple(
158
+ rs_out["stages"]["create-func"]["outputs"].keys()
159
+ )
160
+ assert isfunction(rs_out["stages"]["create-func"]["outputs"]["echo"])
161
+
162
+
163
+ def test_stage_exec_trigger():
164
+ workflow = Workflow.from_loader(name="wf-trigger", externals={})
165
+ stage: Stage = workflow.job("trigger-job").stage(stage_id="trigger-stage")
166
+ rs: Result = stage.handler_execute(params={})
167
+ assert all(k in ("params", "jobs") for k in rs.context.keys())
168
+ assert {
169
+ "author-run": "Trigger Runner",
170
+ "run-date": datetime(2024, 8, 1),
171
+ } == rs.context["params"]
172
+
173
+
174
+ def test_stage_exec_trigger_from_workflow():
175
+ workflow = Workflow.from_loader(name="wf-trigger", externals={})
176
+ rs: Result = workflow.execute(params={})
177
+ assert {
178
+ "author-run": "Trigger Runner",
179
+ "run-date": datetime(2024, 8, 1),
180
+ } == getdot(
181
+ "jobs.trigger-job.stages.trigger-stage.outputs.params", rs.context
182
+ )
@@ -15,11 +15,21 @@ def test_workflow_exec():
15
15
  workflow: Workflow = Workflow(
16
16
  name="demo-workflow", jobs={"sleep-run": job, "sleep-again-run": job}
17
17
  )
18
- workflow.execute(params={})
18
+ rs: Result = workflow.execute(params={})
19
+ assert rs.status == 0
20
+ assert rs.context == {
21
+ "params": {},
22
+ "jobs": {
23
+ "sleep-again-run": {
24
+ "matrix": {},
25
+ "stages": {"7972360640": {"outputs": {}}},
26
+ },
27
+ },
28
+ }
19
29
 
20
30
 
21
31
  @mock.patch.object(Config, "max_job_parallel", 1)
22
- def test_workflow_exec_timeout():
32
+ def test_workflow_exec_raise_timeout():
23
33
  job: Job = Job(
24
34
  stages=[{"name": "Sleep", "run": "import time\ntime.sleep(2)"}],
25
35
  )
@@ -27,6 +37,7 @@ def test_workflow_exec_timeout():
27
37
  name="demo-workflow", jobs={"sleep-run": job, "sleep-again-run": job}
28
38
  )
29
39
  rs: Result = workflow.execute(params={}, timeout=1)
40
+ assert rs.status == 1
30
41
  assert rs.context["error_message"] == (
31
42
  "WorkflowException: Execution: 'demo-workflow' was timeout."
32
43
  )
@@ -268,3 +279,72 @@ def test_workflow_exec_with_matrix():
268
279
  },
269
280
  },
270
281
  } == rs.context
282
+
283
+
284
+ def test_workflow_exec_needs():
285
+ workflow = Workflow.from_loader(name="wf-run-depends", externals={})
286
+ rs: Result = workflow.execute(params={"name": "bar"})
287
+ assert {
288
+ "params": {"name": "bar"},
289
+ "jobs": {
290
+ "final-job": {
291
+ "matrix": {},
292
+ "stages": {
293
+ "8797330324": {
294
+ "outputs": {},
295
+ },
296
+ },
297
+ },
298
+ "first-job": {
299
+ "matrix": {},
300
+ "stages": {
301
+ "7824513474": {
302
+ "outputs": {},
303
+ },
304
+ },
305
+ },
306
+ "second-job": {
307
+ "matrix": {},
308
+ "stages": {
309
+ "1772094681": {
310
+ "outputs": {},
311
+ },
312
+ },
313
+ },
314
+ },
315
+ } == rs.context
316
+
317
+
318
+ def test_workflow_exec_needs_parallel():
319
+ with mock.patch.object(Config, "max_job_parallel", 3):
320
+ workflow = Workflow.from_loader(name="wf-run-depends", externals={})
321
+ rs: Result = workflow.execute(params={"name": "bar"})
322
+ assert {
323
+ "params": {"name": "bar"},
324
+ "jobs": {
325
+ "final-job": {
326
+ "matrix": {},
327
+ "stages": {
328
+ "8797330324": {
329
+ "outputs": {},
330
+ },
331
+ },
332
+ },
333
+ "first-job": {
334
+ "matrix": {},
335
+ "stages": {
336
+ "7824513474": {
337
+ "outputs": {},
338
+ },
339
+ },
340
+ },
341
+ "second-job": {
342
+ "matrix": {},
343
+ "stages": {
344
+ "1772094681": {
345
+ "outputs": {},
346
+ },
347
+ },
348
+ },
349
+ },
350
+ } == rs.context
@@ -10,6 +10,7 @@ from ddeutil.workflow.result import Result
10
10
  from .utils import dump_yaml_context
11
11
 
12
12
 
13
+ @pytest.mark.poke
13
14
  @mock.patch.object(Config, "enable_write_log", False)
14
15
  def test_workflow_poke(test_path):
15
16
  with dump_yaml_context(
@@ -68,6 +69,7 @@ def test_workflow_poke(test_path):
68
69
  assert results[0].run_id != results[0].parent_run_id
69
70
 
70
71
 
72
+ @pytest.mark.poke
71
73
  @mock.patch.object(Config, "enable_write_log", False)
72
74
  def test_workflow_poke_no_queue(test_path):
73
75
  with dump_yaml_context(
@@ -93,6 +95,7 @@ def test_workflow_poke_no_queue(test_path):
93
95
  assert results == []
94
96
 
95
97
 
98
+ @pytest.mark.poke
96
99
  def test_workflow_poke_raise():
97
100
  workflow = Workflow.from_loader(name="wf-scheduling-common")
98
101
 
@@ -101,6 +104,7 @@ def test_workflow_poke_raise():
101
104
  workflow.poke(periods=-1)
102
105
 
103
106
 
107
+ @pytest.mark.poke
104
108
  @mock.patch.object(Config, "enable_write_log", False)
105
109
  def test_workflow_poke_with_start_date_and_period(test_path):
106
110
  with dump_yaml_context(
@@ -134,6 +138,7 @@ def test_workflow_poke_with_start_date_and_period(test_path):
134
138
  assert results[0].parent_run_id == results[1].parent_run_id
135
139
 
136
140
 
141
+ @pytest.mark.poke
137
142
  @mock.patch.object(Config, "enable_write_log", False)
138
143
  def test_workflow_poke_no_on(test_path):
139
144
  with dump_yaml_context(
@@ -25,7 +25,7 @@ def test_workflow_schedule():
25
25
  wf_schedule = WorkflowSchedule(name="demo", on=[{"cronjob": "2 * * * *"}])
26
26
  assert len(wf_schedule.on) == 1
27
27
 
28
- # NOTE: Raise if do not pass any data to WorkflowSchedule
28
+ # NOTE: Raise if it does not pass any data to WorkflowSchedule
29
29
  with pytest.raises(ValidationError):
30
30
  WorkflowSchedule.model_validate({})
31
31
 
@@ -118,9 +118,7 @@ def test_workflow_schedule_tasks(test_path):
118
118
  assert len(tasks) == 1
119
119
 
120
120
  task = tasks[0]
121
-
122
121
  task.release(queue=queue["tmp-wf-schedule-tasks"])
123
-
124
122
  task.release(queue=queue["tmp-wf-schedule-tasks"])
125
123
 
126
124
  assert task.runner.date == datetime(2024, 1, 1, 1, 4, tzinfo=tz)
@@ -1 +0,0 @@
1
- __version__: str = "0.0.28"
@@ -1,34 +0,0 @@
1
- import pytest
2
- from ddeutil.workflow import Workflow
3
- from ddeutil.workflow.exceptions import StageException
4
- from ddeutil.workflow.result import Result
5
- from ddeutil.workflow.stage import Stage
6
-
7
-
8
- def test_stage_exec_bash():
9
- workflow: Workflow = Workflow.from_loader(name="wf-run-common")
10
- stage: Stage = workflow.job("bash-run").stage("echo")
11
- rs: Result = stage.handler_execute({})
12
- assert {
13
- "return_code": 0,
14
- "stdout": "Hello World\nVariable Foo",
15
- "stderr": None,
16
- } == rs.context
17
-
18
-
19
- def test_stage_exec_bash_env():
20
- workflow: Workflow = Workflow.from_loader(name="wf-run-common")
21
- stage: Stage = workflow.job("bash-run-env").stage("echo-env")
22
- rs: Result = stage.handler_execute({})
23
- assert {
24
- "return_code": 0,
25
- "stdout": "Hello World\nVariable Foo\nENV Bar",
26
- "stderr": None,
27
- } == rs.context
28
-
29
-
30
- def test_stage_exec_bash_env_raise():
31
- workflow: Workflow = Workflow.from_loader(name="wf-run-common")
32
- stage: Stage = workflow.job("bash-run-env").stage("raise-error")
33
- with pytest.raises(StageException):
34
- stage.handler_execute({})
@@ -1,46 +0,0 @@
1
- import pytest
2
- from ddeutil.workflow import Workflow
3
- from ddeutil.workflow.exceptions import StageException
4
- from ddeutil.workflow.result import Result
5
- from ddeutil.workflow.stage import Stage
6
-
7
-
8
- def test_stage_exec_hook():
9
- workflow: Workflow = Workflow.from_loader(name="wf-hook-return-type")
10
- stage: Stage = workflow.job("second-job").stage("extract-load")
11
- rs: Result = stage.handler_execute({})
12
-
13
- assert 0 == rs.status
14
- assert {"records": 1} == rs.context
15
-
16
-
17
- def test_stage_exec_hook_raise_return_type():
18
- workflow: Workflow = Workflow.from_loader(name="wf-hook-return-type")
19
- stage: Stage = workflow.job("first-job").stage("valid-type")
20
-
21
- with pytest.raises(StageException):
22
- stage.handler_execute({})
23
-
24
-
25
- def test_stage_exec_hook_raise_args():
26
- workflow: Workflow = Workflow.from_loader(name="wf-hook-return-type")
27
- stage: Stage = workflow.job("first-job").stage("args-necessary")
28
-
29
- with pytest.raises(StageException):
30
- stage.handler_execute({})
31
-
32
-
33
- def test_stage_exec_hook_not_valid():
34
- workflow: Workflow = Workflow.from_loader(name="wf-hook-return-type")
35
- stage: Stage = workflow.job("first-job").stage("hook-not-valid")
36
-
37
- with pytest.raises(StageException):
38
- stage.handler_execute({})
39
-
40
-
41
- def test_stage_exec_hook_not_register():
42
- workflow: Workflow = Workflow.from_loader(name="wf-hook-return-type")
43
- stage: Stage = workflow.job("first-job").stage("hook-not-register")
44
-
45
- with pytest.raises(StageException):
46
- stage.handler_execute({})
@@ -1,87 +0,0 @@
1
- from inspect import isfunction
2
- from unittest import mock
3
-
4
- import pytest
5
- from ddeutil.core import getdot
6
- from ddeutil.workflow import Workflow
7
- from ddeutil.workflow.conf import Config
8
- from ddeutil.workflow.exceptions import StageException
9
- from ddeutil.workflow.result import Result
10
- from ddeutil.workflow.stage import Stage
11
-
12
-
13
- def test_stage_exec_py_raise():
14
- with mock.patch.object(Config, "stage_raise_error", True):
15
- workflow: Workflow = Workflow.from_loader(name="wf-run-common")
16
- stage: Stage = workflow.job("raise-run").stage(stage_id="raise-error")
17
- with pytest.raises(StageException):
18
- stage.handler_execute(params={"x": "Foo"})
19
-
20
-
21
- def test_stage_exec_py_not_raise():
22
- with mock.patch.object(Config, "stage_raise_error", False):
23
- workflow: Workflow = Workflow.from_loader(name="wf-run-common")
24
- stage: Stage = workflow.job("raise-run").stage(stage_id="raise-error")
25
-
26
- rs = stage.handler_execute(params={"x": "Foo"})
27
-
28
- assert rs.status == 1
29
-
30
- # NOTE:
31
- # Context that return from error will be:
32
- # {
33
- # 'error': ValueError("Testing ... PyStage!!!"),
34
- # 'error_message': "ValueError: Testing ... PyStage!!!",
35
- # }
36
- assert isinstance(rs.context["error"], ValueError)
37
- assert rs.context["error_message"] == (
38
- "ValueError: Testing raise error inside PyStage!!!"
39
- )
40
-
41
- rs_out = stage.set_outputs(rs.context, {})
42
- assert rs_out == {
43
- "stages": {
44
- "raise-error": {
45
- "outputs": {
46
- "error": getdot(
47
- "stages.raise-error.outputs.error", rs_out
48
- ),
49
- "error_message": (
50
- "ValueError: Testing raise error inside PyStage!!!"
51
- ),
52
- },
53
- },
54
- },
55
- }
56
-
57
-
58
- def test_stage_exec_py_with_vars():
59
- workflow: Workflow = Workflow.from_loader(name="wf-run-common")
60
- stage: Stage = workflow.job("demo-run").stage(stage_id="run-var")
61
- assert stage.id == "run-var"
62
-
63
- params = {
64
- "params": {"name": "Author"},
65
- "stages": {"hello-world": {"outputs": {"x": "Foo"}}},
66
- }
67
- rs_out = stage.set_outputs(
68
- stage.handler_execute(params=params).context, to=params
69
- )
70
- assert {
71
- "params": {"name": "Author"},
72
- "stages": {
73
- "hello-world": {"outputs": {"x": "Foo"}},
74
- "run-var": {"outputs": {"x": 1}},
75
- },
76
- } == rs_out
77
-
78
-
79
- def test_stage_exec_py_func():
80
- workflow: Workflow = Workflow.from_loader(name="wf-run-python")
81
- stage: Stage = workflow.job("second-job").stage(stage_id="create-func")
82
- rs: Result = stage.handler_execute(params={})
83
- rs_out = stage.set_outputs(rs.context, to={})
84
- assert ("var_inside", "echo") == tuple(
85
- rs_out["stages"]["create-func"]["outputs"].keys()
86
- )
87
- assert isfunction(rs_out["stages"]["create-func"]["outputs"]["echo"])
@@ -1,30 +0,0 @@
1
- from datetime import datetime
2
-
3
- import ddeutil.workflow as wf
4
- import ddeutil.workflow.stage as st
5
- from ddeutil.core import getdot
6
- from ddeutil.workflow.result import Result
7
-
8
-
9
- def test_stage_exec_trigger():
10
- workflow = wf.Workflow.from_loader(name="wf-trigger", externals={})
11
- stage: st.Stage = workflow.job("trigger-job").stage(
12
- stage_id="trigger-stage"
13
- )
14
- rs: Result = stage.handler_execute(params={})
15
- assert all(k in ("params", "jobs") for k in rs.context.keys())
16
- assert {
17
- "author-run": "Trigger Runner",
18
- "run-date": datetime(2024, 8, 1),
19
- } == rs.context["params"]
20
-
21
-
22
- def test_stage_exec_trigger_from_workflow():
23
- workflow = wf.Workflow.from_loader(name="wf-trigger", externals={})
24
- rs: Result = workflow.execute(params={})
25
- assert {
26
- "author-run": "Trigger Runner",
27
- "run-date": datetime(2024, 8, 1),
28
- } == getdot(
29
- "jobs.trigger-job.stages.trigger-stage.outputs.params", rs.context
30
- )
@@ -1,74 +0,0 @@
1
- from unittest import mock
2
-
3
- from ddeutil.workflow import Workflow
4
- from ddeutil.workflow.conf import Config
5
- from ddeutil.workflow.result import Result
6
-
7
-
8
- def test_workflow_exec_needs():
9
- workflow = Workflow.from_loader(name="wf-run-depends", externals={})
10
- rs: Result = workflow.execute(params={"name": "bar"})
11
- assert {
12
- "params": {"name": "bar"},
13
- "jobs": {
14
- "final-job": {
15
- "matrix": {},
16
- "stages": {
17
- "8797330324": {
18
- "outputs": {},
19
- },
20
- },
21
- },
22
- "first-job": {
23
- "matrix": {},
24
- "stages": {
25
- "7824513474": {
26
- "outputs": {},
27
- },
28
- },
29
- },
30
- "second-job": {
31
- "matrix": {},
32
- "stages": {
33
- "1772094681": {
34
- "outputs": {},
35
- },
36
- },
37
- },
38
- },
39
- } == rs.context
40
-
41
-
42
- def test_workflow_exec_needs_parallel():
43
- with mock.patch.object(Config, "max_job_parallel", 3):
44
- workflow = Workflow.from_loader(name="wf-run-depends", externals={})
45
- rs: Result = workflow.execute(params={"name": "bar"})
46
- assert {
47
- "params": {"name": "bar"},
48
- "jobs": {
49
- "final-job": {
50
- "matrix": {},
51
- "stages": {
52
- "8797330324": {
53
- "outputs": {},
54
- },
55
- },
56
- },
57
- "first-job": {
58
- "matrix": {},
59
- "stages": {
60
- "7824513474": {
61
- "outputs": {},
62
- },
63
- },
64
- },
65
- "second-job": {
66
- "matrix": {},
67
- "stages": {
68
- "1772094681": {
69
- "outputs": {},
70
- },
71
- },
72
- },
73
- },
74
- } == rs.context