ddeutil-workflow 0.0.74__tar.gz → 0.0.76__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/PKG-INFO +3 -3
  2. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/README.md +2 -2
  3. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/pyproject.toml +1 -0
  4. ddeutil_workflow-0.0.76/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/__cron.py +18 -7
  6. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/__init__.py +2 -1
  7. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/audits.py +15 -10
  8. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/cli.py +87 -21
  9. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/conf.py +10 -10
  10. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/errors.py +8 -10
  11. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/job.py +13 -10
  12. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/params.py +6 -4
  13. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/result.py +15 -14
  14. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/stages.py +11 -9
  15. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/traces.py +19 -14
  16. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/utils.py +9 -22
  17. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/workflow.py +67 -55
  18. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil_workflow.egg-info/PKG-INFO +3 -3
  19. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_conf.py +6 -6
  20. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_event.py +10 -0
  21. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_params.py +11 -4
  22. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_reusables_template.py +1 -1
  23. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_utils.py +11 -6
  24. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_workflow_exec.py +9 -6
  25. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_workflow_exec_job.py +2 -2
  26. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_workflow_release.py +11 -7
  27. ddeutil_workflow-0.0.74/src/ddeutil/workflow/__about__.py +0 -1
  28. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/LICENSE +0 -0
  29. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/setup.cfg +0 -0
  30. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/__main__.py +0 -0
  31. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/__types.py +0 -0
  32. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/api/__init__.py +0 -0
  33. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/api/log_conf.py +0 -0
  34. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/api/routes/__init__.py +0 -0
  35. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/api/routes/job.py +0 -0
  36. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/api/routes/logs.py +0 -0
  37. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/api/routes/workflows.py +0 -0
  38. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/event.py +0 -0
  39. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil/workflow/reusables.py +0 -0
  40. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil_workflow.egg-info/SOURCES.txt +0 -0
  41. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  42. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
  43. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
  44. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  45. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test__cron.py +0 -0
  46. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test__regex.py +0 -0
  47. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_audits.py +0 -0
  48. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_cli.py +0 -0
  49. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_errors.py +0 -0
  50. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_job.py +0 -0
  51. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_job_exec.py +0 -0
  52. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_job_exec_strategy.py +0 -0
  53. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_result.py +0 -0
  54. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_reusables_call_tag.py +0 -0
  55. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_reusables_func_model.py +0 -0
  56. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_reusables_template_filter.py +0 -0
  57. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_strategy.py +0 -0
  58. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_traces.py +0 -0
  59. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_workflow.py +0 -0
  60. {ddeutil_workflow-0.0.74 → ddeutil_workflow-0.0.76}/tests/test_workflow_rerun.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.74
+ Version: 0.0.76
  Summary: Lightweight workflow orchestration with YAML template
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -68,7 +68,7 @@ by a `.yaml` template.
  3. All parallel tasks inside workflow core engine use **Multi-Threading** pool
  (Python 3.13 unlock GIL 🐍🔓)
  4. Recommend to pass a **Secret Value** with environment variable in YAML template 🔐
- 5. Any datatime value convert to **No Timezone**
+ 5. Any datatime value convert to **UTC Timezone** 🌐

  ---

@@ -288,10 +288,10 @@ it will use default value and do not raise any error to you.
  | **REGISTRY_CALLER** | CORE | `.` | List of importable string for the call stage. |
  | **REGISTRY_FILTER** | CORE | `ddeutil.workflow.templates` | List of importable string for the filter template. |
  | **CONF_PATH** | CORE | `./conf` | The config path that keep all template `.yaml` files. |
- | **TIMEZONE** | CORE | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
  | **STAGE_DEFAULT_ID** | CORE | `false` | A flag that enable default stage ID that use for catch an execution output. |
  | **GENERATE_ID_SIMPLE_MODE** | CORE | `true` | A flog that enable generating ID with `md5` algorithm. |
  | **DEBUG_MODE** | LOG | `true` | A flag that enable logging with debug level mode. |
+ | **TIMEZONE** | LOG | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
  | **FORMAT** | LOG | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | A trace message console format. |
  | **FORMAT_FILE** | LOG | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | A trace message format that use to write to target pointer. |
  | **DATETIME_FORMAT** | LOG | `%Y-%m-%d %H:%M:%S` | A datetime format of the trace log. |
@@ -26,7 +26,7 @@ by a `.yaml` template.
  3. All parallel tasks inside workflow core engine use **Multi-Threading** pool
  (Python 3.13 unlock GIL 🐍🔓)
  4. Recommend to pass a **Secret Value** with environment variable in YAML template 🔐
- 5. Any datatime value convert to **No Timezone**
+ 5. Any datatime value convert to **UTC Timezone** 🌐

  ---

@@ -246,10 +246,10 @@ it will use default value and do not raise any error to you.
  | **REGISTRY_CALLER** | CORE | `.` | List of importable string for the call stage. |
  | **REGISTRY_FILTER** | CORE | `ddeutil.workflow.templates` | List of importable string for the filter template. |
  | **CONF_PATH** | CORE | `./conf` | The config path that keep all template `.yaml` files. |
- | **TIMEZONE** | CORE | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
  | **STAGE_DEFAULT_ID** | CORE | `false` | A flag that enable default stage ID that use for catch an execution output. |
  | **GENERATE_ID_SIMPLE_MODE** | CORE | `true` | A flog that enable generating ID with `md5` algorithm. |
  | **DEBUG_MODE** | LOG | `true` | A flag that enable logging with debug level mode. |
+ | **TIMEZONE** | LOG | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
  | **FORMAT** | LOG | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | A trace message console format. |
  | **FORMAT_FILE** | LOG | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | A trace message format that use to write to target pointer. |
  | **DATETIME_FORMAT** | LOG | `%Y-%m-%d %H:%M:%S` | A datetime format of the trace log. |
@@ -64,6 +64,7 @@ where = ["src"]
  [tool.shelf.version]
  version = "./src/ddeutil/workflow/__about__.py"
  changelog = "CHANGELOG.md"
+ files = ["json-schema.json"]
  commit_msg_format = "- {subject}"

  [tool.shelf.git]
@@ -0,0 +1 @@
+ __version__: str = "0.0.76"
@@ -793,10 +793,11 @@ class CronRunner:
  "Invalid type of `tz` parameter, it should be str or "
  "ZoneInfo instance."
  )
- try:
- self.tz = ZoneInfo(tz)
- except ZoneInfoNotFoundError as err:
- raise ValueError(f"Invalid timezone: {tz}") from err
+ else:
+ try:
+ self.tz = ZoneInfo(tz)
+ except ZoneInfoNotFoundError as err:
+ raise ValueError(f"Invalid timezone: {tz}") from err

  # NOTE: Prepare date
  if date:
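
As a reading aid, a minimal standalone sketch of the timezone-validation pattern that this hunk wraps in an `else` branch (standard-library only; not the package's exact code):

    from zoneinfo import ZoneInfo, ZoneInfoNotFoundError

    def resolve_tz(tz: str) -> ZoneInfo:
        # Unknown zone names are re-raised as ValueError, matching the hunk above.
        try:
            return ZoneInfo(tz)
        except ZoneInfoNotFoundError as err:
            raise ValueError(f"Invalid timezone: {tz}") from err

    resolve_tz("Asia/Bangkok")    # ok
    # resolve_tz("Mars/Olympus")  # would raise ValueError("Invalid timezone: ...")
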
@@ -807,6 +808,7 @@
  if tz is not None:
  self.date: datetime = date.astimezone(self.tz)
  else:
+ self.tz = date.tzinfo
  self.date: datetime = date
  else:
  self.date: datetime = datetime.now(tz=self.tz)
@@ -841,7 +843,11 @@

  @property
  def next(self) -> datetime:
- """Returns the next time of the schedule."""
+ """Returns the next time of the schedule.
+
+ Returns:
+ datetime: A next datetime from the current with shifting step.
+ """
  self.date = (
  self.date
  if self.reset_flag
@@ -858,7 +864,11 @@
  def find_date(self, reverse: bool = False) -> datetime:
  """Returns the time the schedule would run by `next` or `prev` methods.

- :param reverse: A reverse flag.
+ Args:
+ reverse: A reverse flag.
+
+ Returns:
+ datetime: A next datetime from shifting step.
  """
  # NOTE: Set reset flag to false if start any action.
  self.reset_flag: bool = False
@@ -868,7 +878,8 @@
  max(self.shift_limit, 100) if self.is_year else self.shift_limit
  ):

- # NOTE: Shift the date
+ # NOTE: Shift the date from year to minute.
+ mode: DatetimeMode # noqa: F842
  if all(
  not self.__shift_date(mode, reverse)
  for mode in ("year", "month", "day", "hour", "minute")
@@ -113,6 +113,7 @@ from .result import (
  WAIT,
  Result,
  Status,
+ get_status_from_error,
  )
  from .reusables import *
  from .stages import (
@@ -131,7 +132,7 @@ from .stages import (
  VirtualPyStage,
  )
  from .traces import (
- ConsoleTrace,
+ BaseTrace,
  FileTrace,
  Trace,
  TraceData,
@@ -79,7 +79,10 @@ class BaseAudit(BaseModel, ABC):
  default=None, description="A parent running ID."
  )
  run_id: str = Field(description="A running ID")
- execution_time: float = Field(default=0, description="An execution time.")
+ runs_metadata: DictData = Field(
+ default_factory=dict,
+ description="A runs metadata that will use to tracking this audit log.",
+ )

  @model_validator(mode="after")
  def __model_action(self) -> Self:
@@ -296,20 +299,22 @@ class FileAudit(BaseAudit):


  class SQLiteAudit(BaseAudit): # pragma: no cov
- """SQLite Audit Pydantic Model."""
+ """SQLite Audit model."""

  table_name: ClassVar[str] = "audits"
  schemas: ClassVar[
  str
  ] = """
- workflow str,
- release int,
- type str,
- context json,
- parent_run_id int,
- run_id int,
- update datetime
- primary key ( run_id )
+ workflow str
+ , release int
+ , type str
+ , context JSON
+ , parent_run_id int
+ , run_id int
+ , metadata JSON
+ , created_at datetime
+ , updated_at datetime
+ primary key ( workflow, release )
  """

  @classmethod
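
For illustration, the revised column list can be exercised with the standard `sqlite3` module; this is a hedged sketch (the `CREATE TABLE` wrapper and the added comma before the table-level primary key are assumptions, not part of the package):

    import sqlite3

    # Column definitions copied from the new `schemas` fragment above, with a
    # comma inserted before the composite primary key so SQLite parses it.
    AUDIT_TABLE_BODY = """
        workflow str
        , release int
        , type str
        , context JSON
        , parent_run_id int
        , run_id int
        , metadata JSON
        , created_at datetime
        , updated_at datetime
        , primary key ( workflow, release )
    """

    with sqlite3.connect(":memory:") as conn:
        conn.execute(f"CREATE TABLE IF NOT EXISTS audits ({AUDIT_TABLE_BODY})")
        # One audit row per (workflow, release) pair under the new composite key.
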
@@ -8,6 +8,7 @@ from __future__ import annotations
  import json
  from pathlib import Path
  from platform import python_version
+ from textwrap import dedent
  from typing import Annotated, Any, Literal, Optional, Union

  import typer
@@ -15,15 +16,13 @@ from pydantic import Field, TypeAdapter

  from .__about__ import __version__
  from .__types import DictData
+ from .conf import config
  from .errors import JobError
  from .job import Job
  from .params import Param
- from .result import Result
  from .workflow import Workflow

- app = typer.Typer(
- pretty_exceptions_enable=True,
- )
+ app = typer.Typer(pretty_exceptions_enable=True)


  @app.callback()
@@ -41,12 +40,70 @@ def version() -> None:
  typer.echo(f"python-version=={python_version()}")


+ @app.command()
+ def init() -> None:
+ """Initialize a Workflow structure on the current context."""
+ config.conf_path.mkdir(exist_ok=True)
+ (config.conf_path / ".confignore").touch()
+
+ conf_example_path: Path = config.conf_path / "examples"
+ conf_example_path.mkdir(exist_ok=True)
+
+ example_template: Path = conf_example_path / "wf_examples.yml"
+ example_template.write_text(
+ dedent(
+ """
+ # Example workflow template.
+ wf-example:
+ type: Workflow
+ desc: |
+ An example workflow template.
+ params:
+ name:
+ type: str
+ default: "World"
+ jobs:
+ first-job:
+ stages:
+ - name: "Call tasks"
+ uses: tasks/say-hello-func@example
+ with:
+ name: ${{ params.name }}
+ """
+ ).lstrip("\n")
+ )
+
+ if "." in config.registry_caller:
+ task_path = Path("./tasks")
+ task_path.mkdir(exist_ok=True)
+
+ dummy_tasks_path = task_path / "example.py"
+ dummy_tasks_path.write_text(
+ dedent(
+ """
+ from ddeutil.workflow import Result, tag
+
+ @tag(name="example", alias="say-hello-func")
+ def hello_world_task(name: str, rs: Result) -> dict[str, str]:
+ \"\"\"Logging hello task function\"\"\"
+ rs.trace.info(f"Hello, {name}")
+ return {"name": name}
+ """
+ ).lstrip("\n")
+ )
+
+ init_path = task_path / "__init__.py"
+ init_path.write_text("from .example import hello_world_task\n")
+ typer.echo(
+ "Starter command: `workflow-cli workflows execute --name=wf-example`"
+ )
+
+
  @app.command(name="job")
  def execute_job(
  params: Annotated[str, typer.Option(help="A job execute parameters")],
  job: Annotated[str, typer.Option(help="A job model")],
- parent_run_id: Annotated[str, typer.Option(help="A parent running ID")],
- run_id: Annotated[Optional[str], typer.Option(help="A running ID")] = None,
+ run_id: Annotated[str, typer.Option(help="A running ID")],
  ) -> None:
  """Job execution on the local.

@@ -62,26 +119,19 @@ def execute_job(
  job_dict: dict[str, Any] = json.loads(job)
  _job: Job = Job.model_validate(obj=job_dict)
  except json.JSONDecodeError as e:
- raise ValueError(f"Params does not support format: {params!r}.") from e
+ raise ValueError(f"Jobs does not support format: {job!r}.") from e

  typer.echo(f"Job params: {params_dict}")
- rs: Result = Result(
- run_id=run_id,
- parent_run_id=parent_run_id,
- )
-
  context: DictData = {}
  try:
  _job.set_outputs(
- _job.execute(
- params=params_dict,
- run_id=rs.run_id,
- parent_run_id=rs.parent_run_id,
- ).context,
+ _job.execute(params=params_dict, run_id=run_id).context,
  to=context,
  )
+ typer.echo("[JOB]: Context result:")
+ typer.echo(json.dumps(context, default=str, indent=0))
  except JobError as err:
- rs.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
+ typer.echo(f"[JOB]: {err.__class__.__name__}: {err}")


  @app.command()
@@ -136,8 +186,24 @@ def workflow_callback():


  @workflow_app.command(name="execute")
- def workflow_execute():
- """"""
+ def workflow_execute(
+ name: Annotated[
+ str,
+ typer.Option(help="A name of workflow template."),
+ ],
+ params: Annotated[
+ str,
+ typer.Option(help="A workflow execute parameters"),
+ ] = "{}",
+ ):
+ """Execute workflow by passing a workflow template name."""
+ try:
+ params_dict: dict[str, Any] = json.loads(params)
+ except json.JSONDecodeError as e:
+ raise ValueError(f"Params does not support format: {params!r}.") from e
+
+ typer.echo(f"Start execute workflow template: {name}")
+ typer.echo(f"... with params: {params_dict}")


  WORKFLOW_TYPE = Literal["Workflow"]
@@ -167,7 +233,7 @@ def workflow_json_schema(
  template_schema: dict[str, str] = {
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "Workflow Configuration Schema",
- "version": "1.0.0",
+ "version": __version__,
  }
  with open(output, mode="w", encoding="utf-8") as f:
  json.dump(template_schema | json_schema, f, indent=2)
@@ -89,16 +89,6 @@ class Config: # pragma: no cov
  """
  return Path(env("CORE_CONF_PATH", "./conf"))

- @property
- def tz(self) -> ZoneInfo:
- """Timezone value that return with the `ZoneInfo` object and use for all
- datetime object in this workflow engine.
-
- Returns:
- ZoneInfo: The timezone configuration for the workflow engine.
- """
- return ZoneInfo(env("CORE_TIMEZONE", "UTC"))
-
  @property
  def generate_id_simple_mode(self) -> bool:
  """Flag for generate running ID with simple mode. That does not use
@@ -143,6 +133,16 @@ class Config: # pragma: no cov
  """
  return str2bool(env("LOG_DEBUG_MODE", "true"))

+ @property
+ def log_tz(self) -> ZoneInfo:
+ """Timezone value that return with the `ZoneInfo` object and use for all
+ datetime object in this workflow engine.
+
+ Returns:
+ ZoneInfo: The timezone configuration for the workflow engine.
+ """
+ return ZoneInfo(env("LOG_TIMEZONE", "UTC"))
+
  @property
  def log_format(self) -> str:
  return env(
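
A small sketch of how the relocated setting would resolve (assumptions: the `env` helper ultimately reads an environment variable, possibly with a package-specific prefix; this is not the package's own wiring):

    import os
    from datetime import datetime
    from zoneinfo import ZoneInfo

    def log_tz() -> ZoneInfo:
        # Mirrors the new `Config.log_tz` default: LOG_TIMEZONE, falling back to UTC.
        return ZoneInfo(os.getenv("LOG_TIMEZONE", "UTC"))

    os.environ["LOG_TIMEZONE"] = "Asia/Bangkok"  # hypothetical variable name
    print(datetime.now(tz=log_tz()).isoformat())
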
@@ -136,16 +136,14 @@ class BaseError(Exception):
  ErrorData or dict: Exception data, optionally mapped by reference ID

  Example:
- ```python
- error = BaseError("Something failed", refs="stage-1")
-
- # Simple format
- data = error.to_dict()
- # Returns: {"name": "BaseError", "message": "Something failed"}
-
- # With reference mapping
- ref_data = error.to_dict(with_refs=True)
- # Returns: {"stage-1": {"name": "BaseError", "message": "Something failed"}}
+ >>> error = BaseError("Something failed", refs="stage-1")
+ >>> # Simple format
+ >>> error.to_dict()
+ >>> # Returns: {"name": "BaseError", "message": "Something failed"}
+
+ >>> # With reference mapping
+ >>> error.to_dict(with_refs=True)
+ >>> # Returns: {"stage-1": {"name": "BaseError", "message": "Something failed"}}
  ```
  """
  data: ErrorData = to_dict(self)
@@ -656,6 +656,7 @@ class Job(BaseModel):
  to: DictData,
  *,
  job_id: StrOrNone = None,
+ **kwargs,
  ) -> DictData:
  """Set an outputs from execution result context to the received context
  with a `to` input parameter. The result context from job strategy
@@ -693,12 +694,15 @@
  :raise JobError: If the job's ID does not set and the setting
  default job ID flag does not set.

- :param output: (DictData) A result data context that want to extract
- and transfer to the `strategies` key in receive context.
- :param to: (DictData) A received context data.
- :param job_id: (StrOrNone) A job ID if the `id` field does not set.
+ Args:
+ output: (DictData) A result data context that want to extract
+ and transfer to the `strategies` key in receive context.
+ to: (DictData) A received context data.
+ job_id: (StrOrNone) A job ID if the `id` field does not set.
+ kwargs: Any values that want to add to the target context.

- :rtype: DictData
+ Returns:
+ DictData: Return updated the target context with a result context.
  """
  if "jobs" not in to:
  to["jobs"] = {}
@@ -716,8 +720,9 @@
  status: dict[str, Status] = (
  {"status": output.pop("status")} if "status" in output else {}
  )
+ kwargs: DictData = kwargs or {}
  if self.strategy.is_set():
- to["jobs"][_id] = {"strategies": output} | errors | status
+ to["jobs"][_id] = {"strategies": output} | errors | status | kwargs
  elif len(k := output.keys()) > 1: # pragma: no cov
  raise JobError(
  "Strategy output from execution return more than one ID while "
@@ -726,7 +731,7 @@
  else:
  _output: DictData = {} if len(k) == 0 else output[list(k)[0]]
  _output.pop("matrix", {})
- to["jobs"][_id] = _output | errors | status
+ to["jobs"][_id] = _output | errors | status | kwargs
  return to

  def get_outputs(
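
The merged mappings use PEP 584 dict union, so any extra `kwargs` land next to the strategy output; a quick sketch with illustrative values (not taken from a real run):

    # Illustrative values only; in the package these come from Job.execute().
    output = {"strategy-01": {"matrix": {}, "stages": {}}}
    errors: dict = {}
    status = {"status": "SUCCESS"}
    kwargs = {"info": {"execution_time": 0.42}}

    to: dict = {"jobs": {}}
    # Later mappings win on key conflicts, same as the new line above.
    to["jobs"]["first-job"] = {"strategies": output} | errors | status | kwargs
    print(to["jobs"]["first-job"]["info"])  # {'execution_time': 0.42}
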
@@ -800,8 +805,7 @@
  return docker_execution(
  self,
  params,
- run_id=run_id,
- parent_run_id=parent_run_id,
+ run_id=parent_run_id,
  event=event,
  ).make_info({"execution_time": time.monotonic() - ts})

@@ -1294,7 +1298,6 @@ def docker_execution(
  params: DictData,
  *,
  run_id: StrOrNone = None,
- parent_run_id: StrOrNone = None,
  event: Optional[Event] = None,
  ): # pragma: no cov
  """Docker job execution.
@@ -52,7 +52,7 @@ from pydantic import BaseModel, Field

  from .__types import StrOrInt
  from .errors import ParamError
- from .utils import get_d_now, get_dt_now
+ from .utils import UTC, get_d_now, get_dt_now

  T = TypeVar("T")

@@ -169,16 +169,18 @@ class DatetimeParam(DefaultParam):
  return self.default

  if isinstance(value, datetime):
- return value
+ if value.tzinfo is None:
+ return value.replace(tzinfo=UTC)
+ return value.astimezone(UTC)
  elif isinstance(value, date):
- return datetime(value.year, value.month, value.day)
+ return datetime(value.year, value.month, value.day, tzinfo=UTC)
  elif not isinstance(value, str):
  raise ParamError(
  f"Value that want to convert to datetime does not support for "
  f"type: {type(value)}"
  )
  try:
- return datetime.fromisoformat(value)
+ return datetime.fromisoformat(value).replace(tzinfo=UTC)
  except ValueError:
  raise ParamError(
  f"Invalid the ISO format string for datetime: {value!r}"
@@ -16,7 +16,6 @@ Classes:
  Functions:
  validate_statuses: Determine final status from multiple status values
  get_status_from_error: Convert exception types to appropriate status
- get_dt_tznow: Get current datetime with timezone configuration
  """
  from __future__ import annotations

@@ -43,7 +42,7 @@ from . import (
  from .__types import DictData
  from .audits import Trace, get_trace
  from .errors import ResultError
- from .utils import default_gen_id, get_dt_ntz_now
+ from .utils import default_gen_id, get_dt_now


  class Status(str, Enum):
@@ -89,6 +88,7 @@ class Status(str, Enum):
  return self.name

  def is_result(self) -> bool:
+ """Return True if this status is the status for result object."""
  return self in ResultStatuses


@@ -115,15 +115,13 @@ def validate_statuses(statuses: list[Status]) -> Status:
  Status: Final consolidated status based on workflow logic

  Example:
- ```python
- # Mixed statuses - FAILED takes priority
- result = validate_statuses([SUCCESS, FAILED, SUCCESS])
- # Returns: FAILED
-
- # All same status
- result = validate_statuses([SUCCESS, SUCCESS, SUCCESS])
- # Returns: SUCCESS
- ```
+ >>> # Mixed statuses - FAILED takes priority
+ >>> validate_statuses([SUCCESS, FAILED, SUCCESS])
+ >>> # Returns: FAILED
+
+ >>> # All same status
+ >>> validate_statuses([SUCCESS, SUCCESS, SUCCESS])
+ >>> # Returns: SUCCESS
  """
  if any(s == CANCEL for s in statuses):
  return CANCEL
@@ -153,6 +151,9 @@ def get_status_from_error(
  ) -> Status:
  """Get the Status from the error object.

+ Args:
+ error: An error object.
+
  Returns:
  Status: The status from the specific exception class.
  """
@@ -189,8 +190,8 @@ class Result:
  context: DictData = field(default_factory=default_context)
  info: DictData = field(default_factory=dict)
  run_id: Optional[str] = field(default_factory=default_gen_id)
- parent_run_id: Optional[str] = field(default=None, compare=False)
- ts: datetime = field(default_factory=get_dt_ntz_now, compare=False)
+ parent_run_id: Optional[str] = field(default=None)
+ ts: datetime = field(default_factory=get_dt_now, compare=False)
  trace: Optional[Trace] = field(default=None, compare=False, repr=False)
  extras: DictData = field(default_factory=dict, compare=False, repr=False)

@@ -266,7 +267,7 @@

  :rtype: float
  """
- return (get_dt_ntz_now() - self.ts).total_seconds()
+ return (get_dt_now() - self.ts).total_seconds()


  def catch(
@@ -295,7 +295,7 @@ class BaseStage(BaseModel, ABC):
  ts: float = time.monotonic()
  parent_run_id: str = run_id
  run_id: str = run_id or gen_id(self.iden, unique=True)
- context: DictData = {}
+ context: DictData = {"status": WAIT}
  trace: Trace = get_trace(
  run_id, parent_run_id=parent_run_id, extras=self.extras
  )
@@ -413,7 +413,7 @@
  self,
  output: DictData,
  to: DictData,
- info: Optional[DictData] = None,
+ **kwargs,
  ) -> DictData:
  """Set an outputs from execution result context to the received context
  with a `to` input parameter. The result context from stage execution
@@ -447,12 +447,14 @@
  to the `to` argument. The result context was soft copied before set
  output step.

- :param output: (DictData) A result data context that want to extract
- and transfer to the `outputs` key in receive context.
- :param to: (DictData) A received context data.
- :param info: (DictData)
+ Args:
+ output: (DictData) A result data context that want to extract
+ and transfer to the `outputs` key in receive context.
+ to: (DictData) A received context data.
+ kwargs: Any values that want to add to the target context.

- :rtype: DictData
+ Returns:
+ DictData: Return updated the target context with a result context.
  """
  if "stages" not in to:
  to["stages"] = {}
@@ -470,8 +472,8 @@
  status: dict[str, Status] = (
  {"status": output.pop("status")} if "status" in output else {}
  )
- info: DictData = {"info": info} if info else {}
- to["stages"][_id] = {"outputs": output} | errors | status | info
+ kwargs: DictData = kwargs or {}
+ to["stages"][_id] = {"outputs": output} | errors | status | kwargs
  return to

  def get_outputs(self, output: DictData) -> DictData: