ddeutil-workflow 0.0.81__py3-none-any.whl → 0.0.83__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
@@ -1 +1,2 @@
-__version__: str = "0.0.81"
+__version__: str = "0.0.83"
+__python_version__: str = "3.9"
@@ -715,7 +715,7 @@ class CronJob:
         self,
         date: Optional[datetime] = None,
         *,
-        tz: Optional[str] = None,
+        tz: Optional[Union[str, ZoneInfo]] = None,
     ) -> CronRunner:
         """Returns CronRunner instance that be datetime runner with this
         cronjob. It can use `next`, `prev`, or `reset` methods to generate
@@ -50,19 +50,37 @@ Note:
 from .__cron import CronRunner
 from .__types import DictData, DictStr, Matrix, Re, TupleStr
 from .audits import (
+    DRYRUN,
+    FORCE,
+    NORMAL,
+    RERUN,
     Audit,
-    FileAudit,
+    LocalFileAudit,
     get_audit,
 )
-from .conf import *
+from .conf import (
+    PREFIX,
+    CallerSecret,
+    Config,
+    YamlParser,
+    api_config,
+    config,
+    dynamic,
+    env,
+    pass_env,
+)
 from .errors import (
     BaseError,
+    EventError,
     JobCancelError,
     JobError,
     JobSkipError,
     ResultError,
     StageCancelError,
     StageError,
+    StageNestedCancelError,
+    StageNestedError,
+    StageNestedSkipError,
     StageSkipError,
     UtilError,
     WorkflowCancelError,
@@ -132,15 +150,11 @@ from .stages import (
     VirtualPyStage,
 )
 from .traces import (
-    TraceManager,
+    Trace,
     get_trace,
 )
 from .utils import *
 from .workflow import (
-    EVENT,
-    FORCE,
-    NORMAL,
-    RERUN,
     ReleaseType,
     Workflow,
 )
@@ -1,4 +1,283 @@
-from .cli import app
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+import json
+from pathlib import Path
+from platform import python_version
+from textwrap import dedent
+from typing import Annotated, Any, Literal, Optional, Union
+
+import typer
+from pydantic import Field, TypeAdapter
+
+from .__about__ import __version__
+from .__types import DictData
+from .conf import config
+from .errors import JobError
+from .job import Job
+from .params import Param
+from .workflow import Workflow
+
+app = typer.Typer(pretty_exceptions_enable=True)
+
+
+@app.callback()
+def callback() -> None:
+    """Manage Workflow Orchestration CLI.
+
+    Use it with the interface workflow engine.
+    """
+
+
+@app.command()
+def version() -> None:
+    """Get the ddeutil-workflow package version."""
+    typer.echo(f"ddeutil-workflow=={__version__}")
+    typer.echo(f"python-version=={python_version()}")
+
+
+@app.command()
+def init() -> None:
+    """Initialize a Workflow structure on the current context."""
+    config.conf_path.mkdir(exist_ok=True)
+    (config.conf_path / ".confignore").touch()
+
+    conf_example_path: Path = config.conf_path / "examples"
+    conf_example_path.mkdir(exist_ok=True)
+
+    example_template: Path = conf_example_path / "wf_examples.yml"
+    example_template.write_text(
+        dedent(
+            """
+            # Example workflow template.
+            name: wf-example:
+            type: Workflow
+            desc: |
+              An example workflow template that provide the demo of workflow.
+            params:
+              name:
+                type: str
+                default: "World"
+            jobs:
+              first-job:
+                stages:
+
+                  - name: "Hello Stage"
+                    echo: "Start say hi to the console"
+
+                  - name: "Call tasks"
+                    uses: tasks/say-hello-func@example
+                    with:
+                      name: ${{ params.name }}
+
+              second-job:
+
+                - name: "Hello Env"
+                  echo: "Start say hi with ${ WORKFLOW_DEMO_HELLO }"
+            """
+        ).lstrip("\n")
+    )
+
+    if "." in config.registry_caller:
+        task_path = Path("./tasks")
+        task_path.mkdir(exist_ok=True)
+
+        dummy_tasks_path = task_path / "example.py"
+        dummy_tasks_path.write_text(
+            dedent(
+                """
+                from typing import Any, Optional
+
+                from ddeutil.workflow import Result, tag
+
+                @tag(name="example", alias="say-hello-func")
+                def hello_world_task(name: str, rs: Result, extras: Optional[dict[str, Any]] = None) -> dict[str, str]:
+                    \"\"\"Logging hello task function\"\"\"
+                    _extras = extras or {}
+                    # NOTE: I will use custom newline logging if you pass `||`.
+                    rs.trace.info(
+                        f"Hello, {name}||"
+                        f"> running ID: {rs.run_id}"
+                        f"> extras: {_extras}"
+                    )
+                    return {"name": name}
+                """
+            ).lstrip("\n")
+        )
+
+        init_path = task_path / "__init__.py"
+        init_path.write_text("from .example import hello_world_task\n")
+
+    dotenv_file = Path(".env")
+    mode: str = "a" if dotenv_file.exists() else "w"
+    with dotenv_file.open(mode=mode) as f:
+        f.write("\n# Workflow Environment Variables\n")
+        f.write(
+            "WORKFLOW_DEMO_HELLO=foo\n"
+            "WORKFLOW_CORE_DEBUG_MODE=true\n"
+            "WORKFLOW_LOG_TIMEZONE=Asia/Bangkok\n"
+            'WORKFLOW_LOG_TRACE_HANDLERS=\'[{"type": "console"}]\'\n'
+            'WORKFLOW_LOG_AUDIT_CONF=\'{"type": "file", "path": "./audits"}\''
+            "WORKFLOW_LOG_AUDIT_ENABLE_WRITE=true\n"
+        )
+
+    typer.echo("Starter command:")
+    typer.echo(
+        ">>> `source .env && workflow-cli workflows execute --name=wf-example`"
+    )
+
+
+@app.command(name="job")
+def execute_job(
+    params: Annotated[str, typer.Option(help="A job execute parameters")],
+    job: Annotated[str, typer.Option(help="A job model")],
+    run_id: Annotated[str, typer.Option(help="A running ID")],
+) -> None:
+    """Job execution on the local.
+
+    Example:
+        ... workflow-cli job --params \"{\\\"test\\\": 1}\"
+    """
+    try:
+        params_dict: dict[str, Any] = json.loads(params)
+    except json.JSONDecodeError as e:
+        raise ValueError(f"Params does not support format: {params!r}.") from e
+
+    try:
+        job_dict: dict[str, Any] = json.loads(job)
+        _job: Job = Job.model_validate(obj=job_dict)
+    except json.JSONDecodeError as e:
+        raise ValueError(f"Jobs does not support format: {job!r}.") from e
+
+    typer.echo(f"Job params: {params_dict}")
+    context: DictData = {}
+    try:
+        _job.set_outputs(
+            _job.execute(params=params_dict, run_id=run_id).context,
+            to=context,
+        )
+        typer.echo("[JOB]: Context result:")
+        typer.echo(json.dumps(context, default=str, indent=0))
+    except JobError as err:
+        typer.echo(f"[JOB]: {err.__class__.__name__}: {err}")
+
+
+@app.command()
+def api(
+    host: Annotated[str, typer.Option(help="A host url.")] = "0.0.0.0",
+    port: Annotated[int, typer.Option(help="A port url.")] = 80,
+    debug: Annotated[bool, typer.Option(help="A debug mode flag")] = True,
+    workers: Annotated[int, typer.Option(help="A worker number")] = None,
+    reload: Annotated[bool, typer.Option(help="A reload flag")] = False,
+) -> None:
+    """
+    Provision API application from the FastAPI.
+    """
+    import uvicorn
+
+    from .api import app as fastapp
+    from .api.log_conf import LOGGING_CONFIG
+
+    # LOGGING_CONFIG = {}
+
+    uvicorn.run(
+        fastapp,
+        host=host,
+        port=port,
+        log_config=uvicorn.config.LOGGING_CONFIG | LOGGING_CONFIG,
+        # NOTE: Logging level of uvicorn should be lowered case.
+        log_level=("debug" if debug else "info"),
+        workers=workers,
+        reload=reload,
+    )
+
+
+@app.command()
+def make(
+    name: Annotated[Path, typer.Argument()],
+) -> None:
+    """
+    Create Workflow YAML template.
+
+    :param name:
+    """
+    typer.echo(f"Start create YAML template filename: {name.resolve()}")
+
+
+workflow_app = typer.Typer()
+app.add_typer(workflow_app, name="workflows", help="An Only Workflow CLI.")
+
+
+@workflow_app.callback()
+def workflow_callback():
+    """Manage Only Workflow CLI."""
+
+
+@workflow_app.command(name="execute")
+def workflow_execute(
+    name: Annotated[
+        str,
+        typer.Option(help="A name of workflow template."),
+    ],
+    params: Annotated[
+        str,
+        typer.Option(help="A workflow execute parameters"),
+    ] = "{}",
+):
+    """Execute workflow by passing a workflow template name."""
+    try:
+        params_dict: dict[str, Any] = json.loads(params)
+    except json.JSONDecodeError as e:
+        raise ValueError(f"Params does not support format: {params!r}.") from e
+
+    typer.echo(f"Start execute workflow template: {name}")
+    typer.echo(f"... with params: {params_dict}")
+
+
+class WorkflowSchema(Workflow):
+    """Override workflow model fields for generate JSON schema file."""
+
+    type: Literal["Workflow"] = Field(
+        description="A type of workflow template that should be `Workflow`."
+    )
+    name: Optional[str] = Field(default=None, description="A workflow name.")
+    params: dict[str, Union[Param, str]] = Field(
+        default_factory=dict,
+        description="A parameters that need to use on this workflow.",
+    )
+
+
+@workflow_app.command(name="json-schema")
+def workflow_json_schema(
+    output: Annotated[
+        Path,
+        typer.Option(help="An output file to export the JSON schema."),
+    ] = Path("./json-schema.json"),
+) -> None:
+    """Generate JSON schema file from the Workflow model."""
+    template = dict[str, WorkflowSchema]
+    json_schema = TypeAdapter(template).json_schema(by_alias=True)
+    template_schema: dict[str, str] = {
+        "$schema": "http://json-schema.org/draft-07/schema#",
+        "title": "Workflow Configuration JSON Schema",
+        "version": __version__,
+    }
+    with open(output, mode="w", encoding="utf-8") as f:
+        json.dump(template_schema | json_schema, f, indent=2)
+
+
+log_app = typer.Typer()
+app.add_typer(log_app, name="logs", help="An Only Log CLI.")
+
+
+@log_app.callback()
+def log_callback():
+    """Manage Only Log CLI."""
+
 
 if __name__ == "__main__":
     app()
@@ -16,17 +16,26 @@ from re import (
     Match,
     Pattern,
 )
-from typing import Any, Optional, TypedDict, Union
+from typing import Any, Optional, TypedDict, Union, cast
 
 from typing_extensions import Self
 
 StrOrNone = Optional[str]
 StrOrInt = Union[str, int]
 TupleStr = tuple[str, ...]
+ListStr = list[str]
+ListInt = list[int]
 DictData = dict[str, Any]
+DictRange = dict[int, Any]
 DictStr = dict[str, str]
 Matrix = dict[str, Union[list[str], list[int]]]
 
+
+def cast_dict(value: TypedDict[...]) -> DictData:
+    """Cast any TypedDict object to DictData type."""
+    return cast(DictData, value)
+
+
 # Pre-compile regex patterns for better performance
 _RE_CALLER_PATTERN = r"""
 \$ # start with $
@@ -15,7 +15,7 @@ from fastapi.responses import UJSONResponse
 from ...__types import DictData
 from ...errors import JobError
 from ...job import Job
-from ...traces import TraceManager, get_trace
+from ...traces import Trace, get_trace
 from ...utils import gen_id
 
 logger = logging.getLogger("uvicorn.error")
@@ -41,7 +41,7 @@ async def job_execute(
     if extras:
         job.extras = extras
 
-    trace: TraceManager = get_trace(
+    trace: Trace = get_trace(
         run_id, parent_run_id=parent_run_id, extras=job.extras
     )
 
@@ -3,7 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-"""This route include audit and trace log paths."""
+"""This route include audit log path."""
 from __future__ import annotations
 
 from fastapi import APIRouter, Path, Query
@@ -11,7 +11,6 @@ from fastapi import status as st
 from fastapi.responses import UJSONResponse
 
 from ...audits import get_audit
-from ...result import Result
 
 router = APIRouter(
     prefix="/logs",
@@ -20,63 +19,6 @@ router = APIRouter(
 )
 
 
-@router.get(
-    path="/traces/",
-    response_class=UJSONResponse,
-    status_code=st.HTTP_200_OK,
-    summary="Read all trace logs.",
-    tags=["trace"],
-)
-async def get_traces(
-    offset: int = Query(default=0, gt=0),
-    limit: int = Query(default=100, gt=0),
-):
-    """Return all trace logs from the current trace log path that config with
-    `WORKFLOW_LOG_PATH` environment variable name.
-    """
-    result = Result()
-    return {
-        "message": (
-            f"Getting trace logs with offset: {offset} and limit: {limit}"
-        ),
-        "traces": [
-            trace.model_dump(
-                by_alias=True,
-                exclude_none=True,
-                exclude_unset=True,
-            )
-            for trace in result.trace.find_traces()
-        ],
-    }
-
-
-@router.get(
-    path="/traces/{run_id}",
-    response_class=UJSONResponse,
-    status_code=st.HTTP_200_OK,
-    summary="Read trace log with specific running ID.",
-    tags=["trace"],
-)
-async def get_trace_with_id(run_id: str):
-    """Return trace log with specific running ID from the current trace log path
-    that config with `WORKFLOW_LOG_PATH` environment variable name.
-
-    - **run_id**: A running ID that want to search a trace log from the log
-      path.
-    """
-    result = Result()
-    return {
-        "message": f"Getting trace log with specific running ID: {run_id}",
-        "trace": (
-            result.trace.find_trace_with_id(run_id).model_dump(
-                by_alias=True,
-                exclude_none=True,
-                exclude_unset=True,
-            )
-        ),
-    }
-
-
 @router.get(
     path="/audits/",
     response_class=UJSONResponse,
@@ -84,12 +26,17 @@ async def get_trace_with_id(run_id: str):
     summary="Read all audit logs.",
     tags=["audit"],
 )
-async def get_audits():
+async def get_audits(
+    offset: int = Query(default=0, gt=0),
+    limit: int = Query(default=100, gt=0),
+):
     """Return all audit logs from the current audit log path that config with
     `WORKFLOW_AUDIT_URL` environment variable name.
     """
     return {
-        "message": "Getting audit logs",
+        "message": (
+            f"Getting audit logs with offset: {offset} and limit: {limit}",
+        ),
         "audits": list(get_audit().find_audits(name="demo")),
     }
 