ddeutil-workflow 0.0.68__py3-none-any.whl → 0.0.70__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
1
- __version__: str = "0.0.68"
1
+ __version__: str = "0.0.70"
@@ -5,23 +5,16 @@
5
5
  # ------------------------------------------------------------------------------
6
6
  from .__cron import CronJob, CronRunner
7
7
  from .__types import DictData, DictStr, Matrix, Re, TupleStr
8
- from .conf import *
9
- from .errors import *
10
- from .event import *
11
- from .job import *
12
- from .logs import (
8
+ from .audits import (
13
9
  Audit,
14
10
  AuditModel,
15
11
  FileAudit,
16
- FileTrace,
17
- Trace,
18
- TraceData,
19
- TraceMeta,
20
- TraceModel,
21
12
  get_audit,
22
- get_dt_tznow,
23
- get_trace,
24
13
  )
14
+ from .conf import *
15
+ from .errors import *
16
+ from .event import *
17
+ from .job import *
25
18
  from .params import *
26
19
  from .result import (
27
20
  CANCEL,
@@ -34,5 +27,14 @@ from .result import (
34
27
  )
35
28
  from .reusables import *
36
29
  from .stages import *
30
+ from .traces import (
31
+ ConsoleTrace,
32
+ FileTrace,
33
+ Trace,
34
+ TraceData,
35
+ TraceMeta,
36
+ TraceModel,
37
+ get_trace,
38
+ )
37
39
  from .utils import *
38
40
  from .workflow import *
@@ -1,15 +1,16 @@
1
1
  from typing import Any
2
2
 
3
+ from uvicorn.config import LOGGING_CONFIG as LOGGING_CONFIG_UVICORN
4
+
3
5
  from ..conf import config
4
6
 
5
7
  LOGGING_CONFIG: dict[str, Any] = { # pragma: no cov
6
8
  "version": 1,
7
9
  "disable_existing_loggers": False,
8
10
  "formatters": {
9
- "standard": {
10
- "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
11
- },
12
- "custom_formatter": {
11
+ "default": LOGGING_CONFIG_UVICORN["formatters"]["default"],
12
+ "access": LOGGING_CONFIG_UVICORN["formatters"]["access"],
13
+ "custom": {
13
14
  "format": config.log_format,
14
15
  "datefmt": config.log_datetime_format,
15
16
  },
@@ -18,14 +19,10 @@ LOGGING_CONFIG: dict[str, Any] = { # pragma: no cov
18
19
  "level": "DEBUG" if config.debug else "INFO",
19
20
  },
20
21
  "handlers": {
21
- "default": {
22
- "formatter": "standard",
23
- "class": "logging.StreamHandler",
24
- "stream": "ext://sys.stderr",
25
- },
26
- "stream_handler": {
27
- # "formatter": "standard",
28
- "formatter": "custom_formatter",
22
+ "default": LOGGING_CONFIG_UVICORN["handlers"]["default"],
23
+ "access": LOGGING_CONFIG_UVICORN["handlers"]["access"],
24
+ "stream_custom": {
25
+ "formatter": "custom",
29
26
  "class": "logging.StreamHandler",
30
27
  "stream": "ext://sys.stdout",
31
28
  },
@@ -39,34 +36,24 @@ LOGGING_CONFIG: dict[str, Any] = { # pragma: no cov
39
36
  },
40
37
  "loggers": {
41
38
  "uvicorn": {
42
- # "handlers": ["default", "file_handler"],
43
39
  "handlers": ["default"],
44
40
  "level": "DEBUG" if config.debug else "INFO",
45
41
  "propagate": False,
46
42
  },
47
43
  "uvicorn.access": {
48
- # "handlers": ["stream_handler", "file_handler"],
49
- "handlers": ["stream_handler"],
44
+ "handlers": ["access"],
50
45
  "level": "DEBUG" if config.debug else "INFO",
51
46
  "propagate": False,
52
47
  },
53
48
  "uvicorn.error": {
54
- # "handlers": ["stream_handler", "file_handler"],
55
- "handlers": ["stream_handler"],
49
+ "handlers": ["default"],
56
50
  "level": "DEBUG" if config.debug else "INFO",
57
- "propagate": False,
58
51
  },
59
- "uvicorn.asgi": {
60
- # "handlers": ["stream_handler", "file_handler"],
61
- "handlers": ["stream_handler"],
62
- "level": "TRACE",
63
- "propagate": False,
52
+ "ddeutil.workflow": {
53
+ "handlers": ["stream_custom"],
54
+ "level": "INFO",
55
+ # "propagate": False,
56
+ "propagate": True,
64
57
  },
65
- # "ddeutil.workflow": {
66
- # "handlers": ["stream_handler"],
67
- # "level": "INFO",
68
- # # "propagate": False,
69
- # "propagate": True,
70
- # },
71
58
  },
72
59
  }
@@ -10,7 +10,7 @@ from fastapi import APIRouter, Path, Query
10
10
  from fastapi import status as st
11
11
  from fastapi.responses import UJSONResponse
12
12
 
13
- from ...logs import get_audit
13
+ from ...audits import get_audit
14
14
  from ...result import Result
15
15
 
16
16
  router = APIRouter(
@@ -16,8 +16,8 @@ from fastapi.responses import UJSONResponse
16
16
  from pydantic import BaseModel
17
17
 
18
18
  from ...__types import DictData
19
- from ...conf import Loader
20
- from ...logs import AuditModel, get_audit
19
+ from ...audits import AuditModel, get_audit
20
+ from ...conf import YamlParser
21
21
  from ...result import Result
22
22
  from ...workflow import Workflow
23
23
 
@@ -32,7 +32,7 @@ router = APIRouter(
32
32
  @router.get(path="/", status_code=st.HTTP_200_OK)
33
33
  async def get_workflows() -> DictData:
34
34
  """Return all workflows that exist in the config path."""
35
- workflows: DictData = dict(Loader.finds(Workflow))
35
+ workflows: DictData = dict(YamlParser.finds(Workflow))
36
36
  return {
37
37
  "message": f"Getting all workflows: {len(workflows)}",
38
38
  "count": len(workflows),
@@ -0,0 +1,374 @@
1
+ # ------------------------------------------------------------------------------
2
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
3
+ # Licensed under the MIT License. See LICENSE in the project root for
4
+ # license information.
5
+ # ------------------------------------------------------------------------------
6
+ from __future__ import annotations
7
+
8
+ import json
9
+ import logging
10
+ import os
11
+ from abc import ABC, abstractmethod
12
+ from collections.abc import Iterator
13
+ from datetime import datetime
14
+ from pathlib import Path
15
+ from typing import ClassVar, Optional, TypeVar, Union
16
+
17
+ from pydantic import BaseModel, Field
18
+ from pydantic.functional_validators import model_validator
19
+ from typing_extensions import Self
20
+
21
+ from .__types import DictData
22
+ from .conf import dynamic
23
+ from .traces import TraceModel, get_trace, set_logging
24
+
25
+ logger = logging.getLogger("ddeutil.workflow")
26
+
27
+
28
+ class BaseAudit(BaseModel, ABC):
29
+ """Base Audit Pydantic model with abstract class methods that implement
30
+ only the model fields. This model should be used by inheriting it in
31
+ logging subclasses such as file, sqlite, etc.
32
+ """
33
+
34
+ extras: DictData = Field(
35
+ default_factory=dict,
36
+ description="An extras parameter that want to override core config",
37
+ )
38
+ name: str = Field(description="A workflow name.")
39
+ release: datetime = Field(description="A release datetime.")
40
+ type: str = Field(description="A running type before logging.")
41
+ context: DictData = Field(
42
+ default_factory=dict,
43
+ description="A context that receive from a workflow execution result.",
44
+ )
45
+ parent_run_id: Optional[str] = Field(
46
+ default=None, description="A parent running ID."
47
+ )
48
+ run_id: str = Field(description="A running ID")
49
+ execution_time: float = Field(default=0, description="An execution time.")
50
+
51
+ @model_validator(mode="after")
52
+ def __model_action(self) -> Self:
53
+ """Run the pre-audit action when the WORKFLOW_AUDIT_ENABLE_WRITE env variable is set.
54
+
55
+ :rtype: Self
56
+ """
57
+ if dynamic("enable_write_audit", extras=self.extras):
58
+ self.do_before()
59
+
60
+ # NOTE: Start setting log config in this line with cache.
61
+ set_logging("ddeutil.workflow")
62
+ return self
63
+
64
+ @classmethod
65
+ @abstractmethod
66
+ def is_pointed(
67
+ cls,
68
+ name: str,
69
+ release: datetime,
70
+ *,
71
+ extras: Optional[DictData] = None,
72
+ ) -> bool:
73
+ raise NotImplementedError(
74
+ "Audit should implement `is_pointed` class-method"
75
+ )
76
+
77
+ @classmethod
78
+ @abstractmethod
79
+ def find_audits(
80
+ cls,
81
+ name: str,
82
+ *,
83
+ extras: Optional[DictData] = None,
84
+ ) -> Iterator[Self]:
85
+ raise NotImplementedError(
86
+ "Audit should implement `find_audits` class-method"
87
+ )
88
+
89
+ @classmethod
90
+ @abstractmethod
91
+ def find_audit_with_release(
92
+ cls,
93
+ name: str,
94
+ release: Optional[datetime] = None,
95
+ *,
96
+ extras: Optional[DictData] = None,
97
+ ) -> Self:
98
+ raise NotImplementedError(
99
+ "Audit should implement `find_audit_with_release` class-method"
100
+ )
101
+
102
+ def do_before(self) -> None: # pragma: no cov
103
+ """Do something before the initialization of the log model completes."""
104
+
105
+ @abstractmethod
106
+ def save(self, excluded: Optional[list[str]]) -> None: # pragma: no cov
107
+ """Save this model logging to target logging store."""
108
+ raise NotImplementedError("Audit should implement `save` method.")
109
+
110
+
111
+ class NullAudit(BaseAudit):
112
+
113
+ @classmethod
114
+ def is_pointed(
115
+ cls,
116
+ name: str,
117
+ release: datetime,
118
+ *,
119
+ extras: Optional[DictData] = None,
120
+ ) -> bool:
121
+ return False
122
+
123
+ @classmethod
124
+ def find_audits(
125
+ cls,
126
+ name: str,
127
+ *,
128
+ extras: Optional[DictData] = None,
129
+ ) -> Iterator[Self]:
130
+ raise NotImplementedError()
131
+
132
+ @classmethod
133
+ def find_audit_with_release(
134
+ cls,
135
+ name: str,
136
+ release: Optional[datetime] = None,
137
+ *,
138
+ extras: Optional[DictData] = None,
139
+ ) -> Self:
140
+ raise NotImplementedError()
141
+
142
+ def save(self, excluded: Optional[list[str]]) -> None:
143
+ """Do nothing when the audit is not set."""
144
+ return
145
+
146
+
147
+ class FileAudit(BaseAudit):
148
+ """File Audit Pydantic Model that use to saving log data from result of
149
+ workflow execution. It inherits from BaseAudit model that implement the
150
+ ``self.save`` method for file.
151
+ """
152
+
153
+ filename_fmt: ClassVar[str] = (
154
+ "workflow={name}/release={release:%Y%m%d%H%M%S}"
155
+ )
156
+
157
+ def do_before(self) -> None:
158
+ """Create directory of release before saving log file."""
159
+ self.pointer().mkdir(parents=True, exist_ok=True)
160
+
161
+ @classmethod
162
+ def find_audits(
163
+ cls, name: str, *, extras: Optional[DictData] = None
164
+ ) -> Iterator[Self]:
165
+ """Generate the audit data found from the logs path for a specific
166
+ workflow name.
167
+
168
+ :param name: A workflow name to search release logging data for.
169
+ :param extras: An extra parameter that want to override core config.
170
+
171
+ :rtype: Iterator[Self]
172
+ """
173
+ pointer: Path = (
174
+ dynamic("audit_path", extras=extras) / f"workflow={name}"
175
+ )
176
+ if not pointer.exists():
177
+ raise FileNotFoundError(f"Pointer: {pointer.absolute()}.")
178
+
179
+ for file in pointer.glob("./release=*/*.log"):
180
+ with file.open(mode="r", encoding="utf-8") as f:
181
+ yield cls.model_validate(obj=json.load(f))
182
+
183
+ @classmethod
184
+ def find_audit_with_release(
185
+ cls,
186
+ name: str,
187
+ release: Optional[datetime] = None,
188
+ *,
189
+ extras: Optional[DictData] = None,
190
+ ) -> Self:
191
+ """Return the audit data found from the logs path for the specific
192
+ workflow name and release values. If a release is not passed as an input
193
+ argument, it will return the latest release from the current log path.
194
+
195
+ :param name: (str) A workflow name to search the log for.
196
+ :param release: (datetime) A release datetime to search the log for.
197
+ :param extras: An extra parameter that overrides the core config.
198
+
199
+ :raise FileNotFoundError:
200
+ :raise NotImplementedError: If an input release is not passed to this
201
+ method, because this method does not implement the latest log lookup.
202
+
203
+ :rtype: Self
204
+ """
205
+ if release is None:
206
+ raise NotImplementedError("Find latest log does not implement yet.")
207
+
208
+ pointer: Path = (
209
+ dynamic("audit_path", extras=extras)
210
+ / f"workflow={name}/release={release:%Y%m%d%H%M%S}"
211
+ )
212
+ if not pointer.exists():
213
+ raise FileNotFoundError(
214
+ f"Pointer: ./logs/workflow={name}/"
215
+ f"release={release:%Y%m%d%H%M%S} does not found."
216
+ )
217
+
218
+ latest_file: Path = max(pointer.glob("./*.log"), key=os.path.getctime)
219
+ with latest_file.open(mode="r", encoding="utf-8") as f:
220
+ return cls.model_validate(obj=json.load(f))
221
+
222
+ @classmethod
223
+ def is_pointed(
224
+ cls,
225
+ name: str,
226
+ release: datetime,
227
+ *,
228
+ extras: Optional[DictData] = None,
229
+ ) -> bool:
230
+ """Check whether the release log has already been pointed or created at
231
+ the destination log path.
232
+
233
+ :param name: (str) A workflow name.
234
+ :param release: (datetime) A release datetime.
235
+ :param extras: An extra parameter that want to override core config.
236
+
237
+ :rtype: bool
238
+ :return: Return False if the release log was not pointed or created.
239
+ """
240
+ # NOTE: Return False if the enable-writing-log flag is not set.
241
+ if not dynamic("enable_write_audit", extras=extras):
242
+ return False
243
+
244
+ # NOTE: Create the pointer path using the same logic as the pointer method.
245
+ pointer: Path = dynamic(
246
+ "audit_path", extras=extras
247
+ ) / cls.filename_fmt.format(name=name, release=release)
248
+
249
+ return pointer.exists()
250
+
251
+ def pointer(self) -> Path:
252
+ """Return release directory path that was generated from model data.
253
+
254
+ :rtype: Path
255
+ """
256
+ return dynamic(
257
+ "audit_path", extras=self.extras
258
+ ) / self.filename_fmt.format(name=self.name, release=self.release)
259
+
260
+ def save(self, excluded: Optional[list[str]] = None) -> Self:
261
+ """Save logging data that receives context data from a workflow
262
+ execution result.
263
+
264
+ :param excluded: A list of excluded key names to pass to the
265
+ model_dump method.
266
+
267
+ :rtype: Self
268
+ """
269
+ trace: TraceModel = get_trace(
270
+ self.run_id,
271
+ parent_run_id=self.parent_run_id,
272
+ extras=self.extras,
273
+ )
274
+
275
+ # NOTE: Check that the environ variable was set to enable real writing.
276
+ if not dynamic("enable_write_audit", extras=self.extras):
277
+ trace.debug("[AUDIT]: Skip writing log cause config was set")
278
+ return self
279
+
280
+ log_file: Path = (
281
+ self.pointer() / f"{self.parent_run_id or self.run_id}.log"
282
+ )
283
+ log_file.write_text(
284
+ json.dumps(
285
+ self.model_dump(exclude=excluded),
286
+ default=str,
287
+ indent=2,
288
+ ),
289
+ encoding="utf-8",
290
+ )
291
+ return self
292
+
293
+
294
+ class SQLiteAudit(BaseAudit): # pragma: no cov
295
+ """SQLite Audit Pydantic Model."""
296
+
297
+ table_name: ClassVar[str] = "audits"
298
+ schemas: ClassVar[
299
+ str
300
+ ] = """
301
+ workflow str,
302
+ release int,
303
+ type str,
304
+ context json,
305
+ parent_run_id int,
306
+ run_id int,
307
+ update datetime
308
+ primary key ( run_id )
309
+ """
310
+
311
+ @classmethod
312
+ def is_pointed(
313
+ cls,
314
+ name: str,
315
+ release: datetime,
316
+ *,
317
+ extras: Optional[DictData] = None,
318
+ ) -> bool: ...
319
+
320
+ @classmethod
321
+ def find_audits(
322
+ cls,
323
+ name: str,
324
+ *,
325
+ extras: Optional[DictData] = None,
326
+ ) -> Iterator[Self]: ...
327
+
328
+ @classmethod
329
+ def find_audit_with_release(
330
+ cls,
331
+ name: str,
332
+ release: Optional[datetime] = None,
333
+ *,
334
+ extras: Optional[DictData] = None,
335
+ ) -> Self: ...
336
+
337
+ def save(self, excluded: Optional[list[str]]) -> SQLiteAudit:
338
+ """Save logging data that receives context data from a workflow
339
+ execution result.
340
+ """
341
+ trace: TraceModel = get_trace(
342
+ self.run_id,
343
+ parent_run_id=self.parent_run_id,
344
+ extras=self.extras,
345
+ )
346
+
347
+ # NOTE: Check that the environ variable was set to enable real writing.
348
+ if not dynamic("enable_write_audit", extras=self.extras):
349
+ trace.debug("[AUDIT]: Skip writing log cause config was set")
350
+ return self
351
+
352
+ raise NotImplementedError("SQLiteAudit does not implement yet.")
353
+
354
+
355
+ Audit = TypeVar("Audit", bound=BaseAudit)
356
+ AuditModel = Union[
357
+ NullAudit,
358
+ FileAudit,
359
+ SQLiteAudit,
360
+ ]
361
+
362
+
363
+ def get_audit(
364
+ extras: Optional[DictData] = None,
365
+ ) -> type[AuditModel]: # pragma: no cov
366
+ """Get an audit class dynamically based on the config audit path value.
367
+
368
+ :param extras: An extra parameter that want to override the core config.
369
+
370
+ :rtype: type[AuditModel]
371
+ """
372
+ if dynamic("audit_path", extras=extras).is_file():
373
+ return SQLiteAudit
374
+ return FileAudit
ddeutil/workflow/cli.py CHANGED
@@ -1,17 +1,21 @@
1
1
  import json
2
2
  from pathlib import Path
3
3
  from platform import python_version
4
- from typing import Annotated, Any, Optional
4
+ from typing import Annotated, Any, Literal, Optional, Union
5
5
 
6
6
  import typer
7
7
  import uvicorn
8
+ from pydantic import Field, TypeAdapter
8
9
 
9
10
  from .__about__ import __version__
10
11
  from .__types import DictData
11
12
  from .api import app as fastapp
12
13
  from .errors import JobError
14
+ from .event import Crontab
13
15
  from .job import Job
16
+ from .params import Param
14
17
  from .result import Result
18
+ from .workflow import Workflow
15
19
 
16
20
  app = typer.Typer(
17
21
  pretty_exceptions_enable=True,
@@ -19,22 +23,22 @@ app = typer.Typer(
19
23
 
20
24
 
21
25
  @app.callback()
22
- def callback():
23
- """Manage Workflow CLI app.
26
+ def callback() -> None:
27
+ """Manage Workflow Orchestration CLI.
24
28
 
25
29
  Use it with the interface workflow engine.
26
30
  """
27
31
 
28
32
 
29
33
  @app.command()
30
- def version():
34
+ def version() -> None:
31
35
  """Get the ddeutil-workflow package version."""
32
36
  typer.echo(f"ddeutil-workflow=={__version__}")
33
37
  typer.echo(f"python-version=={python_version()}")
34
38
 
35
39
 
36
- @app.command()
37
- def job(
40
+ @app.command(name="job")
41
+ def execute_job(
38
42
  params: Annotated[str, typer.Option(help="A job execute parameters")],
39
43
  job: Annotated[str, typer.Option(help="A job model")],
40
44
  parent_run_id: Annotated[str, typer.Option(help="A parent running ID")],
@@ -115,5 +119,65 @@ def make(
115
119
  typer.echo(f"Start create YAML template filename: {name.resolve()}")
116
120
 
117
121
 
122
+ workflow_app = typer.Typer()
123
+ app.add_typer(workflow_app, name="workflow", help="An Only Workflow CLI.")
124
+
125
+
126
+ @workflow_app.callback()
127
+ def workflow_callback():
128
+ """Manage Only Workflow CLI."""
129
+
130
+
131
+ @workflow_app.command(name="execute")
132
+ def workflow_execute():
133
+ """"""
134
+
135
+
136
+ WORKFLOW_TYPE = Literal["Workflow"]
137
+
138
+
139
+ class WorkflowSchema(Workflow):
140
+ """Override workflow model fields for generate JSON schema file."""
141
+
142
+ type: WORKFLOW_TYPE = Field(description="A type of workflow template.")
143
+ name: Optional[str] = Field(default=None, description="A workflow name.")
144
+ params: dict[str, Union[Param, str]] = Field(
145
+ default_factory=dict,
146
+ description="A parameters that need to use on this workflow.",
147
+ )
148
+ on: Union[list[Union[Crontab, str]], str] = Field(
149
+ default_factory=list,
150
+ description="A list of Crontab instance for this workflow schedule.",
151
+ )
152
+
153
+
154
+ CRONTAB_TYPE = Literal["Crontab"]
155
+
156
+
157
+ class CrontabSchema(Crontab):
158
+ """Override crontab model fields for generate JSON schema file."""
159
+
160
+ type: CRONTAB_TYPE = Field(description="A type of crontab template.")
161
+
162
+
163
+ @workflow_app.command(name="json-schema")
164
+ def workflow_json_schema(
165
+ output: Annotated[
166
+ Path,
167
+ typer.Option(help="An output file to export the JSON schema."),
168
+ ] = Path("./json-schema.json"),
169
+ ) -> None:
170
+ """Generate JSON schema file from the Workflow model."""
171
+ template = dict[str, Union[WorkflowSchema, CrontabSchema]]
172
+ json_schema = TypeAdapter(template).json_schema(by_alias=True)
173
+ template_schema: dict[str, str] = {
174
+ "$schema": "http://json-schema.org/draft-07/schema#",
175
+ "title": "Workflow Configuration Schema",
176
+ "version": "1.0.0",
177
+ }
178
+ with open(output, mode="w", encoding="utf-8") as f:
179
+ json.dump(template_schema | json_schema, f, indent=2)
180
+
181
+
118
182
  if __name__ == "__main__":
119
183
  app()