ddeutil-workflow 0.0.33__py3-none-any.whl → 0.0.34__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
@@ -1 +1 @@
-__version__: str = "0.0.33"
+__version__: str = "0.0.34"
@@ -9,6 +9,13 @@ from .audit import (
     Audit,
     get_audit,
 )
+from .call import (
+    ReturnTagFunc,
+    TagFunc,
+    extract_call,
+    make_registry,
+    tag,
+)
 from .conf import (
     Config,
     Loader,
@@ -28,13 +35,6 @@ from .exceptions import (
     UtilException,
     WorkflowException,
 )
-from .hook import (
-    ReturnTagFunc,
-    TagFunc,
-    extract_hook,
-    make_registry,
-    tag,
-)
 from .job import (
     Job,
     Strategy,
@@ -46,7 +46,13 @@ from .params import (
     Param,
     StrParam,
 )
-from .result import Result
+from .result import (
+    Result,
+    Status,
+    TraceLog,
+    default_gen_id,
+    get_dt_tznow,
+)
 from .scheduler import (
     Schedule,
     ScheduleWorkflow,
@@ -54,10 +60,10 @@ from .scheduler import (
     schedule_runner,
     schedule_task,
 )
-from .stage import (
+from .stages import (
     BashStage,
+    CallStage,
     EmptyStage,
-    HookStage,
     PyStage,
     Stage,
     TriggerStage,
@@ -110,7 +110,7 @@ async def get_workflow_logs(name: str):
                     exclude_unset=True,
                     exclude_defaults=True,
                 )
-                for log in get_audit().find_logs(name=name)
+                for log in get_audit().find_audits(name=name)
             ],
         }
     except FileNotFoundError:
@@ -123,7 +123,7 @@ async def get_workflow_logs(name: str):
 @workflow_route.get(path="/{name}/logs/{release}")
 async def get_workflow_release_log(name: str, release: str):
     try:
-        log: Audit = get_audit().find_log_with_release(
+        log: Audit = get_audit().find_audit_with_release(
             name=name, release=datetime.strptime(release, "%Y%m%d%H%M%S")
         )
     except FileNotFoundError:
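
The two route handlers above switch from the old log-lookup names to the audit-oriented ones. A minimal usage sketch of the renamed methods (the workflow name and release timestamp are hypothetical values):

from datetime import datetime

from ddeutil.workflow.audit import Audit, get_audit

# Iterate every persisted audit record of one workflow (was: find_logs).
for audit in get_audit().find_audits(name="wf-example"):
    print(audit.run_id)

# Fetch the audit record of a single release (was: find_log_with_release).
log: Audit = get_audit().find_audit_with_release(
    name="wf-example",
    release=datetime.strptime("20240101010101", "%Y%m%d%H%M%S"),
)
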
ddeutil/workflow/audit.py CHANGED
@@ -12,16 +12,15 @@ from abc import ABC, abstractmethod
 from collections.abc import Iterator
 from datetime import datetime
 from pathlib import Path
-from typing import Any, ClassVar, Optional, Union
+from typing import ClassVar, Optional, Union
 
 from pydantic import BaseModel, Field
 from pydantic.functional_validators import model_validator
 from typing_extensions import Self
 
 from .__types import DictData, TupleStr
-from .conf import config, get_logger
-
-logger = get_logger("ddeutil.workflow")
+from .conf import config
+from .result import TraceLog
 
 __all__: TupleStr = (
     "get_audit",
@@ -55,7 +54,7 @@ class BaseAudit(BaseModel, ABC):
 
         :rtype: Self
         """
-        if config.enable_write_log:
+        if config.enable_write_audit:
             self.do_before()
         return self
 
@@ -83,8 +82,8 @@ class FileAudit(BaseAudit):
         self.pointer().mkdir(parents=True, exist_ok=True)
 
     @classmethod
-    def find_logs(cls, name: str) -> Iterator[Self]:
-        """Generate the logging data that found from logs path with specific a
+    def find_audits(cls, name: str) -> Iterator[Self]:
+        """Generate the audit data that found from logs path with specific a
         workflow name.
 
         :param name: A workflow name that want to search release logging data.
@@ -100,12 +99,12 @@ class FileAudit(BaseAudit):
             yield cls.model_validate(obj=json.load(f))
 
     @classmethod
-    def find_log_with_release(
+    def find_audit_with_release(
         cls,
         name: str,
         release: datetime | None = None,
     ) -> Self:
-        """Return the logging data that found from logs path with specific
+        """Return the audit data that found from logs path with specific
         workflow name and release values. If a release does not pass to an input
         argument, it will return the latest release from the current log path.
 
@@ -147,7 +146,7 @@ class FileAudit(BaseAudit):
         :return: Return False if the release log was not pointed or created.
         """
         # NOTE: Return False if enable writing log flag does not set.
-        if not config.enable_write_log:
+        if not config.enable_write_audit:
             return False
 
         # NOTE: create pointer path that use the same logic of pointer method.
@@ -175,14 +174,11 @@ class FileAudit(BaseAudit):
 
         :rtype: Self
         """
-        from .utils import cut_id
+        trace: TraceLog = TraceLog(self.run_id, self.parent_run_id)
 
         # NOTE: Check environ variable was set for real writing.
-        if not config.enable_write_log:
-            logger.debug(
-                f"({cut_id(self.run_id)}) [LOG]: Skip writing log cause "
-                f"config was set"
-            )
+        if not config.enable_write_audit:
+            trace.debug("[LOG]: Skip writing log cause config was set")
             return self
 
         log_file: Path = self.pointer() / f"{self.run_id}.log"
@@ -200,34 +196,29 @@ class FileAudit(BaseAudit):
 class SQLiteAudit(BaseAudit): # pragma: no cov
     """SQLite Audit Pydantic Model."""
 
-    @staticmethod
-    def meta() -> dict[str, Any]:
-        return {
-            "table": "workflow_log",
-            "ddl": """
-                workflow str,
-                release int,
-                type str,
-                context json,
-                parent_run_id int,
-                run_id int,
-                update datetime
-                primary key ( run_id )
-                """,
-        }
+    table_name: ClassVar[str] = "workflow_log"
+    schemas: ClassVar[
+        str
+    ] = """
+        workflow str,
+        release int,
+        type str,
+        context json,
+        parent_run_id int,
+        run_id int,
+        update datetime
+        primary key ( run_id )
+        """
 
     def save(self, excluded: list[str] | None) -> SQLiteAudit:
         """Save logging data that receive a context data from a workflow
         execution result.
         """
-        from .utils import cut_id
+        trace: TraceLog = TraceLog(self.run_id, self.parent_run_id)
 
         # NOTE: Check environ variable was set for real writing.
-        if not config.enable_write_log:
-            logger.debug(
-                f"({cut_id(self.run_id)}) [LOG]: Skip writing log cause "
-                f"config was set"
-            )
+        if not config.enable_write_audit:
+            trace.debug("[LOG]: Skip writing log cause config was set")
             return self
 
         raise NotImplementedError("SQLiteAudit does not implement yet.")
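
Both save() implementations now construct a TraceLog from the result module instead of using the module-level logger that was removed from the imports above. A minimal sketch of that pattern (the run-id values are hypothetical):

from ddeutil.workflow.result import TraceLog

# Positional arguments follow the calls shown in this diff:
# run_id first, then parent_run_id.
trace: TraceLog = TraceLog("635150117820", "635150117819")
trace.debug("[LOG]: Skip writing log cause config was set")
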
@@ -60,7 +60,7 @@ def tag(
 
     @wraps(func)
     def wrapped(*args: P.args, **kwargs: P.kwargs) -> TagFunc:
-        # NOTE: Able to do anything before calling hook function.
+        # NOTE: Able to do anything before calling call function.
         return func(*args, **kwargs)
 
     return wrapped
@@ -79,9 +79,9 @@ def make_registry(submodule: str) -> dict[str, Registry]:
     :rtype: dict[str, Registry]
     """
     rs: dict[str, Registry] = {}
-    regis_hooks: list[str] = config.regis_hook
-    regis_hooks.extend(["ddeutil.vendors"])
-    for module in regis_hooks:
+    regis_calls: list[str] = config.regis_call
+    regis_calls.extend(["ddeutil.vendors"])
+    for module in regis_calls:
         # NOTE: try to sequential import task functions
         try:
             importer = import_module(f"{module}.{submodule}")
@@ -114,9 +114,9 @@ def make_registry(submodule: str) -> dict[str, Registry]:
 
 
 @dataclass(frozen=True)
-class HookSearchData:
-    """Hook Search dataclass that use for receive regular expression grouping
-    dict from searching hook string value.
+class CallSearchData:
+    """Call Search dataclass that use for receive regular expression grouping
+    dict from searching call string value.
     """
 
     path: str
@@ -124,49 +124,49 @@ class HookSearchData:
     tag: str
 
 
-def extract_hook(hook: str) -> Callable[[], TagFunc]:
-    """Extract Hook function from string value to hook partial function that
+def extract_call(call: str) -> Callable[[], TagFunc]:
+    """Extract Call function from string value to call partial function that
     does run it at runtime.
 
-    :raise NotImplementedError: When the searching hook's function result does
+    :raise NotImplementedError: When the searching call's function result does
         not exist in the registry.
-    :raise NotImplementedError: When the searching hook's tag result does not
+    :raise NotImplementedError: When the searching call's tag result does not
         exist in the registry with its function key.
 
-    :param hook: A hook value that able to match with Task regex.
+    :param call: A call value that able to match with Task regex.
 
-        The format of hook value should contain 3 regular expression groups
+        The format of call value should contain 3 regular expression groups
         which match with the below config format:
 
        >>> "^(?P<path>[^/@]+)/(?P<func>[^@]+)@(?P<tag>.+)$"
 
    Examples:
-        >>> extract_hook("tasks/el-postgres-to-delta@polars")
+        >>> extract_call("tasks/el-postgres-to-delta@polars")
        ...
-        >>> extract_hook("tasks/return-type-not-valid@raise")
+        >>> extract_call("tasks/return-type-not-valid@raise")
        ...

    :rtype: Callable[[], TagFunc]
    """
-    if not (found := Re.RE_TASK_FMT.search(hook)):
+    if not (found := Re.RE_TASK_FMT.search(call)):
        raise ValueError(
-            f"Hook {hook!r} does not match with hook format regex."
+            f"Call {call!r} does not match with call format regex."
        )

-    # NOTE: Pass the searching hook string to `path`, `func`, and `tag`.
-    hook: HookSearchData = HookSearchData(**found.groupdict())
+    # NOTE: Pass the searching call string to `path`, `func`, and `tag`.
+    call: CallSearchData = CallSearchData(**found.groupdict())

    # NOTE: Registry object should implement on this package only.
-    rgt: dict[str, Registry] = make_registry(f"{hook.path}")
-    if hook.func not in rgt:
+    rgt: dict[str, Registry] = make_registry(f"{call.path}")
+    if call.func not in rgt:
        raise NotImplementedError(
-            f"``REGISTER-MODULES.{hook.path}.registries`` does not "
-            f"implement registry: {hook.func!r}."
+            f"``REGISTER-MODULES.{call.path}.registries`` does not "
+            f"implement registry: {call.func!r}."
        )

-    if hook.tag not in rgt[hook.func]:
+    if call.tag not in rgt[call.func]:
        raise NotImplementedError(
-            f"tag: {hook.tag!r} does not found on registry func: "
-            f"``REGISTER-MODULES.{hook.path}.registries.{hook.func}``"
+            f"tag: {call.tag!r} does not found on registry func: "
+            f"``REGISTER-MODULES.{call.path}.registries.{call.func}``"
        )
-    return rgt[hook.func][hook.tag]
+    return rgt[call.func][call.tag]
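
The renamed extract_call keeps the same path/func@tag resolution flow as extract_hook. A usage sketch based on the docstring examples above (the task name, tag, and keyword arguments are illustrative and must exist in a registered tasks submodule):

from ddeutil.workflow.call import extract_call

# extract_call returns Callable[[], TagFunc]: call it once to obtain the
# tagged function, then call that function with its own parameters.
func = extract_call("tasks/el-postgres-to-delta@polars")()
result = func(source="postgres", sink="delta")  # hypothetical keyword arguments
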
ddeutil/workflow/conf.py CHANGED
@@ -38,6 +38,7 @@ __all__: TupleStr = (
     "SimLoad",
     "Loader",
     "config",
+    "glob_files",
 )
 
 
@@ -97,9 +98,9 @@ class Config(BaseConfig): # pragma: no cov
 
     # NOTE: Register
     @property
-    def regis_hook(self) -> list[str]:
-        regis_hook_str: str = env("CORE_REGISTRY", ".")
-        return [r.strip() for r in regis_hook_str.split(",")]
+    def regis_call(self) -> list[str]:
+        regis_call_str: str = env("CORE_REGISTRY", ".")
+        return [r.strip() for r in regis_call_str.split(",")]
 
     @property
     def regis_filter(self) -> list[str]:
@@ -129,16 +130,26 @@ class Config(BaseConfig): # pragma: no cov
        )
 
     @property
-    def enable_rotate_file(self) -> bool:
-        return str2bool(env("LOG_ENABLE_ROTATED_FILE", "false"))
+    def log_format_file(self) -> str:
+        return env(
+            "LOG_FORMAT_FILE",
+            (
+                "{datetime} ({process:5d}, {thread:5d}) {message:120s} "
+                "({filename}:{lineno})"
+            ),
+        )
+
+    @property
+    def enable_write_log(self) -> bool:
+        return str2bool(env("LOG_ENABLE_WRITE", "false"))
 
     # NOTE: Audit Log
     @property
     def audit_path(self) -> Path:
-        return Path(env("AUDIT_PATH", "./logs"))
+        return Path(env("AUDIT_PATH", "./audits"))
 
     @property
-    def enable_write_log(self) -> bool:
+    def enable_write_audit(self) -> bool:
         return str2bool(env("AUDIT_ENABLE_WRITE", "false"))
 
     @property
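
The audit switch is now read from enable_write_audit, while enable_write_log gates the new file trace output. A sketch of the split, assuming the env() helper maps these keys to WORKFLOW_-prefixed environment variables (an assumption; this diff only confirms the key suffixes):

import os

# Assumed full variable names; only the suffixes AUDIT_ENABLE_WRITE,
# AUDIT_PATH, and LOG_ENABLE_WRITE are confirmed by this diff.
os.environ["WORKFLOW_AUDIT_ENABLE_WRITE"] = "true"
os.environ["WORKFLOW_LOG_ENABLE_WRITE"] = "false"

from ddeutil.workflow.conf import config

print(config.enable_write_audit)  # audit persistence (was enable_write_log)
print(config.enable_write_log)    # new flag for writing trace log files
print(config.audit_path)          # default moved from ./logs to ./audits
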
@@ -254,18 +265,22 @@ class SimLoad:
         conf_path: Path,
         externals: DictData | None = None,
     ) -> None:
+        self.conf_path: Path = conf_path
+        self.externals: DictData = externals or {}
+
         self.data: DictData = {}
         for file in glob_files(conf_path):
 
-            if data := self.filter_suffix(file, name):
+            if self.is_ignore(file, conf_path):
+                continue
+
+            if data := self.filter_suffix(file, name=name):
                 self.data = data
 
         # VALIDATE: check the data that reading should not empty.
         if not self.data:
             raise ValueError(f"Config {name!r} does not found on conf path")
 
-        self.conf_path: Path = conf_path
-        self.externals: DictData = externals or {}
         self.data.update(self.externals)
 
     @classmethod
@@ -283,8 +298,10 @@ class SimLoad:
 
         :param obj: An object that want to validate matching before return.
         :param conf_path: A config object.
-        :param included:
-        :param excluded:
+        :param included: An excluded list of data key that want to reject this
+            data if any key exist.
+        :param excluded: An included list of data key that want to filter from
+            data.
 
         :rtype: Iterator[tuple[str, DictData]]
         """
@@ -293,6 +310,9 @@ class SimLoad:
 
             for key, data in cls.filter_suffix(file).items():
 
+                if cls.is_ignore(file, conf_path):
+                    continue
+
                 if key in exclude:
                     continue
 
@@ -303,11 +323,26 @@ class SimLoad:
                     else data
                 )
 
+    @classmethod
+    def is_ignore(cls, file: Path, conf_path: Path) -> bool:
+        ignore_file: Path = conf_path / ".confignore"
+        ignore: list[str] = []
+        if ignore_file.exists():
+            ignore = ignore_file.read_text(encoding="utf-8").splitlines()
+
+        if any(
+            (file.match(f"**/{pattern}/*") or file.match(f"**/{pattern}*"))
+            for pattern in ignore
+        ):
+            return True
+        return False
+
     @classmethod
     def filter_suffix(cls, file: Path, name: str | None = None) -> DictData:
         if any(file.suffix.endswith(s) for s in (".yml", ".yaml")):
             values: DictData = YamlFlResolve(file).read()
             return values.get(name, {}) if name else values
+
         return {}
 
     @cached_property
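
The new is_ignore hook lets a conf path carry a .confignore file whose lines are matched against each config file path. A small sketch of the matching rule as implemented above (the directory layout and patterns are hypothetical):

from pathlib import Path

# Lines that would live in <conf_path>/.confignore, one pattern per line.
patterns = ["archive", "*.draft.yml"]

file = Path("conf/archive/old-workflow.yml")
ignored = any(
    file.match(f"**/{p}/*") or file.match(f"**/{p}*") for p in patterns
)
print(ignored)  # True -> SimLoad skips this file while loading configs
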