ddeutil-workflow 0.0.55__py3-none-any.whl → 0.0.56__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/__about__.py CHANGED
@@ -1 +1 @@
- __version__: str = "0.0.55"
+ __version__: str = "0.0.56"
ddeutil/workflow/__init__.py CHANGED
@@ -7,16 +7,18 @@ from .__cron import CronJob, CronRunner
  from .__types import DictData, DictStr, Matrix, Re, TupleStr
  from .conf import (
      Config,
-     Loader,
+     FileLoad,
      config,
      env,
  )
- from .cron import *
+ from .event import *
  from .exceptions import *
  from .job import *
  from .logs import (
      Audit,
      AuditModel,
+     FileAudit,
+     FileTrace,
      Trace,
      TraceData,
      TraceMeta,
ddeutil/workflow/__main__.py ADDED
@@ -0,0 +1,30 @@
+ import typer
+
+ app = typer.Typer()
+
+
+ @app.callback()
+ def callback():
+     """
+     Awesome Portal Gun
+     """
+
+
+ @app.command()
+ def provision():
+     """
+     Shoot the portal gun
+     """
+     typer.echo("Shooting portal gun")
+
+
+ @app.command()
+ def job():
+     """
+     Load the portal gun
+     """
+     typer.echo("Loading portal gun")
+
+
+ if __name__ == "__main__":
+     app()
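This new `__main__.py` backs the `workflow-cli` console script declared in the `entry_points.txt` file added at the end of this diff (the command bodies are Typer's stock placeholder demo). A minimal sketch of driving the app in-process, assuming `typer` is installed:

```python
from typer.testing import CliRunner

from ddeutil.workflow.__main__ import app

runner = CliRunner()

# Run the `provision` sub-command without spawning a subprocess.
result = runner.invoke(app, ["provision"])
print(result.output)  # expected: "Shooting portal gun"
```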
ddeutil/workflow/conf.py CHANGED
@@ -7,24 +7,26 @@ from __future__ import annotations
 
  import json
  import os
+ from abc import ABC, abstractmethod
  from collections.abc import Iterator
  from datetime import timedelta
  from functools import cached_property
+ from inspect import isclass
  from pathlib import Path
- from typing import Final, Optional, TypeVar
+ from typing import Final, Optional, Protocol, TypeVar, Union
  from zoneinfo import ZoneInfo
 
  from ddeutil.core import str2bool
  from ddeutil.io import YamlFlResolve
  from ddeutil.io.paths import glob_files, is_ignored, read_ignore
 
- from .__types import DictData, TupleStr
+ from .__types import DictData
 
  T = TypeVar("T")
  PREFIX: Final[str] = "WORKFLOW"
 
 
- def env(var: str, default: str | None = None) -> str | None:  # pragma: no cov
+ def env(var: str, default: str | None = None) -> str | None:
      """Get environment variable with uppercase and adding prefix string.
 
      :param var: (str) A env variable name.
@@ -35,17 +37,6 @@ def env(var: str, default: str | None = None) -> str | None: # pragma: no cov
      return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)
 
 
- __all__: TupleStr = (
-     "api_config",
-     "env",
-     "Config",
-     "SimLoad",
-     "Loader",
-     "config",
-     "dynamic",
- )
-
-
  class Config:  # pragma: no cov
      """Config object for keeping core configurations on the current session
      without changing when if the application still running.
@@ -188,7 +179,7 @@ class Config: # pragma: no cov
              return timedelta(**json.loads(stop_boundary_delta_str))
          except Exception as err:
              raise ValueError(
-                 "Config ``WORKFLOW_APP_STOP_BOUNDARY_DELTA`` can not parsing to"
+                 "Config `WORKFLOW_APP_STOP_BOUNDARY_DELTA` can not parsing to"
                  f"timedelta with {stop_boundary_delta_str}."
              ) from err
 
@@ -209,110 +200,194 @@ class APIConfig:
          return str2bool(env("API_ENABLE_ROUTE_SCHEDULE", "true"))
 
 
- class SimLoad:
-     """Simple Load Object that will search config data by given some identity
-     value like name of workflow or on.
+ class BaseLoad(ABC):
+
+     @classmethod
+     @abstractmethod
+     def find(cls, name: str, *args, **kwargs) -> DictData: ...
+
+     @classmethod
+     @abstractmethod
+     def finds(
+         cls, obj: object, *args, **kwargs
+     ) -> Iterator[tuple[str, DictData]]: ...
 
-     :param name: A name of config data that will read by Yaml Loader object.
-     :param conf_path: A config path object.
-     :param externals: An external parameters
+
+ class FileLoad(BaseLoad):
+     """Base Load object that use to search config data by given some identity
+     value like name of `Workflow` or `On` templates.
+
+     :param name: (str) A name of key of config data that read with YAML
+         Environment object.
+     :param path: (Path) A config path object.
+     :param externals: (DictData) An external config data that want to add to
+         loaded config data.
+     :param extras: (DictDdata) An extra parameters that use to override core
+         config values.
+
+     :raise ValueError: If the data does not find on the config path with the
+         name parameter.
 
      Noted:
-         The config data should have ``type`` key for modeling validation that
+         The config data should have `type` key for modeling validation that
          make this loader know what is config should to do pass to.
 
          ... <identity-key>:
          ...     type: <importable-object>
          ...     <key-data-1>: <value-data-1>
          ...     <key-data-2>: <value-data-2>
+
+     This object support multiple config paths if you pass the `conf_paths`
+     key to the `extras` parameter.
      """
 
      def __init__(
          self,
          name: str,
-         conf_path: Path,
+         *,
+         path: Optional[Union[str, Path]] = None,
          externals: DictData | None = None,
+         extras: DictData | None = None,
      ) -> None:
-         self.conf_path: Path = conf_path
+         self.path: Path = Path(dynamic("conf_path", f=path, extras=extras))
          self.externals: DictData = externals or {}
-
-         self.data: DictData = {}
-         for file in glob_files(conf_path):
-
-             if self.is_ignore(file, conf_path):
-                 continue
-
-             if data := self.filter_yaml(file, name=name):
-                 self.data = data
+         self.extras: DictData = extras or {}
+         self.data: DictData = self.find(
+             name,
+             path=path,
+             paths=self.extras.get("conf_paths"),
+             extras=extras,
+         )
 
          # VALIDATE: check the data that reading should not empty.
          if not self.data:
              raise ValueError(
-                 f"Config {name!r} does not found on conf path: "
-                 f"{self.conf_path}."
+                 f"Config {name!r} does not found on the conf path: {self.path}."
              )
 
          self.data.update(self.externals)
 
+     @classmethod
+     def find(
+         cls,
+         name: str,
+         *,
+         path: Optional[Path] = None,
+         paths: Optional[list[Path]] = None,
+         extras: Optional[DictData] = None,
+     ) -> DictData:
+         """Find data with specific key and return the latest modify date data if
+         this key exists multiple files.
+
+         :param name: (str) A name of data that want to find.
+         :param path: (Path) A config path object.
+         :param paths: (list[Path]) A list of config path object.
+         :param extras: (DictData) An extra parameter that use to override core
+             config values.
+
+         :rtype: DictData
+         """
+         path: Path = dynamic("conf_path", f=path, extras=extras)
+         if not paths:
+             paths: list[Path] = [path]
+         elif not isinstance(paths, list):
+             raise TypeError(
+                 f"Multi-config paths does not support for type: {type(paths)}"
+             )
+         else:
+             paths.append(path)
+
+         all_data: list[tuple[float, DictData]] = []
+         for path in paths:
+             for file in glob_files(path):
+
+                 if cls.is_ignore(file, path):
+                     continue
+
+                 if data := cls.filter_yaml(file, name=name):
+                     all_data.append((file.lstat().st_mtime, data))
+
+         return {} if not all_data else max(all_data, key=lambda x: x[0])[1]
+
      @classmethod
      def finds(
          cls,
          obj: object,
-         conf_path: Path,
          *,
-         included: list[str] | None = None,
+         path: Optional[Path] = None,
+         paths: Optional[list[Path]] = None,
          excluded: list[str] | None = None,
+         extras: Optional[DictData] = None,
      ) -> Iterator[tuple[str, DictData]]:
          """Find all data that match with object type in config path. This class
          method can use include and exclude list of identity name for filter and
          adds-on.
 
          :param obj: An object that want to validate matching before return.
-         :param conf_path: A config object.
-         :param included: An excluded list of data key that want to reject this
-             data if any key exist.
+         :param path: A config path object.
+         :param paths: (list[Path]) A list of config path object.
          :param excluded: An included list of data key that want to filter from
              data.
+         :param extras: (DictData) An extra parameter that use to override core
+             config values.
 
          :rtype: Iterator[tuple[str, DictData]]
          """
-         exclude: list[str] = excluded or []
-         for file in glob_files(conf_path):
+         excluded: list[str] = excluded or []
+         path: Path = dynamic("conf_path", f=path, extras=extras)
+         if not paths:
+             paths: list[Path] = [path]
+         else:
+             paths.append(path)
+
+         all_data: dict[str, list[tuple[float, DictData]]] = {}
+         for path in paths:
+             for file in glob_files(path):
+
+                 if cls.is_ignore(file, path):
+                     continue
 
-             if cls.is_ignore(file, conf_path):
-                 continue
+                 for key, data in cls.filter_yaml(file).items():
 
-             for key, data in cls.filter_yaml(file).items():
+                     if key in excluded:
+                         continue
 
-                 if key in exclude:
-                     continue
+                     if (
+                         data.get("type", "")
+                         == (obj if isclass(obj) else obj.__class__).__name__
+                     ):
+                         marking: tuple[float, DictData] = (
+                             file.lstat().st_mtime,
+                             data,
+                         )
+                         if key in all_data:
+                             all_data[key].append(marking)
+                         else:
+                             all_data[key] = [marking]
 
-                 if data.get("type", "") == obj.__name__:
-                     yield key, (
-                         {k: data[k] for k in data if k in included}
-                         if included
-                         else data
-                     )
+         for key in all_data:
+             yield key, max(all_data[key], key=lambda x: x[0])[1]
 
      @classmethod
      def is_ignore(
          cls,
          file: Path,
-         conf_path: Path,
+         path: Path,
          *,
          ignore_filename: Optional[str] = None,
      ) -> bool:
          """Check this file was ignored.
 
          :param file: (Path) A file path that want to check.
-         :param conf_path: (Path) A config path that want to read the config
+         :param path: (Path) A config path that want to read the config
              ignore file.
-         :param ignore_filename: (str) An ignore filename.
+         :param ignore_filename: (str) An ignore filename. Default is
+             `.confignore` filename.
 
          :rtype: bool
          """
          ignore_filename: str = ignore_filename or ".confignore"
-         return is_ignored(file, read_ignore(conf_path / ignore_filename))
+         return is_ignored(file, read_ignore(path / ignore_filename))
 
      @classmethod
      def filter_yaml(cls, file: Path, name: str | None = None) -> DictData:
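The headline change in `conf.py`: the eager `SimLoad` scan is replaced by the `BaseLoad`/`FileLoad` pair, and `FileLoad.find` resolves a key across several config directories, keeping the copy from the most recently modified file. A usage sketch, assuming the package is installed; the `demo-workflow` key and the `conf-override` directory are hypothetical:

```python
from pathlib import Path

from ddeutil.workflow.conf import FileLoad

# Search `demo-workflow` in the default conf path plus an extra directory;
# if both define the key, the newest file (by st_mtime) wins.
load = FileLoad(
    "demo-workflow",
    extras={"conf_paths": [Path("conf-override")]},
)
print(load.data)
```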
@@ -369,44 +444,19 @@ def dynamic(
      return rsx if rsx is not None else rs
 
 
- class Loader(SimLoad):
-     """Loader Object that get the config `yaml` file from current path.
+ class Loader(Protocol):  # pragma: no cov
+     type: str
+     path: Path
+     data: DictData
+     extras: DictData
+     externals: DictData
 
-     :param name: (str) A name of config data that will read by Yaml Loader object.
-     :param externals: (DictData) An external parameters
-     """
+     def __init__(self, *args, **kwargs) -> None: ...
 
      @classmethod
-     def finds(
-         cls,
-         obj: object,
-         *,
-         path: Path | None = None,
-         included: list[str] | None = None,
-         excluded: list[str] | None = None,
-         **kwargs,
-     ) -> Iterator[tuple[str, DictData]]:
-         """Override the find class method from the Simple Loader object.
+     def find(cls, name: str, *args, **kwargs) -> DictData: ...
 
-         :param obj: An object that want to validate matching before return.
-         :param path: (Path) A override config path.
-         :param included: An excluded list of data key that want to reject this
-             data if any key exist.
-         :param excluded: An included list of data key that want to filter from
-             data.
-
-         :rtype: Iterator[tuple[str, DictData]]
-         """
-         return super().finds(
-             obj=obj,
-             conf_path=(path or config.conf_path),
-             included=included,
-             excluded=excluded,
-         )
-
-     def __init__(self, name: str, externals: DictData) -> None:
-         super().__init__(
-             name,
-             conf_path=dynamic("conf_path", extras=externals),
-             externals=externals,
-         )
+     @classmethod
+     def finds(
+         cls, obj: object, *args, **kwargs
+     ) -> Iterator[tuple[str, DictData]]: ...
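`Loader` is no longer a concrete subclass but a structural `Protocol`, so any class that exposes these attributes plus `find`/`finds` can be swapped in (for example through the `loader` argument of `Workflow.from_conf` later in this diff). A sketch of a hypothetical in-memory loader that satisfies the protocol; none of these names are in the package:

```python
from collections.abc import Iterator
from pathlib import Path

from ddeutil.workflow.__types import DictData

_STORE: DictData = {"demo": {"type": "Workflow", "jobs": {}}}


class MemoryLoad:
    """Hypothetical loader; matches the Loader protocol structurally."""

    def __init__(self, name: str, *args, **kwargs) -> None:
        self.path: Path = Path(".")
        self.extras: DictData = kwargs.get("extras") or {}
        self.externals: DictData = {}
        self.data: DictData = self.find(name)
        self.type: str = self.data.get("type", "")

    @classmethod
    def find(cls, name: str, *args, **kwargs) -> DictData:
        # Look the key up in the in-memory store instead of YAML files.
        return _STORE.get(name, {})

    @classmethod
    def finds(cls, obj: object, *args, **kwargs) -> Iterator[tuple[str, DictData]]:
        yield from _STORE.items()
```

Because the protocol is structural (and not marked `runtime_checkable` in this diff), conformance is checked statically, not with `isinstance`.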
ddeutil/workflow/cron.py → ddeutil/workflow/event.py RENAMED
@@ -3,11 +3,14 @@
  # Licensed under the MIT License. See LICENSE in the project root for
  # license information.
  # ------------------------------------------------------------------------------
+ """Event module that store all event object. Now, it has only `On` and `OnYear`
+ model these are schedule with crontab event.
+ """
  from __future__ import annotations
 
  from dataclasses import fields
  from datetime import datetime
- from typing import Annotated, Literal, Union
+ from typing import Annotated, Any, Literal, Union
  from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
 
  from pydantic import BaseModel, ConfigDict, Field, ValidationInfo
@@ -17,11 +20,13 @@ from typing_extensions import Self
 
  from .__cron import WEEKDAYS, CronJob, CronJobYear, CronRunner, Options
  from .__types import DictData, DictStr
- from .conf import Loader
+ from .conf import FileLoad
+
+ Interval = Literal["daily", "weekly", "monthly"]
 
 
  def interval2crontab(
-     interval: Literal["daily", "weekly", "monthly"],
+     interval: Interval,
      *,
      day: str | None = None,
      time: str = "00:00",
@@ -59,10 +64,11 @@ def interval2crontab(
 
 
  class On(BaseModel):
-     """On Pydantic model (Warped crontab object by model).
+     """On model (Warped crontab object by Pydantic model) to keep crontab value
+     and generate CronRunner object from this crontab value.
 
-     See Also:
-         * `generate()` is the main use-case of this schedule object.
+     Methods:
+         - generate: is the main use-case of this schedule object.
      """
 
      model_config = ConfigDict(arbitrary_types_allowed=True)
@@ -90,19 +96,24 @@ class On(BaseModel):
              description="A timezone string value",
              alias="timezone",
          ),
-     ] = "Etc/UTC"
+     ] = "UTC"
 
      @classmethod
      def from_value(cls, value: DictStr, extras: DictData) -> Self:
          """Constructor from values that will generate crontab by function.
 
-         :param value: A mapping value that will generate crontab before create
-             schedule model.
-         :param extras: An extras parameter that will keep in extras.
+         :param value: (DictStr) A mapping value that will generate crontab
+             before create schedule model.
+         :param extras: (DictData) An extra parameter that use to override core
+             config value.
          """
          passing: DictStr = {}
+
          if "timezone" in value:
              passing["tz"] = value.pop("timezone")
+         elif "tz" in value:
+             passing["tz"] = value.pop("tz")
+
          passing["cronjob"] = interval2crontab(
              **{v: value[v] for v in value if v in ("interval", "day", "time")}
          )
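`On.from_value` now normalizes both spellings of the timezone key, so callers can pass either `timezone` or `tz` (and the default `tz` changes from `"Etc/UTC"` to `"UTC"`). A sketch, assuming the package is installed; the field values are illustrative:

```python
from ddeutil.workflow.event import On

# Both `timezone` and `tz` keys are accepted and routed to the model's tz.
schedule = On.from_value(
    {"interval": "daily", "time": "01:30", "tz": "UTC"},
    extras={},
)
print(schedule.cronjob)
```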
@@ -112,18 +123,20 @@ class On(BaseModel):
      def from_conf(
          cls,
          name: str,
+         *,
          extras: DictData | None = None,
      ) -> Self:
-         """Constructor from the name of config that will use loader object for
-         getting the data.
+         """Constructor from the name of config loader that will use loader
+         object for getting the `On` data.
 
-         :param name: A name of config that will get from loader.
-         :param extras: An extra parameter that will keep in extras.
+         :param name: (str) A name of config that will get from loader.
+         :param extras: (DictData) An extra parameter that use to override core
+             config values.
 
          :rtype: Self
          """
          extras: DictData = extras or {}
-         loader: Loader = Loader(name, externals=extras)
+         loader: FileLoad = FileLoad(name, extras=extras)
 
          # NOTE: Validate the config type match with current connection model
          if loader.type != cls.__name__:
@@ -155,17 +168,17 @@ class On(BaseModel):
          )
 
      @model_validator(mode="before")
-     def __prepare_values(cls, values: DictData) -> DictData:
+     def __prepare_values(cls, data: Any) -> Any:
          """Extract tz key from value and change name to timezone key.
 
-         :param values: (DictData) A data that want to pass for create an On
+         :param data: (DictData) A data that want to pass for create an On
              model.
 
          :rtype: DictData
          """
-         if tz := values.pop("tz", None):
-             values["timezone"] = tz
-         return values
+         if isinstance(data, dict) and (tz := data.pop("tz", None)):
+             data["timezone"] = tz
+         return data
 
      @field_validator("tz")
      def __validate_tz(cls, value: str) -> str:
@@ -238,6 +251,9 @@ class On(BaseModel):
          """Return a next datetime from Cron runner object that start with any
          date that given from input.
 
+         :param start: (str | datetime) A start datetime that use to generate
+             the CronRunner object.
+
          :rtype: CronRunner
          """
          runner: CronRunner = self.generate(start=start)
ddeutil/workflow/exceptions.py CHANGED
@@ -22,7 +22,12 @@ ErrorData = TypedDict(
 
 
  def to_dict(exception: Exception) -> ErrorData:  # pragma: no cov
-     """Create dict data from exception instance."""
+     """Create dict data from exception instance.
+
+     :param exception: An exception object.
+
+     :rtype: ErrorData
+     """
      return {
          "class": exception,
          "name": exception.__class__.__name__,
@@ -33,6 +38,10 @@ def to_dict(exception: Exception) -> ErrorData: # pragma: no cov
  class BaseWorkflowException(Exception):
 
      def to_dict(self) -> ErrorData:
+         """Return ErrorData data from the current exception object.
+
+         :rtype: ErrorData
+         """
          return to_dict(self)
 
 
ddeutil/workflow/job.py CHANGED
@@ -140,14 +140,19 @@ class Strategy(BaseModel):
 
      fail_fast: bool = Field(
          default=False,
+         description=(
+             "A fail-fast flag that use to cancel strategy execution when it "
+             "has some execution was failed."
+         ),
          alias="fail-fast",
      )
      max_parallel: int = Field(
          default=1,
          gt=0,
+         lt=10,
          description=(
              "The maximum number of executor thread pool that want to run "
-             "parallel"
+             "parallel. This value should gather than 0 and less than 10."
          ),
          alias="max-parallel",
      )
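`max_parallel` now carries an upper bound (`lt=10`) in addition to `gt=0`, so pydantic rejects values of 10 or more at model construction. A standalone sketch of the same constraint, assuming only pydantic v2; `StrategySketch` is an illustrative stand-in, not the package's class:

```python
from pydantic import BaseModel, Field, ValidationError


class StrategySketch(BaseModel):
    """Illustrative model mirroring the new `gt=0, lt=10` bound."""

    max_parallel: int = Field(default=1, gt=0, lt=10, alias="max-parallel")


print(StrategySketch(**{"max-parallel": 9}).max_parallel)  # 9 is accepted

try:
    StrategySketch(**{"max-parallel": 10})  # out of range: must be < 10
except ValidationError as err:
    print(err.errors()[0]["type"])  # "less_than"
```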
ddeutil/workflow/scheduler.py CHANGED
@@ -34,7 +34,7 @@ from heapq import heappop, heappush
  from pathlib import Path
  from textwrap import dedent
  from threading import Thread
- from typing import Callable, Optional, TypedDict, Union
+ from typing import Any, Callable, Optional, TypedDict, Union
 
  from pydantic import BaseModel, Field, ValidationInfo
  from pydantic.functional_validators import field_validator, model_validator
@@ -52,8 +52,8 @@ except ImportError: # pragma: no cov
 
  from .__cron import CronRunner
  from .__types import DictData, TupleStr
- from .conf import Loader, SimLoad, dynamic
- from .cron import On
+ from .conf import FileLoad, Loader, dynamic
+ from .event import On
  from .exceptions import ScheduleException, WorkflowException
  from .logs import Audit, get_audit
  from .result import SUCCESS, Result
@@ -113,21 +113,15 @@ class ScheduleWorkflow(BaseModel):
      )
 
      @model_validator(mode="before")
-     def __prepare_before__(cls, values: DictData) -> DictData:
-         """Prepare incoming values before validating with model fields.
-
-         :rtype: DictData
-         """
-         # VALIDATE: Prepare a workflow name that should not include space.
-         if name := values.get("name"):
-             values["name"] = name.replace(" ", "_")
-
-         # VALIDATE: Add default the alias field with the name.
-         if not values.get("alias"):
-             values["alias"] = values.get("name")
-
-         cls.__bypass_on(values, extras=values.get("extras"))
-         return values
+     def __prepare_before__(cls, data: Any) -> Any:
+         """Prepare incoming values before validating with model fields."""
+         if isinstance(data, dict):
+             # VALIDATE: Add default the alias field with the name.
+             if "alias" not in data:
+                 data["alias"] = data.get("name")
+
+             cls.__bypass_on(data, extras=data.get("extras"))
+         return data
 
      @classmethod
      def __bypass_on(
@@ -135,8 +129,10 @@ class ScheduleWorkflow(BaseModel):
      ) -> DictData:
          """Bypass and prepare the on data to loaded config data.
 
-         :param data: A data that want to validate for model initialization.
-         :param extras: An extra parameter that want to override core config.
+         :param data: (DictData) A data that want to validate for the model
+             initialization.
+         :param extras: (DictData) An extra parameter that want to override core
+             config values.
 
          :rtype: DictData
          """
@@ -151,7 +147,7 @@ class ScheduleWorkflow(BaseModel):
              # NOTE: Pass on value to Loader and keep on model object to on
              # field.
              data["on"] = [
-                 Loader(n, externals=extras).data if isinstance(n, str) else n
+                 FileLoad(n, externals=extras).data if isinstance(n, str) else n
                  for n in on
              ]
 
@@ -162,6 +158,10 @@ class ScheduleWorkflow(BaseModel):
          """Validate the on fields should not contain duplicate values and if it
          contains every minute value, it should have only one on value.
 
+         :param value: (list[On]) A list of `On` object.
+         :param info: (ValidationInfo) An validation info object for getting an
+             extra parameter.
+
          :rtype: list[On]
          """
          set_ons: set[str] = {str(on.cronjob) for on in value}
@@ -191,23 +191,22 @@ class ScheduleWorkflow(BaseModel):
          This task creation need queue to tracking release date already
          mapped or not.
 
-         :param start_date: A start date that get from the workflow schedule.
-         :param queue: A mapping of name and list of datetime for queue.
+         :param start_date: (datetime) A start datetime that get from the
+             workflow schedule.
+         :param queue: (dict[str, ReleaseQueue]) A mapping of name and list of
+             datetime for queue.
 
          :rtype: list[WorkflowTask]
          :return: Return the list of WorkflowTask object from the specific
              input datetime that mapping with the on field.
          """
-         workflow_tasks: list[WorkflowTask] = []
-
-         # NOTE: Loading workflow model from the name of workflow.
          wf: Workflow = Workflow.from_conf(self.name, extras=self.extras)
          wf_queue: ReleaseQueue = queue[self.alias]
 
          # IMPORTANT: Create the default 'on' value if it does not pass the `on`
          # field to the Schedule object.
          ons: list[On] = self.on or wf.on.copy()
-
+         workflow_tasks: list[WorkflowTask] = []
          for on in ons:
 
              # NOTE: Create CronRunner instance from the start_date param.
@@ -250,7 +249,7 @@ class Schedule(BaseModel):
      )
      workflows: list[ScheduleWorkflow] = Field(
          default_factory=list,
-         description="A list of ScheduleWorkflow models.",
+         description="A list of ScheduleWorkflow model.",
      )
 
      @field_validator("desc", mode="after")
@@ -267,6 +266,8 @@ class Schedule(BaseModel):
      def from_conf(
          cls,
          name: str,
+         *,
+         path: Optional[Path] = None,
          extras: DictData | None = None,
      ) -> Self:
          """Create Schedule instance from the Loader object that only receive
@@ -274,6 +275,7 @@ class Schedule(BaseModel):
          searching configuration data of this schedule model in conf path.
 
          :param name: (str) A schedule name that want to pass to Loader object.
+         :param path: (Path) An override config path.
          :param extras: An extra parameters that want to pass to Loader
              object.
 
@@ -281,55 +283,14 @@ class Schedule(BaseModel):
 
          :rtype: Self
          """
-         loader: Loader = Loader(name, externals=(extras or {}))
+         loader: Loader = FileLoad(name, path=path, extras=extras)
 
          # NOTE: Validate the config type match with current connection model
          if loader.type != cls.__name__:
              raise ValueError(f"Type {loader.type} does not match with {cls}")
 
          loader_data: DictData = copy.deepcopy(loader.data)
-
-         # NOTE: Add name to loader data
-         loader_data["name"] = name.replace(" ", "_")
-
-         if extras:
-             loader_data["extras"] = extras
-
-         return cls.model_validate(obj=loader_data)
-
-     @classmethod
-     def from_path(
-         cls,
-         name: str,
-         path: Path,
-         extras: DictData | None = None,
-     ) -> Self:
-         """Create Schedule instance from the SimLoad object that receive an
-         input schedule name and conf path. The loader object will use this
-         schedule name to searching configuration data of this schedule model
-         in conf path.
-
-         :param name: (str) A schedule name that want to pass to Loader object.
-         :param path: (Path) A config path that want to search.
-         :param extras: An external parameters that want to pass to Loader
-             object.
-
-         :raise ValueError: If the type does not match with current object.
-
-         :rtype: Self
-         """
-         loader: SimLoad = SimLoad(
-             name, conf_path=path, externals=(extras or {})
-         )
-
-         # NOTE: Validate the config type match with current connection model
-         if loader.type != cls.__name__:
-             raise ValueError(f"Type {loader.type} does not match with {cls}")
-
-         loader_data: DictData = copy.deepcopy(loader.data)
-
-         # NOTE: Add name to loader data
-         loader_data["name"] = name.replace(" ", "_")
+         loader_data["name"] = name
 
          if extras:
              loader_data["extras"] = extras
ddeutil/workflow/workflow.py CHANGED
@@ -28,7 +28,7 @@ from pathlib import Path
  from queue import Queue
  from textwrap import dedent
  from threading import Event
- from typing import Optional
+ from typing import Any, Optional, Union
 
  from pydantic import BaseModel, ConfigDict, Field, ValidationInfo
  from pydantic.dataclasses import dataclass
@@ -37,8 +37,8 @@ from typing_extensions import Self
 
  from .__cron import CronJob, CronRunner
  from .__types import DictData, TupleStr
- from .conf import Loader, SimLoad, dynamic
- from .cron import On
+ from .conf import FileLoad, Loader, dynamic
+ from .event import On
  from .exceptions import JobException, UtilException, WorkflowException
  from .job import Job
  from .logs import Audit, get_audit
@@ -144,7 +144,7 @@ class Release:
          return NotImplemented
 
      def __lt__(self, other: Release | datetime) -> bool:
-         """Override equal property that will compare only the same type or
+         """Override less-than property that will compare only the same type or
          datetime.
 
          :rtype: bool
@@ -172,7 +172,7 @@ class ReleaseQueue:
      @classmethod
      def from_list(
          cls,
-         queue: list[datetime] | list[Release] | None = None,
+         queue: Optional[Union[list[datetime], list[Release]]] = None,
          extras: Optional[DictData] = None,
      ) -> Self:
          """Construct ReleaseQueue object from an input queue value that passing
@@ -190,7 +190,6 @@
              return cls()
 
          if isinstance(queue, list):
-
              if all(isinstance(q, datetime) for q in queue):
                  return cls(
                      queue=[
@@ -233,7 +232,11 @@
          )
 
      def mark_complete(self, value: Release) -> Self:
-         """Push Release to the complete queue.
+         """Push Release to the complete queue. After push the release, it will
+         delete old release base on the `CORE_MAX_QUEUE_COMPLETE_HIST` value.
+
+         :param value: (Release) A Release value that want to push to the
+             complete field.
 
          :rtype: Self
          """
@@ -262,7 +265,7 @@
          force_run: bool = False,
          extras: Optional[DictData] = None,
      ) -> Self:
-         """Generate Release model to queue.
+         """Generate a Release model to the queue field with an input CronRunner.
 
          Steps:
          - Create Release object from the current date that not reach the end
@@ -277,9 +280,10 @@
          :param runner: (CronRunner) A CronRunner object.
          :param name: (str) A target name that want to check at pointer of audit.
          :param offset: (float) An offset in second unit for time travel.
-         :param force_run: A flag that allow to release workflow if the audit
-             with that release was pointed.
-         :param extras: An extra parameter that want to override core config.
+         :param force_run: (bool) A flag that allow to release workflow if the
+             audit with that release was pointed. (Default is False).
+         :param extras: (DictDatA) An extra parameter that want to override core
+             config values.
 
          :rtype: ReleaseQueue
 
@@ -287,7 +291,7 @@
          if runner.date > end_date:
              return self
 
-         workflow_release = Release(
+         release = Release(
              date=runner.date,
              offset=offset,
              end_date=end_date,
@@ -295,13 +299,11 @@
              type=ReleaseType.POKE,
          )
 
-         while self.check_queue(workflow_release) or (
-             audit.is_pointed(
-                 name=name, release=workflow_release.date, extras=extras
-             )
+         while self.check_queue(release) or (
+             audit.is_pointed(name=name, release=release.date, extras=extras)
              and not force_run
          ):
-             workflow_release = Release(
+             release = Release(
                  date=runner.next,
                  offset=offset,
                  end_date=end_date,
@@ -312,12 +314,12 @@
              if runner.date > end_date:
                  return self
 
-         heappush(self.queue, workflow_release)
+         heappush(self.queue, release)
          return self
 
 
  class Workflow(BaseModel):
-     """Workflow Pydantic model.
+     """Workflow model that use to keep the `Job` and `On` models.
 
      This is the main future of this project because it uses to be workflow
      data for running everywhere that you want or using it to scheduler task in
@@ -355,6 +357,7 @@ class Workflow(BaseModel):
          cls,
          name: str,
          *,
+         path: Optional[Path] = None,
          extras: DictData | None = None,
          loader: type[Loader] = None,
      ) -> Self:
@@ -362,45 +365,8 @@ class Workflow(BaseModel):
          an input workflow name. The loader object will use this workflow name to
          searching configuration data of this workflow model in conf path.
 
-         :param name: A workflow name that want to pass to Loader object.
-         :param extras: An extra parameters that want to pass to Loader
-             object.
-         :param loader: A loader class for override default loader object.
-
-         :raise ValueError: If the type does not match with current object.
-
-         :rtype: Self
-         """
-         loader: Loader = (loader or Loader)(name, externals=(extras or {}))
-
-         # NOTE: Validate the config type match with current connection model
-         if loader.type != cls.__name__:
-             raise ValueError(f"Type {loader.type} does not match with {cls}")
-
-         loader_data: DictData = copy.deepcopy(loader.data)
-         loader_data["name"] = name.replace(" ", "_")
-
-         if extras:
-             loader_data["extras"] = extras
-
-         cls.__bypass_on__(loader_data, path=loader.conf_path, extras=extras)
-         return cls.model_validate(obj=loader_data)
-
-     @classmethod
-     def from_path(
-         cls,
-         name: str,
-         path: Path,
-         *,
-         extras: DictData | None = None,
-         loader: type[Loader] = None,
-     ) -> Self:
-         """Create Workflow instance from the specific path. The loader object
-         will use this workflow name and path to searching configuration data of
-         this workflow model.
-
          :param name: (str) A workflow name that want to pass to Loader object.
-         :param path: (Path) A config path that want to search.
+         :param path: (Path) An override config path.
          :param extras: (DictData) An extra parameters that want to override core
              config values.
          :param loader: A loader class for override default loader object.
@@ -409,21 +375,21 @@ class Workflow(BaseModel):
 
          :rtype: Self
          """
-         loader: SimLoad = (loader or SimLoad)(
-             name, conf_path=path, externals=(extras or {})
-         )
+         loader: type[Loader] = loader or FileLoad
+         load: Loader = loader(name, path=path, extras=extras)
+
          # NOTE: Validate the config type match with current connection model
-         if loader.type != cls.__name__:
-             raise ValueError(f"Type {loader.type} does not match with {cls}")
+         if load.type != cls.__name__:
+             raise ValueError(f"Type {load.type} does not match with {cls}")
 
-         loader_data: DictData = copy.deepcopy(loader.data)
-         loader_data["name"] = name.replace(" ", "_")
+         data: DictData = copy.deepcopy(load.data)
+         data["name"] = name
 
          if extras:
-             loader_data["extras"] = extras
+             data["extras"] = extras
 
-         cls.__bypass_on__(loader_data, path=path, extras=extras)
-         return cls.model_validate(obj=loader_data)
+         cls.__bypass_on__(data, path=load.path, extras=extras, loader=loader)
+         return cls.model_validate(obj=data)
 
      @classmethod
      def __bypass_on__(
@@ -431,6 +397,7 @@ class Workflow(BaseModel):
          data: DictData,
          path: Path,
          extras: DictData | None = None,
+         loader: type[Loader] = None,
      ) -> DictData:
          """Bypass the on data to loaded config data.
 
@@ -451,7 +418,7 @@
              # field.
              data["on"] = [
                  (
-                     SimLoad(n, conf_path=path, externals=(extras or {})).data
+                     (loader or FileLoad)(n, path=path, extras=extras).data
                      if isinstance(n, str)
                      else n
                  )
@@ -460,11 +427,10 @@
          return data
 
      @model_validator(mode="before")
-     def __prepare_model_before__(cls, values: DictData) -> DictData:
+     def __prepare_model_before__(cls, data: Any) -> Any:
          """Prepare the params key in the data model before validating."""
-         # NOTE: Prepare params type if it is passing with only type value.
-         if params := values.pop("params", {}):
-             values["params"] = {
+         if isinstance(data, dict) and (params := data.pop("params", {})):
+             data["params"] = {
                  p: (
                      {"type": params[p]}
                      if isinstance(params[p], str)
@@ -472,7 +438,7 @@
                  )
                  for p in params
              }
-         return values
+         return data
 
      @field_validator("desc", mode="after")
      def __dedent_desc__(cls, value: str) -> str:
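As with `Schedule`, `Workflow.from_path` collapses into `Workflow.from_conf(name, path=...)`, and the `loader` argument now accepts any class satisfying the `Loader` protocol from `conf.py`. A migration sketch; the `data-pipeline` name and `./conf` path are hypothetical:

```python
from pathlib import Path

from ddeutil.workflow import Workflow

# 0.0.55:
# wf = Workflow.from_path("data-pipeline", Path("./conf"))

# 0.0.56: one constructor covers both cases.
wf = Workflow.from_conf("data-pipeline", path=Path("./conf"))
```

The dropped `name.replace(" ", "_")` normalization applies here too: workflow names now pass through to the loader verbatim, so configs keyed with spaces resolve differently than in 0.0.55.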
ddeutil_workflow-0.0.56.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.55
+ Version: 0.0.56
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
ddeutil_workflow-0.0.56.dist-info/RECORD CHANGED
@@ -1,20 +1,20 @@
- ddeutil/workflow/__about__.py,sha256=kfTS6Gqr5xkMNDVKm-NTS4vmeIl2Zj4oMBQGmq19LHA,28
+ ddeutil/workflow/__about__.py,sha256=EXJHOyenQmmnoVgcw7W0m9HU1cg9EJV61611jGr4NDY,28
  ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
- ddeutil/workflow/__init__.py,sha256=noE8LNRcgq32m9OnIFcQqh0P7PXWdp-SGmvBCYIXgf4,1338
- ddeutil/workflow/__main__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ddeutil/workflow/__init__.py,sha256=NXEhjzKFdIGa-jtIq9HXChLCjSXNPd8VJ8ltggxbBO8,1371
+ ddeutil/workflow/__main__.py,sha256=x-sYedl4T8p6054aySk-EQX6vhytvPR0HvaBNYxMzp0,364
  ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4316
- ddeutil/workflow/conf.py,sha256=80rgmJKFU7BlH5xTLnghGzGhE8C6LFAQykd9mjHSjo8,12528
- ddeutil/workflow/cron.py,sha256=WS2MInn0Sp5DKlZDZH5VFZ5AA0Q3_AnBnYEU4lZSv4I,9779
- ddeutil/workflow/exceptions.py,sha256=r4Jrf9qtVPALU4wh4bnb_OYqC-StqSQJEmFC-_QK934,1408
- ddeutil/workflow/job.py,sha256=aVRWLMLv5vYFbckT6AKYrMu29FzXYESOEzDHhFIpUyo,34159
+ ddeutil/workflow/conf.py,sha256=JaTfglfdgGe7M-nb2NeRska95MDmYapwKuAlZfzVdr4,14425
+ ddeutil/workflow/event.py,sha256=qiUrkkVxOvYEFfxTWBsLCgYTyOWMY125abOPieY5Xqc,10319
+ ddeutil/workflow/exceptions.py,sha256=0MvjCRBUsHfOm1kzMiC4Y22vb1_sfvTU0wAW7xZwtAo,1587
+ ddeutil/workflow/job.py,sha256=T0zxzK682kYaNmVHTzNWeBrZu8QJ6J2eYCFlKto0vA4,34381
  ddeutil/workflow/logs.py,sha256=rsoBrUGQrooou18fg2yvPsB8NOaXnUA5ThQpBr_WVMg,26598
  ddeutil/workflow/params.py,sha256=FKY4Oo1Ze4QZKRfAk7rqKsi44YaJQAbqAtXM6vlO2hI,11392
  ddeutil/workflow/result.py,sha256=rI0S8-HanFDk1l6_BsYRRamzSfzKUy7bkKJUae1w_aQ,5708
  ddeutil/workflow/reusables.py,sha256=iXcS7Gg-71qVX4ln0ILTDx03cTtUnj_rNoXHTVdVrxc,17636
- ddeutil/workflow/scheduler.py,sha256=hk3-9R63DZH9J0PWbtBzaD8rqHyiOf03vTvAJDgsxTA,28279
+ ddeutil/workflow/scheduler.py,sha256=oVSNwZ-iyXFOGXhsltzaDy7GDQejI9GalMHxa8JRcro,27063
  ddeutil/workflow/stages.py,sha256=E5XoMVijjcvm_YK8AbiA8xGAQUphCPTtGazW-oLAdeI,82543
  ddeutil/workflow/utils.py,sha256=NZPvPPP_5g4cigFcD7tHjIKLtKMeYAcb3oUhNyhTpJ0,7947
- ddeutil/workflow/workflow.py,sha256=F3I_c0LHMoJFZIFt1a92sxFMlQVkE-cH--TLyX2rwuo,46799
+ ddeutil/workflow/workflow.py,sha256=vgVOwa79ZXWcOv2k6FG4I_FVuyswGwbglidWQJXSrsY,45739
  ddeutil/workflow/api/__init__.py,sha256=kY30dL8HPY8tY_GBmm7y_3OdoXzB1-EA2a96PLU0AQw,5278
  ddeutil/workflow/api/logs.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
  ddeutil/workflow/api/utils.py,sha256=uTtUFVLpiYYahXvCVx8sueRQ03K2Xw1id_gW3IMmX1U,5295
@@ -23,8 +23,9 @@ ddeutil/workflow/api/routes/job.py,sha256=8X5VLDJH6PumyNIY6JGRNBsf2gWN0eG9DzxRPS
  ddeutil/workflow/api/routes/logs.py,sha256=U6vOni3wd-ZTOwd3yVdSOpgyRmNdcgfngU5KlLM3Cww,5383
  ddeutil/workflow/api/routes/schedules.py,sha256=14RnaJKEGMSJtncI1H_QQVZNBe_jDS40PPRO6qFc3i0,4805
  ddeutil/workflow/api/routes/workflows.py,sha256=GJu5PiXEylswrXylEImpncySjeU9chrvrtjhiMCw2RQ,4529
- ddeutil_workflow-0.0.55.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
- ddeutil_workflow-0.0.55.dist-info/METADATA,sha256=sTjAMsv4yIgw-CKKHyr-sg_yrgnnDT_0UMF3E2TP-Js,19008
- ddeutil_workflow-0.0.55.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- ddeutil_workflow-0.0.55.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
- ddeutil_workflow-0.0.55.dist-info/RECORD,,
+ ddeutil_workflow-0.0.56.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ ddeutil_workflow-0.0.56.dist-info/METADATA,sha256=JYyoDdlPBgJukz3yFPseqjqTOwK7i2LejPgLhqkBXwo,19008
+ ddeutil_workflow-0.0.56.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ ddeutil_workflow-0.0.56.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
+ ddeutil_workflow-0.0.56.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ ddeutil_workflow-0.0.56.dist-info/RECORD,,
ddeutil_workflow-0.0.56.dist-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
+ [console_scripts]
+ workflow-cli = ddeutil.workflow.__main__:app