ddeutil-workflow 0.0.55__tar.gz → 0.0.56__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70):
  1. {ddeutil_workflow-0.0.55/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.56}/PKG-INFO +1 -1
  2. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/pyproject.toml +4 -1
  3. ddeutil_workflow-0.0.56/src/ddeutil/workflow/__about__.py +1 -0
  4. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/__init__.py +4 -2
  5. ddeutil_workflow-0.0.56/src/ddeutil/workflow/__main__.py +30 -0
  6. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/conf.py +144 -94
  7. ddeutil_workflow-0.0.55/src/ddeutil/workflow/cron.py → ddeutil_workflow-0.0.56/src/ddeutil/workflow/event.py +36 -20
  8. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/exceptions.py +10 -1
  9. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/job.py +6 -1
  10. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/scheduler.py +32 -71
  11. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/workflow.py +39 -73
  12. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56/src/ddeutil_workflow.egg-info}/PKG-INFO +1 -1
  13. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil_workflow.egg-info/SOURCES.txt +3 -2
  14. ddeutil_workflow-0.0.56/src/ddeutil_workflow.egg-info/entry_points.txt +2 -0
  15. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test__cron.py +0 -6
  16. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_conf.py +25 -38
  17. ddeutil_workflow-0.0.55/tests/test_cron_on.py → ddeutil_workflow-0.0.56/tests/test_event.py +32 -21
  18. ddeutil_workflow-0.0.56/tests/test_reusables_call_tag.py +121 -0
  19. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_schedule.py +6 -9
  20. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_schedule_workflow.py +1 -1
  21. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_stage.py +22 -27
  22. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_stage_handler_exec.py +67 -68
  23. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_utils.py +2 -3
  24. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_workflow.py +102 -115
  25. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_workflow_task.py +1 -5
  26. ddeutil_workflow-0.0.55/src/ddeutil/workflow/__about__.py +0 -1
  27. ddeutil_workflow-0.0.55/src/ddeutil/workflow/__main__.py +0 -0
  28. ddeutil_workflow-0.0.55/tests/test_reusables_call_tag.py +0 -252
  29. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/LICENSE +0 -0
  30. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/README.md +0 -0
  31. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/setup.cfg +0 -0
  32. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/__cron.py +0 -0
  33. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/__types.py +0 -0
  34. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/api/__init__.py +0 -0
  35. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/api/logs.py +0 -0
  36. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/api/routes/__init__.py +0 -0
  37. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/api/routes/job.py +0 -0
  38. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/api/routes/logs.py +0 -0
  39. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/api/routes/schedules.py +0 -0
  40. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/api/routes/workflows.py +0 -0
  41. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/api/utils.py +0 -0
  42. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/logs.py +0 -0
  43. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/params.py +0 -0
  44. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/result.py +0 -0
  45. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/reusables.py +0 -0
  46. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/stages.py +0 -0
  47. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil/workflow/utils.py +0 -0
  48. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  49. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
  50. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  51. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test__regex.py +0 -0
  52. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_job.py +0 -0
  53. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_job_exec.py +0 -0
  54. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_job_exec_strategy.py +0 -0
  55. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_logs_audit.py +0 -0
  56. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_logs_trace.py +0 -0
  57. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_params.py +0 -0
  58. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_release.py +0 -0
  59. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_release_queue.py +0 -0
  60. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_result.py +0 -0
  61. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_reusables_template.py +0 -0
  62. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_reusables_template_filter.py +0 -0
  63. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_schedule_pending.py +0 -0
  64. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_schedule_tasks.py +0 -0
  65. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_scheduler_control.py +0 -0
  66. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_strategy.py +0 -0
  67. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_workflow_exec.py +0 -0
  68. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_workflow_exec_job.py +0 -0
  69. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_workflow_exec_poke.py +0 -0
  70. {ddeutil_workflow-0.0.55 → ddeutil_workflow-0.0.56}/tests/test_workflow_exec_release.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ddeutil-workflow
3
- Version: 0.0.55
3
+ Version: 0.0.56
4
4
  Summary: Lightweight workflow orchestration
5
5
  Author-email: ddeutils <korawich.anu@gmail.com>
6
6
  License: MIT
@@ -57,6 +57,9 @@ docker = [
57
57
  Homepage = "https://github.com/ddeutils/ddeutil-workflow/"
58
58
  "Source Code" = "https://github.com/ddeutils/ddeutil-workflow/"
59
59
 
60
+ [project.scripts]
61
+ workflow-cli = "ddeutil.workflow.__main__:app"
62
+
60
63
  [tool.setuptools.dynamic]
61
64
  version = {attr = "ddeutil.workflow.__about__.__version__"}
62
65
 
@@ -81,7 +84,7 @@ omit = [
81
84
  "src/ddeutil/workflow/__cron.py",
82
85
  "src/ddeutil/workflow/api/__init__.py",
83
86
  "src/ddeutil/workflow/api/log.py",
84
- "src/ddeutil/workflow/api/repeat.py",
87
+ "src/ddeutil/workflow/api/utils.py",
85
88
  "src/ddeutil/workflow/api/routes/__init__.py",
86
89
  "src/ddeutil/workflow/api/routes/job.py",
87
90
  "src/ddeutil/workflow/api/routes/logs.py",
@@ -0,0 +1 @@
1
+ __version__: str = "0.0.56"
@@ -7,16 +7,18 @@ from .__cron import CronJob, CronRunner
7
7
  from .__types import DictData, DictStr, Matrix, Re, TupleStr
8
8
  from .conf import (
9
9
  Config,
10
- Loader,
10
+ FileLoad,
11
11
  config,
12
12
  env,
13
13
  )
14
- from .cron import *
14
+ from .event import *
15
15
  from .exceptions import *
16
16
  from .job import *
17
17
  from .logs import (
18
18
  Audit,
19
19
  AuditModel,
20
+ FileAudit,
21
+ FileTrace,
20
22
  Trace,
21
23
  TraceData,
22
24
  TraceMeta,
@@ -0,0 +1,30 @@
1
+ import typer
2
+
3
+ app = typer.Typer()
4
+
5
+
6
+ @app.callback()
7
+ def callback():
8
+ """
9
+ Awesome Portal Gun
10
+ """
11
+
12
+
13
+ @app.command()
14
+ def provision():
15
+ """
16
+ Shoot the portal gun
17
+ """
18
+ typer.echo("Shooting portal gun")
19
+
20
+
21
+ @app.command()
22
+ def job():
23
+ """
24
+ Load the portal gun
25
+ """
26
+ typer.echo("Loading portal gun")
27
+
28
+
29
+ if __name__ == "__main__":
30
+ app()
@@ -7,24 +7,26 @@ from __future__ import annotations
7
7
 
8
8
  import json
9
9
  import os
10
+ from abc import ABC, abstractmethod
10
11
  from collections.abc import Iterator
11
12
  from datetime import timedelta
12
13
  from functools import cached_property
14
+ from inspect import isclass
13
15
  from pathlib import Path
14
- from typing import Final, Optional, TypeVar
16
+ from typing import Final, Optional, Protocol, TypeVar, Union
15
17
  from zoneinfo import ZoneInfo
16
18
 
17
19
  from ddeutil.core import str2bool
18
20
  from ddeutil.io import YamlFlResolve
19
21
  from ddeutil.io.paths import glob_files, is_ignored, read_ignore
20
22
 
21
- from .__types import DictData, TupleStr
23
+ from .__types import DictData
22
24
 
23
25
  T = TypeVar("T")
24
26
  PREFIX: Final[str] = "WORKFLOW"
25
27
 
26
28
 
27
- def env(var: str, default: str | None = None) -> str | None: # pragma: no cov
29
+ def env(var: str, default: str | None = None) -> str | None:
28
30
  """Get environment variable with uppercase and adding prefix string.
29
31
 
30
32
  :param var: (str) A env variable name.
@@ -35,17 +37,6 @@ def env(var: str, default: str | None = None) -> str | None: # pragma: no cov
35
37
  return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)
36
38
 
37
39
 
38
- __all__: TupleStr = (
39
- "api_config",
40
- "env",
41
- "Config",
42
- "SimLoad",
43
- "Loader",
44
- "config",
45
- "dynamic",
46
- )
47
-
48
-
49
40
  class Config: # pragma: no cov
50
41
  """Config object for keeping core configurations on the current session
51
42
  without changing when if the application still running.
@@ -188,7 +179,7 @@ class Config: # pragma: no cov
188
179
  return timedelta(**json.loads(stop_boundary_delta_str))
189
180
  except Exception as err:
190
181
  raise ValueError(
191
- "Config ``WORKFLOW_APP_STOP_BOUNDARY_DELTA`` can not parsing to"
182
+ "Config `WORKFLOW_APP_STOP_BOUNDARY_DELTA` can not parsing to"
192
183
  f"timedelta with {stop_boundary_delta_str}."
193
184
  ) from err
194
185
 
@@ -209,110 +200,194 @@ class APIConfig:
209
200
  return str2bool(env("API_ENABLE_ROUTE_SCHEDULE", "true"))
210
201
 
211
202
 
212
- class SimLoad:
213
- """Simple Load Object that will search config data by given some identity
214
- value like name of workflow or on.
203
+ class BaseLoad(ABC):
204
+
205
+ @classmethod
206
+ @abstractmethod
207
+ def find(cls, name: str, *args, **kwargs) -> DictData: ...
208
+
209
+ @classmethod
210
+ @abstractmethod
211
+ def finds(
212
+ cls, obj: object, *args, **kwargs
213
+ ) -> Iterator[tuple[str, DictData]]: ...
215
214
 
216
- :param name: A name of config data that will read by Yaml Loader object.
217
- :param conf_path: A config path object.
218
- :param externals: An external parameters
215
+
216
+ class FileLoad(BaseLoad):
217
+ """Base Load object that use to search config data by given some identity
218
+ value like name of `Workflow` or `On` templates.
219
+
220
+ :param name: (str) A name of key of config data that read with YAML
221
+ Environment object.
222
+ :param path: (Path) A config path object.
223
+ :param externals: (DictData) An external config data that want to add to
224
+ loaded config data.
225
+ :param extras: (DictDdata) An extra parameters that use to override core
226
+ config values.
227
+
228
+ :raise ValueError: If the data does not find on the config path with the
229
+ name parameter.
219
230
 
220
231
  Noted:
221
- The config data should have ``type`` key for modeling validation that
232
+ The config data should have `type` key for modeling validation that
222
233
  make this loader know what is config should to do pass to.
223
234
 
224
235
  ... <identity-key>:
225
236
  ... type: <importable-object>
226
237
  ... <key-data-1>: <value-data-1>
227
238
  ... <key-data-2>: <value-data-2>
239
+
240
+ This object support multiple config paths if you pass the `conf_paths`
241
+ key to the `extras` parameter.
228
242
  """
229
243
 
230
244
  def __init__(
231
245
  self,
232
246
  name: str,
233
- conf_path: Path,
247
+ *,
248
+ path: Optional[Union[str, Path]] = None,
234
249
  externals: DictData | None = None,
250
+ extras: DictData | None = None,
235
251
  ) -> None:
236
- self.conf_path: Path = conf_path
252
+ self.path: Path = Path(dynamic("conf_path", f=path, extras=extras))
237
253
  self.externals: DictData = externals or {}
238
-
239
- self.data: DictData = {}
240
- for file in glob_files(conf_path):
241
-
242
- if self.is_ignore(file, conf_path):
243
- continue
244
-
245
- if data := self.filter_yaml(file, name=name):
246
- self.data = data
254
+ self.extras: DictData = extras or {}
255
+ self.data: DictData = self.find(
256
+ name,
257
+ path=path,
258
+ paths=self.extras.get("conf_paths"),
259
+ extras=extras,
260
+ )
247
261
 
248
262
  # VALIDATE: check the data that reading should not empty.
249
263
  if not self.data:
250
264
  raise ValueError(
251
- f"Config {name!r} does not found on conf path: "
252
- f"{self.conf_path}."
265
+ f"Config {name!r} does not found on the conf path: {self.path}."
253
266
  )
254
267
 
255
268
  self.data.update(self.externals)
256
269
 
270
+ @classmethod
271
+ def find(
272
+ cls,
273
+ name: str,
274
+ *,
275
+ path: Optional[Path] = None,
276
+ paths: Optional[list[Path]] = None,
277
+ extras: Optional[DictData] = None,
278
+ ) -> DictData:
279
+ """Find data with specific key and return the latest modify date data if
280
+ this key exists multiple files.
281
+
282
+ :param name: (str) A name of data that want to find.
283
+ :param path: (Path) A config path object.
284
+ :param paths: (list[Path]) A list of config path object.
285
+ :param extras: (DictData) An extra parameter that use to override core
286
+ config values.
287
+
288
+ :rtype: DictData
289
+ """
290
+ path: Path = dynamic("conf_path", f=path, extras=extras)
291
+ if not paths:
292
+ paths: list[Path] = [path]
293
+ elif not isinstance(paths, list):
294
+ raise TypeError(
295
+ f"Multi-config paths does not support for type: {type(paths)}"
296
+ )
297
+ else:
298
+ paths.append(path)
299
+
300
+ all_data: list[tuple[float, DictData]] = []
301
+ for path in paths:
302
+ for file in glob_files(path):
303
+
304
+ if cls.is_ignore(file, path):
305
+ continue
306
+
307
+ if data := cls.filter_yaml(file, name=name):
308
+ all_data.append((file.lstat().st_mtime, data))
309
+
310
+ return {} if not all_data else max(all_data, key=lambda x: x[0])[1]
311
+
257
312
  @classmethod
258
313
  def finds(
259
314
  cls,
260
315
  obj: object,
261
- conf_path: Path,
262
316
  *,
263
- included: list[str] | None = None,
317
+ path: Optional[Path] = None,
318
+ paths: Optional[list[Path]] = None,
264
319
  excluded: list[str] | None = None,
320
+ extras: Optional[DictData] = None,
265
321
  ) -> Iterator[tuple[str, DictData]]:
266
322
  """Find all data that match with object type in config path. This class
267
323
  method can use include and exclude list of identity name for filter and
268
324
  adds-on.
269
325
 
270
326
  :param obj: An object that want to validate matching before return.
271
- :param conf_path: A config object.
272
- :param included: An excluded list of data key that want to reject this
273
- data if any key exist.
327
+ :param path: A config path object.
328
+ :param paths: (list[Path]) A list of config path object.
274
329
  :param excluded: An included list of data key that want to filter from
275
330
  data.
331
+ :param extras: (DictData) An extra parameter that use to override core
332
+ config values.
276
333
 
277
334
  :rtype: Iterator[tuple[str, DictData]]
278
335
  """
279
- exclude: list[str] = excluded or []
280
- for file in glob_files(conf_path):
336
+ excluded: list[str] = excluded or []
337
+ path: Path = dynamic("conf_path", f=path, extras=extras)
338
+ if not paths:
339
+ paths: list[Path] = [path]
340
+ else:
341
+ paths.append(path)
342
+
343
+ all_data: dict[str, list[tuple[float, DictData]]] = {}
344
+ for path in paths:
345
+ for file in glob_files(path):
346
+
347
+ if cls.is_ignore(file, path):
348
+ continue
281
349
 
282
- if cls.is_ignore(file, conf_path):
283
- continue
350
+ for key, data in cls.filter_yaml(file).items():
284
351
 
285
- for key, data in cls.filter_yaml(file).items():
352
+ if key in excluded:
353
+ continue
286
354
 
287
- if key in exclude:
288
- continue
355
+ if (
356
+ data.get("type", "")
357
+ == (obj if isclass(obj) else obj.__class__).__name__
358
+ ):
359
+ marking: tuple[float, DictData] = (
360
+ file.lstat().st_mtime,
361
+ data,
362
+ )
363
+ if key in all_data:
364
+ all_data[key].append(marking)
365
+ else:
366
+ all_data[key] = [marking]
289
367
 
290
- if data.get("type", "") == obj.__name__:
291
- yield key, (
292
- {k: data[k] for k in data if k in included}
293
- if included
294
- else data
295
- )
368
+ for key in all_data:
369
+ yield key, max(all_data[key], key=lambda x: x[0])[1]
296
370
 
297
371
  @classmethod
298
372
  def is_ignore(
299
373
  cls,
300
374
  file: Path,
301
- conf_path: Path,
375
+ path: Path,
302
376
  *,
303
377
  ignore_filename: Optional[str] = None,
304
378
  ) -> bool:
305
379
  """Check this file was ignored.
306
380
 
307
381
  :param file: (Path) A file path that want to check.
308
- :param conf_path: (Path) A config path that want to read the config
382
+ :param path: (Path) A config path that want to read the config
309
383
  ignore file.
310
- :param ignore_filename: (str) An ignore filename.
384
+ :param ignore_filename: (str) An ignore filename. Default is
385
+ `.confignore` filename.
311
386
 
312
387
  :rtype: bool
313
388
  """
314
389
  ignore_filename: str = ignore_filename or ".confignore"
315
- return is_ignored(file, read_ignore(conf_path / ignore_filename))
390
+ return is_ignored(file, read_ignore(path / ignore_filename))
316
391
 
317
392
  @classmethod
318
393
  def filter_yaml(cls, file: Path, name: str | None = None) -> DictData:
@@ -369,44 +444,19 @@ def dynamic(
369
444
  return rsx if rsx is not None else rs
370
445
 
371
446
 
372
- class Loader(SimLoad):
373
- """Loader Object that get the config `yaml` file from current path.
447
+ class Loader(Protocol): # pragma: no cov
448
+ type: str
449
+ path: Path
450
+ data: DictData
451
+ extras: DictData
452
+ externals: DictData
374
453
 
375
- :param name: (str) A name of config data that will read by Yaml Loader object.
376
- :param externals: (DictData) An external parameters
377
- """
454
+ def __init__(self, *args, **kwargs) -> None: ...
378
455
 
379
456
  @classmethod
380
- def finds(
381
- cls,
382
- obj: object,
383
- *,
384
- path: Path | None = None,
385
- included: list[str] | None = None,
386
- excluded: list[str] | None = None,
387
- **kwargs,
388
- ) -> Iterator[tuple[str, DictData]]:
389
- """Override the find class method from the Simple Loader object.
457
+ def find(cls, name: str, *args, **kwargs) -> DictData: ...
390
458
 
391
- :param obj: An object that want to validate matching before return.
392
- :param path: (Path) A override config path.
393
- :param included: An excluded list of data key that want to reject this
394
- data if any key exist.
395
- :param excluded: An included list of data key that want to filter from
396
- data.
397
-
398
- :rtype: Iterator[tuple[str, DictData]]
399
- """
400
- return super().finds(
401
- obj=obj,
402
- conf_path=(path or config.conf_path),
403
- included=included,
404
- excluded=excluded,
405
- )
406
-
407
- def __init__(self, name: str, externals: DictData) -> None:
408
- super().__init__(
409
- name,
410
- conf_path=dynamic("conf_path", extras=externals),
411
- externals=externals,
412
- )
459
+ @classmethod
460
+ def finds(
461
+ cls, obj: object, *args, **kwargs
462
+ ) -> Iterator[tuple[str, DictData]]: ...
@@ -3,11 +3,14 @@
3
3
  # Licensed under the MIT License. See LICENSE in the project root for
4
4
  # license information.
5
5
  # ------------------------------------------------------------------------------
6
+ """Event module that store all event object. Now, it has only `On` and `OnYear`
7
+ model these are schedule with crontab event.
8
+ """
6
9
  from __future__ import annotations
7
10
 
8
11
  from dataclasses import fields
9
12
  from datetime import datetime
10
- from typing import Annotated, Literal, Union
13
+ from typing import Annotated, Any, Literal, Union
11
14
  from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
12
15
 
13
16
  from pydantic import BaseModel, ConfigDict, Field, ValidationInfo
@@ -17,11 +20,13 @@ from typing_extensions import Self
17
20
 
18
21
  from .__cron import WEEKDAYS, CronJob, CronJobYear, CronRunner, Options
19
22
  from .__types import DictData, DictStr
20
- from .conf import Loader
23
+ from .conf import FileLoad
24
+
25
+ Interval = Literal["daily", "weekly", "monthly"]
21
26
 
22
27
 
23
28
  def interval2crontab(
24
- interval: Literal["daily", "weekly", "monthly"],
29
+ interval: Interval,
25
30
  *,
26
31
  day: str | None = None,
27
32
  time: str = "00:00",
@@ -59,10 +64,11 @@ def interval2crontab(
59
64
 
60
65
 
61
66
  class On(BaseModel):
62
- """On Pydantic model (Warped crontab object by model).
67
+ """On model (Warped crontab object by Pydantic model) to keep crontab value
68
+ and generate CronRunner object from this crontab value.
63
69
 
64
- See Also:
65
- * `generate()` is the main use-case of this schedule object.
70
+ Methods:
71
+ - generate: is the main use-case of this schedule object.
66
72
  """
67
73
 
68
74
  model_config = ConfigDict(arbitrary_types_allowed=True)
@@ -90,19 +96,24 @@ class On(BaseModel):
90
96
  description="A timezone string value",
91
97
  alias="timezone",
92
98
  ),
93
- ] = "Etc/UTC"
99
+ ] = "UTC"
94
100
 
95
101
  @classmethod
96
102
  def from_value(cls, value: DictStr, extras: DictData) -> Self:
97
103
  """Constructor from values that will generate crontab by function.
98
104
 
99
- :param value: A mapping value that will generate crontab before create
100
- schedule model.
101
- :param extras: An extras parameter that will keep in extras.
105
+ :param value: (DictStr) A mapping value that will generate crontab
106
+ before create schedule model.
107
+ :param extras: (DictData) An extra parameter that use to override core
108
+ config value.
102
109
  """
103
110
  passing: DictStr = {}
111
+
104
112
  if "timezone" in value:
105
113
  passing["tz"] = value.pop("timezone")
114
+ elif "tz" in value:
115
+ passing["tz"] = value.pop("tz")
116
+
106
117
  passing["cronjob"] = interval2crontab(
107
118
  **{v: value[v] for v in value if v in ("interval", "day", "time")}
108
119
  )
@@ -112,18 +123,20 @@ class On(BaseModel):
112
123
  def from_conf(
113
124
  cls,
114
125
  name: str,
126
+ *,
115
127
  extras: DictData | None = None,
116
128
  ) -> Self:
117
- """Constructor from the name of config that will use loader object for
118
- getting the data.
129
+ """Constructor from the name of config loader that will use loader
130
+ object for getting the `On` data.
119
131
 
120
- :param name: A name of config that will get from loader.
121
- :param extras: An extra parameter that will keep in extras.
132
+ :param name: (str) A name of config that will get from loader.
133
+ :param extras: (DictData) An extra parameter that use to override core
134
+ config values.
122
135
 
123
136
  :rtype: Self
124
137
  """
125
138
  extras: DictData = extras or {}
126
- loader: Loader = Loader(name, externals=extras)
139
+ loader: FileLoad = FileLoad(name, extras=extras)
127
140
 
128
141
  # NOTE: Validate the config type match with current connection model
129
142
  if loader.type != cls.__name__:
@@ -155,17 +168,17 @@ class On(BaseModel):
155
168
  )
156
169
 
157
170
  @model_validator(mode="before")
158
- def __prepare_values(cls, values: DictData) -> DictData:
171
+ def __prepare_values(cls, data: Any) -> Any:
159
172
  """Extract tz key from value and change name to timezone key.
160
173
 
161
- :param values: (DictData) A data that want to pass for create an On
174
+ :param data: (DictData) A data that want to pass for create an On
162
175
  model.
163
176
 
164
177
  :rtype: DictData
165
178
  """
166
- if tz := values.pop("tz", None):
167
- values["timezone"] = tz
168
- return values
179
+ if isinstance(data, dict) and (tz := data.pop("tz", None)):
180
+ data["timezone"] = tz
181
+ return data
169
182
 
170
183
  @field_validator("tz")
171
184
  def __validate_tz(cls, value: str) -> str:
@@ -238,6 +251,9 @@ class On(BaseModel):
238
251
  """Return a next datetime from Cron runner object that start with any
239
252
  date that given from input.
240
253
 
254
+ :param start: (str | datetime) A start datetime that use to generate
255
+ the CronRunner object.
256
+
241
257
  :rtype: CronRunner
242
258
  """
243
259
  runner: CronRunner = self.generate(start=start)
@@ -22,7 +22,12 @@ ErrorData = TypedDict(
22
22
 
23
23
 
24
24
  def to_dict(exception: Exception) -> ErrorData: # pragma: no cov
25
- """Create dict data from exception instance."""
25
+ """Create dict data from exception instance.
26
+
27
+ :param exception: An exception object.
28
+
29
+ :rtype: ErrorData
30
+ """
26
31
  return {
27
32
  "class": exception,
28
33
  "name": exception.__class__.__name__,
@@ -33,6 +38,10 @@ def to_dict(exception: Exception) -> ErrorData: # pragma: no cov
33
38
  class BaseWorkflowException(Exception):
34
39
 
35
40
  def to_dict(self) -> ErrorData:
41
+ """Return ErrorData data from the current exception object.
42
+
43
+ :rtype: ErrorData
44
+ """
36
45
  return to_dict(self)
37
46
 
38
47
 
@@ -140,14 +140,19 @@ class Strategy(BaseModel):
140
140
 
141
141
  fail_fast: bool = Field(
142
142
  default=False,
143
+ description=(
144
+ "A fail-fast flag that use to cancel strategy execution when it "
145
+ "has some execution was failed."
146
+ ),
143
147
  alias="fail-fast",
144
148
  )
145
149
  max_parallel: int = Field(
146
150
  default=1,
147
151
  gt=0,
152
+ lt=10,
148
153
  description=(
149
154
  "The maximum number of executor thread pool that want to run "
150
- "parallel"
155
+ "parallel. This value should gather than 0 and less than 10."
151
156
  ),
152
157
  alias="max-parallel",
153
158
  )