ddeutil-workflow 0.0.16__py3-none-any.whl → 0.0.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/utils.py CHANGED
@@ -37,9 +37,10 @@ from pydantic.functional_validators import model_validator
 from typing_extensions import Self

 from .__types import DictData, Matrix, Re
-from .conf import config, load_config
+from .conf import config
 from .exceptions import ParamValueException, UtilException

+T = TypeVar("T")
 P = ParamSpec("P")
 AnyModel = TypeVar("AnyModel", bound=BaseModel)
 AnyModelType = type[AnyModel]
@@ -160,7 +161,7 @@ def make_registry(submodule: str) -> dict[str, Registry]:
     :rtype: dict[str, Registry]
     """
     rs: dict[str, Registry] = {}
-    for module in load_config().engine.registry:
+    for module in config.regis_hook:
         # NOTE: try to sequential import task functions
         try:
             importer = import_module(f"{module}.{submodule}")
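For orientation, the hook registry is driven by a list of importable module paths, now read from `config.regis_hook` instead of `load_config().engine.registry`. A minimal, self-contained sketch of that lookup pattern (module names and the filtering rule here are illustrative assumptions, not the package's exact implementation):

```python
# Sketch of a registry built from importable module strings.
from importlib import import_module
from typing import Any, Callable


def collect_hooks(modules: list[str], submodule: str) -> dict[str, Callable[..., Any]]:
    registry: dict[str, Callable[..., Any]] = {}
    for module in modules:
        try:
            imported = import_module(f"{module}.{submodule}")
        except ModuleNotFoundError:
            # Skip registry entries that do not ship this submodule.
            continue
        for name in dir(imported):
            obj = getattr(imported, name)
            if callable(obj) and not name.startswith("_"):
                registry[name] = obj
    return registry


# e.g. collect_hooks(["ddeutil.workflow"], "tasks")
```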
@@ -228,15 +229,6 @@ class DefaultParam(BaseParam):
         "Receive value and validate typing before return valid value."
     )

-    @model_validator(mode="after")
-    def __check_default(self) -> Self:
-        """Check default value should pass when it set required."""
-        if self.required and self.default is None:
-            raise ParamValueException(
-                "Default should be set when this parameter was required."
-            )
-        return self
-

 class DatetimeParam(DefaultParam):
     """Datetime parameter."""
@@ -497,7 +489,7 @@ def make_filter_registry() -> dict[str, FilterRegistry]:
     :rtype: dict[str, Registry]
     """
    rs: dict[str, Registry] = {}
-    for module in load_config().engine.registry_filter:
+    for module in config.regis_filter:
         # NOTE: try to sequential import task functions
         try:
             importer = import_module(module)
@@ -529,11 +521,11 @@ def get_args_const(
         raise UtilException(
             f"Post-filter: {expr} does not valid because it raise syntax error."
         ) from None
-    body: list[Expr] = mod.body

+    body: list[Expr] = mod.body
     if len(body) > 1:
         raise UtilException(
-            "Post-filter function should be only one calling per wf"
+            "Post-filter function should be only one calling per workflow."
         )

     caller: Union[Name, Call]
@@ -549,12 +541,15 @@ def get_args_const(
     keywords: dict[str, Constant] = {k.arg: k.value for k in caller.keywords}

     if any(not isinstance(i, Constant) for i in args):
-        raise UtilException("Argument should be constant.")
+        raise UtilException(f"Argument of {expr} should be constant.")
+
+    if any(not isinstance(i, Constant) for i in keywords.values()):
+        raise UtilException(f"Keyword argument of {expr} should be constant.")

     return name.id, args, keywords


-@custom_filter("fmt")
+@custom_filter("fmt")  # pragma: no cov
 def datetime_format(value: datetime, fmt: str = "%Y-%m-%d %H:%M:%S") -> str:
     """Format datetime object to string with the format."""
     if isinstance(value, datetime):
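The constant-only check now covers keyword arguments as well as positional ones. A standalone sketch of the same AST-based idea (assumed behaviour for illustration, not the library's exact `get_args_const`) shows how a post-filter string such as `fmt('%Y-%m-%d')` reduces to a name plus constant arguments:

```python
# Parse a post-filter expression into its callable name and constant arguments.
import ast


def parse_filter(expr: str) -> tuple[str, list, dict]:
    node = ast.parse(expr, mode="eval").body
    if isinstance(node, ast.Name):
        # Bare filter with no arguments, e.g. "abs".
        return node.id, [], {}
    if not isinstance(node, ast.Call) or not isinstance(node.func, ast.Name):
        raise ValueError(f"{expr!r} is not a simple filter call")
    if any(not isinstance(a, ast.Constant) for a in node.args):
        raise ValueError(f"Argument of {expr!r} should be constant.")
    if any(not isinstance(k.value, ast.Constant) for k in node.keywords):
        raise ValueError(f"Keyword argument of {expr!r} should be constant.")
    return (
        node.func.id,
        [a.value for a in node.args],
        {k.arg: k.value.value for k in node.keywords},
    )


print(parse_filter("fmt('%Y-%m-%d')"))  # ('fmt', ['%Y-%m-%d'], {})
```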
@@ -565,16 +560,18 @@ def datetime_format(value: datetime, fmt: str = "%Y-%m-%d %H:%M:%S") -> str:


 def map_post_filter(
-    value: Any,
+    value: T,
     post_filter: list[str],
     filters: dict[str, FilterRegistry],
-) -> Any:
+) -> T:
     """Mapping post-filter to value with sequence list of filter function name
     that will get from the filter registry.

     :param value: A string value that want to mapped with filter function.
     :param post_filter: A list of post-filter function name.
     :param filters: A filter registry.
+
+    :rtype: T
     """
     for _filter in post_filter:
         func_name, _args, _kwargs = get_args_const(_filter)
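Typing `value` and the return as `T` documents that the helper is value-in/value-out, although the concrete type can still change when a filter such as `fmt` turns a datetime into a string. A hedged usage sketch, assuming the `fmt` filter registered above is reachable through `make_filter_registry()`:

```python
# Usage sketch only; it assumes the "fmt" filter shown earlier in this diff is
# importable through ddeutil.workflow.utils.
from datetime import datetime

from ddeutil.workflow.utils import make_filter_registry, map_post_filter

filters = make_filter_registry()
formatted = map_post_filter(
    datetime(2024, 1, 1),
    post_filter=["fmt('%Y-%m-%d')"],
    filters=filters,
)
print(formatted)  # expected: "2024-01-01"
```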
@@ -597,6 +594,8 @@ def map_post_filter(
                 value: Any = func(value)
             else:
                 value: Any = f_func(value, *args, **kwargs)
+        except UtilException:
+            raise
         except Exception as err:
             logger.warning(str(err))
             raise UtilException(
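The new `except UtilException: raise` clause lets errors that are already typed as `UtilException` propagate unchanged instead of being wrapped a second time. The pattern in isolation (illustrative only, with a placeholder exception type standing in for the package's `UtilException`):

```python
class UtilError(Exception):
    """Placeholder for the package's UtilException."""


def apply_filter(func, value):
    try:
        return func(value)
    except UtilError:
        raise  # already meaningful, propagate unchanged
    except Exception as err:
        raise UtilError(f"filter failed: {err}") from None
```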
@@ -609,8 +608,8 @@ def map_post_filter(
 def not_in_template(value: Any, *, not_in: str = "matrix.") -> bool:
     """Check value should not pass template with not_in value prefix.

-    :param value:
-    :param not_in:
+    :param value: A value that want to find parameter template prefix.
+    :param not_in: The not in string that use in the `.startswith` function.
     :rtype: bool
     """
     if isinstance(value, dict):
@@ -628,7 +627,7 @@ def not_in_template(value: Any, *, not_in: str = "matrix.") -> bool:
 def has_template(value: Any) -> bool:
     """Check value include templating string.

-    :param value:
+    :param value: A value that want to find parameter template.
     :rtype: bool
     """
     if isinstance(value, dict):
@@ -784,6 +783,7 @@ def batch(iterable: Iterator[Any], n: int) -> Iterator[Any]:
     """
     if n < 1:
         raise ValueError("n must be at least one")
+
     it: Iterator[Any] = iter(iterable)
     while True:
         chunk_it = islice(it, n)
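A quick usage sketch of the `batch` helper touched above; the output shape is inferred from the `islice`-based loop, so treat it as illustrative:

```python
# Expected chunking behaviour of batch(); sizes under 1 raise ValueError.
from ddeutil.workflow.utils import batch

for chunk in batch(range(7), n=3):
    print(list(chunk))
# [0, 1, 2]
# [3, 4, 5]
# [6]
```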
ddeutil_workflow-0.0.17.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.16
+Version: 0.0.17
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -23,7 +23,7 @@ Requires-Python: >=3.9.13
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil >=0.4.3
-Requires-Dist: ddeutil-io[yaml] >=0.2.3
+Requires-Dist: ddeutil-io[toml,yaml] >=0.2.3
 Requires-Dist: python-dotenv ==1.0.1
 Requires-Dist: typer <1.0.0,==0.12.5
 Requires-Dist: schedule <2.0.0,==1.2.2
@@ -33,6 +33,7 @@ Requires-Dist: fastapi <1.0.0,>=0.115.0 ; extra == 'api'
 # Workflow

 [![test](https://github.com/ddeutils/ddeutil-workflow/actions/workflows/tests.yml/badge.svg?branch=main)](https://github.com/ddeutils/ddeutil-workflow/actions/workflows/tests.yml)
+[![codecov](https://codecov.io/gh/ddeutils/ddeutil-workflow/graph/badge.svg?token=3NDPN2I0H9)](https://codecov.io/gh/ddeutils/ddeutil-workflow)
 [![pypi version](https://img.shields.io/pypi/v/ddeutil-workflow)](https://pypi.org/project/ddeutil-workflow/)
 [![python support version](https://img.shields.io/pypi/pyversions/ddeutil-workflow)](https://pypi.org/project/ddeutil-workflow/)
 [![size](https://img.shields.io/github/languages/code-size/ddeutils/ddeutil-workflow)](https://github.com/ddeutils/ddeutil-workflow)
@@ -74,8 +75,9 @@ configuration. It called **Metadata Driven Data Workflow**.

 ## :round_pushpin: Installation

-This project need `ddeutil-io` extension namespace packages. If you want to install
-this package with application add-ons, you should add `app` in installation;
+This project need `ddeutil` and `ddeutil-io` extension namespace packages.
+If you want to install this package with application add-ons, you should add
+`app` in installation;

 | Usecase | Install Optional | Support |
 |-------------------|------------------------------------------|--------------------|
@@ -181,29 +183,30 @@ and do not raise any error to you.

 | Environment | Component | Default | Description | Remark |
 |:------------|-----------|---------|-------------|--------|
-| `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application | |
-| `WORKFLOW_CORE_REGISTRY` | Core | src.ddeutil.workflow,tests.utils | List of importable string for the hook stage | |
-| `WORKFLOW_CORE_REGISTRY_FILTER` | Core | ddeutil.workflow.utils | List of importable string for the filter template | |
-| `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files | |
-| `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object | |
-| `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output | |
-| `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution | |
+| `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
+| `WORKFLOW_CORE_REGISTRY` | Core | src.ddeutil.workflow,tests.utils | List of importable string for the hook stage. | |
+| `WORKFLOW_CORE_REGISTRY_FILTER` | Core | ddeutil.workflow.utils | List of importable string for the filter template. | |
+| `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files. | |
+| `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object. | |
+| `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output. | |
+| `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | false | A flag that all stage raise StageException from stage execution. | |
 | `WORKFLOW_CORE_JOB_DEFAULT_ID` | Core | false | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
-| `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | | |
-| `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor | |
-| `WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE` | Core | true | | |
-| `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode | |
-| `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination | |
-| `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module | |
-| `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel | |
-| `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format | |
+| `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
+| `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
+| `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
+| `WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE` | Core | true | . | |
+| `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
+| `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
+| `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
+| `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel. | |
+| `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format. | |

 **API Application**:

-| Environment | Component | Default | Description | Remark |
-|:------------|-----------|---------|-------------|--------|
-| `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging | |
-| `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler | |
+| Environment | Component | Default | Description | Remark |
+|:------------|-----------|---------|-------------|--------|
+| `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging. | |
+| `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler. | |

 ## :rocket: Deployment

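These settings are plain environment variables, so any of the boolean flags, including the new `WORKFLOW_CORE_JOB_RAISE_ERROR`, can be set before the package is imported. A hypothetical reader, shown only for illustration since the real parsing lives in `ddeutil/workflow/conf.py`:

```python
# Hypothetical helper; the package's own config object in conf.py is the real
# source of truth for these flags.
import os


def env_flag(name: str, default: bool) -> bool:
    return os.getenv(name, str(default)).strip().lower() in ("1", "true", "y", "yes")


job_raise_error = env_flag("WORKFLOW_CORE_JOB_RAISE_ERROR", True)
```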
ddeutil_workflow-0.0.17.dist-info/RECORD ADDED
@@ -0,0 +1,21 @@
+ddeutil/workflow/__about__.py,sha256=z3f1GAF3VbZK1m4FWAXXMsWplP_jSe-X-wVlshvlDWU,28
+ddeutil/workflow/__cron.py,sha256=ZiuV4ASkXvAyFJYxEb9PKiAFNYnUt4AJozu_kH3pI4U,25777
+ddeutil/workflow/__init__.py,sha256=RNKME4FPMAjqtrBR-IBwQVEKeoY5yBAiHYcZw0k9cI4,729
+ddeutil/workflow/__types.py,sha256=yizLXzjQpBt_WPaof2pIyncitJvYeksw4Q1zYJeuCLA,3707
+ddeutil/workflow/api.py,sha256=vUT2RVS9sF3hvY-IrzAEnahxwq4ZFYP0G3xfctHbNsw,4701
+ddeutil/workflow/cli.py,sha256=baHhvtI8snbHYHeThoX401Cd6SMB2boyyCbCtTrIl3E,3278
+ddeutil/workflow/conf.py,sha256=SV4GMtjUc-Bor9BPi0yOtTIsiZ0FImsoRbuJysUIE9w,15395
+ddeutil/workflow/exceptions.py,sha256=Uf1-Tn8rAzj0aiVHSqo4fBqO80W0za7UFZgKv24E-tg,706
+ddeutil/workflow/job.py,sha256=dW9NXR_bttDGLwelVi7qXXlLd96KX-TKG8xnHejA6u0,24041
+ddeutil/workflow/on.py,sha256=rneZB5HyFWTBWriGef999bovA3glQIK6LTgC996q9Gc,7334
+ddeutil/workflow/repeat.py,sha256=9uKku5uMcQgzY5fWyaJMwJ0wPFX0oTwmu7vXKdgB_ec,4923
+ddeutil/workflow/route.py,sha256=JALwOH6xKu5rnII7DgA1Lbp_E5ehCoBbOW_eKqB_Olk,6753
+ddeutil/workflow/scheduler.py,sha256=Oa6bZpphjlGp0mXdBuLMk1m6G-dezaBNQxQX-SB3WJ0,47032
+ddeutil/workflow/stage.py,sha256=fMv_oFkoqpfoewzPUMdl3-BQcrJ8SE53cF7es8yGxfs,25525
+ddeutil/workflow/utils.py,sha256=lpnqGGd_Rw7eZo2wDbZ-NZNItBooFooPjwM4_40Csh8,25152
+ddeutil_workflow-0.0.17.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.17.dist-info/METADATA,sha256=btmCr-yjy4gzhnZppfXjANfPH-3tKUJFGon2aOMUK30,13574
+ddeutil_workflow-0.0.17.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
+ddeutil_workflow-0.0.17.dist-info/entry_points.txt,sha256=0BVOgO3LdUdXVZ-CiHHDKxzEk2c8J30jEwHeKn2YCWI,62
+ddeutil_workflow-0.0.17.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.17.dist-info/RECORD,,
ddeutil/workflow/log.py DELETED
@@ -1,195 +0,0 @@
-# ------------------------------------------------------------------------------
-# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
-# Licensed under the MIT License. See LICENSE in the project root for
-# license information.
-# ------------------------------------------------------------------------------
-from __future__ import annotations
-
-import json
-import logging
-from abc import ABC, abstractmethod
-from datetime import datetime
-from functools import lru_cache
-from pathlib import Path
-from typing import ClassVar, Optional, Union
-
-from pydantic import BaseModel, Field
-from pydantic.functional_validators import model_validator
-from typing_extensions import Self
-
-from .__types import DictData
-from .conf import config, load_config
-
-
-@lru_cache
-def get_logger(name: str):
-    """Return logger object with an input module name.
-
-    :param name: A module name that want to log.
-    """
-    logger = logging.getLogger(name)
-    formatter = logging.Formatter(
-        fmt=(
-            "%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d, "
-            "%(thread)-5d) [%(levelname)-7s] %(message)-120s "
-            "(%(filename)s:%(lineno)s)"
-        ),
-        datefmt="%Y-%m-%d %H:%M:%S",
-    )
-    stream = logging.StreamHandler()
-    stream.setFormatter(formatter)
-    logger.addHandler(stream)
-
-    logger.setLevel(logging.DEBUG if config.debug else logging.INFO)
-    return logger
-
-
-class BaseLog(BaseModel, ABC):
-    """Base Log Pydantic Model with abstraction class property that implement
-    only model fields. This model should to use with inherit to logging
-    sub-class like file, sqlite, etc.
-    """
-
-    name: str = Field(description="A workflow name.")
-    on: str = Field(description="A cronjob string of this piepline schedule.")
-    release: datetime = Field(description="A release datetime.")
-    context: DictData = Field(
-        default_factory=dict,
-        description=(
-            "A context data that receive from a workflow execution result.",
-        ),
-    )
-    parent_run_id: Optional[str] = Field(default=None)
-    run_id: str
-    update: datetime = Field(default_factory=datetime.now)
-
-    @model_validator(mode="after")
-    def __model_action(self) -> Self:
-        """Do before the Log action with WORKFLOW_LOG_ENABLE_WRITE env variable.
-
-        :rtype: Self
-        """
-        if config.enable_write_log:
-            self.do_before()
-        return self
-
-    def do_before(self) -> None:
-        """To something before end up of initial log model."""
-
-    @abstractmethod
-    def save(self, excluded: list[str] | None) -> None:
-        """Save this model logging to target logging store."""
-        raise NotImplementedError("Log should implement ``save`` method.")
-
-
-class FileLog(BaseLog):
-    """File Log Pydantic Model that use to saving log data from result of
-    workflow execution. It inherit from BaseLog model that implement the
-    ``self.save`` method for file.
-    """
-
-    filename: ClassVar[str] = (
-        "./logs/workflow={name}/release={release:%Y%m%d%H%M%S}"
-    )
-
-    def do_before(self) -> None:
-        """Create directory of release before saving log file."""
-        self.pointer().mkdir(parents=True, exist_ok=True)
-
-    @classmethod
-    def find_logs(cls, name: str):
-        pointer: Path = (
-            load_config().engine.paths.root / f"./logs/workflow={name}"
-        )
-        for file in pointer.glob("./release=*/*.log"):
-            with file.open(mode="r", encoding="utf-8") as f:
-                yield json.load(f)
-
-    @classmethod
-    def find_log(cls, name: str, release: datetime | None = None):
-        if release is not None:
-            pointer: Path = (
-                load_config().engine.paths.root
-                / f"./logs/workflow={name}/release={release:%Y%m%d%H%M%S}"
-            )
-            if not pointer.exists():
-                raise FileNotFoundError(
-                    f"Pointer: ./logs/workflow={name}/"
-                    f"release={release:%Y%m%d%H%M%S} does not found."
-                )
-            return cls.model_validate(
-                obj=json.loads(pointer.read_text(encoding="utf-8"))
-            )
-        raise NotImplementedError("Find latest log does not implement yet.")
-
-    @classmethod
-    def is_pointed(
-        cls,
-        name: str,
-        release: datetime,
-        *,
-        queue: list[datetime] | None = None,
-    ) -> bool:
-        """Check this log already point in the destination.
-
-        :param name: A workflow name.
-        :param release: A release datetime.
-        :param queue: A list of queue of datetime that already run in the
-            future.
-        """
-        # NOTE: Check environ variable was set for real writing.
-        if not config.enable_write_log:
-            return False
-
-        # NOTE: create pointer path that use the same logic of pointer method.
-        pointer: Path = load_config().engine.paths.root / cls.filename.format(
-            name=name, release=release
-        )
-
-        if not queue:
-            return pointer.exists()
-        return pointer.exists() or (release in queue)
-
-    def pointer(self) -> Path:
-        """Return release directory path that was generated from model data.
-
-        :rtype: Path
-        """
-        return load_config().engine.paths.root / self.filename.format(
-            name=self.name, release=self.release
-        )
-
-    def save(self, excluded: list[str] | None) -> Self:
-        """Save logging data that receive a context data from a workflow
-        execution result.
-
-        :param excluded: An excluded list of key name that want to pass in the
-            model_dump method.
-        :rtype: Self
-        """
-        # NOTE: Check environ variable was set for real writing.
-        if not config.enable_write_log:
-            return self
-
-        log_file: Path = self.pointer() / f"{self.run_id}.log"
-        log_file.write_text(
-            json.dumps(
-                self.model_dump(exclude=excluded),
-                default=str,
-                indent=2,
-            ),
-            encoding="utf-8",
-        )
-        return self
-
-
-class SQLiteLog(BaseLog):
-
-    def save(self, excluded: list[str] | None) -> None:
-        raise NotImplementedError("SQLiteLog does not implement yet.")
-
-
-Log = Union[
-    FileLog,
-    SQLiteLog,
-]
ddeutil_workflow-0.0.16.dist-info/RECORD DELETED
@@ -1,22 +0,0 @@
-ddeutil/workflow/__about__.py,sha256=J3F-a05BmGpnyxPdhYxOJtDuTmXUjC0rvelcqfmfPRQ,28
-ddeutil/workflow/__init__.py,sha256=-DIy8SGFsD7_wqp-V-K8v8jTxacmqrcyj_SFx1WS6qg,687
-ddeutil/workflow/__types.py,sha256=yizLXzjQpBt_WPaof2pIyncitJvYeksw4Q1zYJeuCLA,3707
-ddeutil/workflow/api.py,sha256=7LQeR9w2Mq_vFTjqEK3dPiI1de-ENC_eyozr_UG8JMA,4717
-ddeutil/workflow/cli.py,sha256=GgVWPrSrG8ZTUGMTmHHngBi5LK0w5sAIMepciE5G_kc,3294
-ddeutil/workflow/conf.py,sha256=Pzw2LGaKtOUgEvczp9_PRhzO2HBigQx5XUEdrlNQFm4,10244
-ddeutil/workflow/cron.py,sha256=naWefHc3EnVo41Yf1zQeXOzF27YlTlnfj0XnQ6_HO-U,25514
-ddeutil/workflow/exceptions.py,sha256=Uf1-Tn8rAzj0aiVHSqo4fBqO80W0za7UFZgKv24E-tg,706
-ddeutil/workflow/job.py,sha256=8198kktH8WqMR2H6VWx2Pq6TFc2gmBdH3qZvVERRJx0,22668
-ddeutil/workflow/log.py,sha256=uxnxzeYTG7bu2ShchYehRnyr9YAdQyiMsUEhm8RbDDQ,6285
-ddeutil/workflow/on.py,sha256=2Kt0GIgrp_na1lA-TdJz1Wwo_mdcgxhTZdQ5BU2rj0E,7236
-ddeutil/workflow/repeat.py,sha256=mAj2BUDdcCe828gG-1NNxaz57mHebra5aC2VbjEzVJE,4939
-ddeutil/workflow/route.py,sha256=HveQ2c5MDVYW5YdMsF6g30DX2liR6vNt3_20NirMGQ8,6769
-ddeutil/workflow/scheduler.py,sha256=u1r5JhI7HSZzJ9__yruQen3mmo6Pd3slvEMtFfRFeOc,46995
-ddeutil/workflow/stage.py,sha256=I5bGASI-UUCcS-figIHdrV_LycDu_2mwjt4eZBR3Z_E,24268
-ddeutil/workflow/utils.py,sha256=PgPvcLoRYFobUUS56rSUFuN-zTyejgJ-gl36nNqQuWM,25117
-ddeutil_workflow-0.0.16.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
-ddeutil_workflow-0.0.16.dist-info/METADATA,sha256=_1jCgcqLcSKB2EHZcotmCtZec2KPKAYCidy28ZqQTe8,13190
-ddeutil_workflow-0.0.16.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
-ddeutil_workflow-0.0.16.dist-info/entry_points.txt,sha256=0BVOgO3LdUdXVZ-CiHHDKxzEk2c8J30jEwHeKn2YCWI,62
-ddeutil_workflow-0.0.16.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
-ddeutil_workflow-0.0.16.dist-info/RECORD,,