ddeutil-workflow 0.0.27__py3-none-any.whl → 0.0.29__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -138,7 +138,7 @@ class WorkflowQueue:
  """Construct a WorkflowQueue object from an input queue value that is passed
  as a list of datetime or a list of WorkflowRelease.

- :raise TypeError: If the type of an input queue does not valid.
+ :raise TypeError: If the type of the input queue is not valid.

  :rtype: Self
  """
@@ -226,9 +226,9 @@ class WorkflowQueue:
  class Workflow(BaseModel):
      """Workflow Pydantic model.

-     This is the main future of this project because it use to be workflow
+     This is the main feature of this project because it is used as the workflow
      data for running anywhere you want, or for scheduling tasks in the
-     background. It use lightweight coding line from Pydantic Model and enhance
+     background. It builds on a lightweight Pydantic model and enhances the
      execute method on it.
      """

@@ -317,7 +317,7 @@ class Workflow(BaseModel):
  @model_validator(mode="before")
  def __prepare_model_before__(cls, values: DictData) -> DictData:
      """Prepare the params key in the data model before validating."""
- # NOTE: Prepare params type if it passing with only type value.
+ # NOTE: Prepare the params type if it is passed with only a type value.
  if params := values.pop("params", {}):
      values["params"] = {
          p: (
@@ -341,7 +341,7 @@ class Workflow(BaseModel):
  @field_validator("on", mode="after")
  def __on_no_dup_and_reach_limit__(cls, value: list[On]) -> list[On]:
      """Validate that the on field does not contain duplicate values, and if it
- contain the every minute value more than one value, it will remove to
+ contains more than one every-minute value, it will be reduced to
      only one value.

      :raise ValueError: If it has duplicate values.
@@ -359,8 +359,8 @@ class Workflow(BaseModel):
  # WARNING:
  # if '* * * * *' in set_ons and len(set_ons) > 1:
  #     raise ValueError(
- #         "If it has every minute cronjob on value, it should has only "
- #         "one value in the on field."
+ #         "If it has every minute cronjob on value, it should have "
+ #         "only one value in the on field."
  #     )

  if len(set_ons) > config.max_on_per_workflow:
@@ -372,7 +372,7 @@ class Workflow(BaseModel):

  @model_validator(mode="after")
  def __validate_jobs_need__(self) -> Self:
- """Validate each need job in any jobs should exists.
+ """Validate that each needed job in every job exists.

      :raise WorkflowException: If a needed job does not exist in this
          workflow.
@@ -591,10 +591,10 @@ class Workflow(BaseModel):
  """Generate a queue of datetimes from the cron runner that is initialized
  from the on field, with an offset value.

- :param offset: A offset in second unit for time travel.
+ :param offset: An offset in seconds for time travel.
  :param end_date: An end datetime object.
  :param queue: A workflow queue object.
- :param log: A log class that want to making log object.
+ :param log: A log class that is used to make the log object.
  :param force_run: A flag that allows releasing the workflow if the log for
      that release was already pointed.

@@ -696,7 +696,7 @@ class Workflow(BaseModel):
  start_date: datetime = current_date
  offset: float = 0

- # NOTE: End date is use to stop generate queue with an input periods
+ # NOTE: The end date is used to stop generating the queue with an input periods
  #   value.
  end_date: datetime = start_date + timedelta(minutes=periods)

@@ -812,7 +812,7 @@ class Workflow(BaseModel):
  :param params: The params that were parameterized from the workflow execution.
  :param run_id: A workflow running ID for this job execution.
  :param raise_error: A flag that raises an error instead of catching it in the result
- if it get exception from job execution.
+ if it gets an exception from the job execution.

  :rtype: Result
  :return: Return the result object that receives the job execution result
@@ -868,8 +868,8 @@ class Workflow(BaseModel):
  """Execute the workflow, passing dynamic parameters to all jobs that are
  included in this workflow model via the ``jobs`` field.

- The result of execution process for each jobs and stages on this
- workflow will keeping in dict which able to catch out with all jobs and
+ The result of the execution process for each job and stage in this
+ workflow is kept in a dict that can be accessed for all jobs and
  stages by dot annotation.

  For example, when I want to use the output from the previous stage, I
@@ -884,7 +884,9 @@ class Workflow(BaseModel):
  :param run_id: A workflow running ID for this job execution.
  :type run_id: str | None (default: None)
  :param timeout: A workflow execution timeout in seconds that is used
- for limit time of execution and waiting job dependency.
+ to limit the execution time and the waiting on job dependencies. This
+ value does not force-stop a task that is still running past this
+ limit.
  :type timeout: int (default: 0)

  :rtype: Result
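
Since this wording change is operational, here is a minimal caller-side sketch, assuming the `Workflow.from_loader` constructor from this module (the workflow name and params are illustrative; the `params`/`timeout` arguments come from the docstring above):

```python
from ddeutil.workflow import Workflow

# Execute a loaded workflow with a 60-second timeout. Per the docstring
# above, the timeout bounds execution and job-dependency waiting, but it
# does not force-stop a job that is already running past the limit.
workflow = Workflow.from_loader(name="run-py-local")
result = workflow.execute(
    params={"source-extract": "USD-THB", "run-date": "2024-01-01"},
    timeout=60,
)
```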
@@ -907,8 +909,8 @@ class Workflow(BaseModel):
  )
  return rs.catch(status=0, context=params)

- # NOTE: Create a job queue that keep the job that want to running after
- #     it dependency condition.
+ # NOTE: Create a job queue that keeps the jobs that want to run after
+ #     their dependency conditions.
  jq: Queue = Queue()
  for job_id in self.jobs:
      jq.put(job_id)
@@ -967,7 +969,7 @@ class Workflow(BaseModel):

  :param context: A workflow context data that you want to pass downstream.
  :param ts: A start timestamp that is used to check whether the execution should
- timeout.
+ time out.
  :param job_queue: A job queue object.
  :param timeout: A value in seconds that bounds the running time.
  :param thread_timeout: A timeout for waiting for all futures to complete.
@@ -1064,7 +1066,7 @@ class Workflow(BaseModel):

  :param context: A workflow context data that you want to pass downstream.
  :param ts: A start timestamp that is used to check whether the execution should
- timeout.
+ time out.
  :param timeout: A value in seconds that bounds the running time.

  :rtype: DictData
@@ -1090,7 +1092,7 @@ class Workflow(BaseModel):
  continue

  # NOTE: Start workflow job execution with deep-copied context data
- #   before release. This job execution process will running until
+ #   before release. This job execution process will run until
  #   it is done, before checking whether the whole execution timed out.
  #
  # {
@@ -1182,7 +1184,7 @@ class WorkflowTask:

  :param end_date: An end datetime object.
  :param queue: A workflow queue object.
- :param log: A log class that want to making log object.
+ :param log: A log class that is used to make the log object.
  :param force_run: A flag that allows releasing the workflow if the log for
      that release was already pointed.

@@ -1220,6 +1222,7 @@ class WorkflowTask:
  return queue

  def __repr__(self) -> str:
+     """Override the __repr__ method."""
  return (
      f"{self.__class__.__name__}(alias={self.alias!r}, "
      f"workflow={self.workflow.name!r}, runner={self.runner!r}, "
@@ -0,0 +1,292 @@
+ Metadata-Version: 2.2
+ Name: ddeutil-workflow
+ Version: 0.0.29
+ Summary: Lightweight workflow orchestration
+ Author-email: ddeutils <korawich.anu@gmail.com>
+ License: MIT
+ Project-URL: Homepage, https://github.com/ddeutils/ddeutil-workflow/
+ Project-URL: Source Code, https://github.com/ddeutils/ddeutil-workflow/
+ Keywords: orchestration,workflow
+ Classifier: Topic :: Utilities
+ Classifier: Natural Language :: English
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Intended Audience :: Developers
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 3 :: Only
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Requires-Python: >=3.9.13
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: ddeutil>=0.4.6
+ Requires-Dist: ddeutil-io[toml,yaml]>=0.2.3
+ Requires-Dist: pydantic==2.10.6
+ Requires-Dist: python-dotenv==1.0.1
+ Requires-Dist: schedule<2.0.0,==1.2.2
+ Provides-Extra: api
+ Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "api"
+
+ # Workflow Orchestration
+
+ [![test](https://github.com/ddeutils/ddeutil-workflow/actions/workflows/tests.yml/badge.svg?branch=main)](https://github.com/ddeutils/ddeutil-workflow/actions/workflows/tests.yml)
+ [![codecov](https://codecov.io/gh/ddeutils/ddeutil-workflow/graph/badge.svg?token=3NDPN2I0H9)](https://codecov.io/gh/ddeutils/ddeutil-workflow)
+ [![pypi version](https://img.shields.io/pypi/v/ddeutil-workflow)](https://pypi.org/project/ddeutil-workflow/)
+ [![python support version](https://img.shields.io/pypi/pyversions/ddeutil-workflow)](https://pypi.org/project/ddeutil-workflow/)
+ [![size](https://img.shields.io/github/languages/code-size/ddeutils/ddeutil-workflow)](https://github.com/ddeutils/ddeutil-workflow)
+ [![gh license](https://img.shields.io/github/license/ddeutils/ddeutil-workflow)](https://github.com/ddeutils/ddeutil-workflow/blob/main/LICENSE)
+ [![code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
+
+ The **Lightweight Workflow Orchestration** package with fewer dependencies, created
+ for easily building simple, metadata-driven data workflows. It can be used for data
+ operations via a `.yaml` template.
+
+ > [!WARNING]
+ > This package provides only the orchestration workload. That means you should not
+ > use workflow stages to process any large volume of data that uses a lot of compute
+ > resources :cold_sweat:.
+
+ In my opinion, there is no need to write duplicate workflow code if I can
+ write one template workflow with dynamic input parameters and just change
+ the input parameters per use-case instead.
+ This way I can handle a lot of logical workflows in our orgs with only metadata
+ configuration. I call this **Metadata Driven Data Workflow**.
+
+ ---
+
+ **:pushpin: <u>Rules of This Workflow engine</u>**:
+
+ 1. The minimum frequency unit of scheduling is **1 minute** :warning:
+ 2. It cannot re-run only a failed stage and its pending downstream :rotating_light:
+ 3. All parallel tasks inside the workflow engine use multi-threading
+    (Python 3.13 unlocks the GIL :unlock:)
+
+ ---
+
+ **:memo: <u>Workflow Diagrams</u>**:
+
+ This diagram shows where this application runs on the production infrastructure.
+ You will see that this application only runs code, stress-less, which means
+ you should set up the data layer separately from this core program before running it.
+
+ ```mermaid
+ flowchart LR
+     subgraph Interface
+         A((User))
+         subgraph Docker Container
+             G@{ shape: rounded, label: "Observe<br>Application" }
+         end
+     end
+
+     A --->|action| B(Workflow<br>Application)
+     B ---> |response| A
+     B -..-> |response| G
+     G -..-> |request| B
+
+     subgraph Docker Container
+         B
+     end
+
+     subgraph Data Context
+         D@{ shape: processes, label: "Logs" }
+         E@{ shape: lin-cyl, label: "Metadata" }
+     end
+
+     subgraph Git Context
+         F@{ shape: tag-rect, label: "YAML<br>files" }
+     end
+
+     B --->|disable| F
+     F --->|read| B
+
+     B --->|write| E
+     E --->|read| B
+     B --->|write| D
+
+     D -.->|read| G
+     E -.->|read| G
+ ```
+
+ > [!NOTE]
+ > _Disclaimer_: I took inspiration for the dynamic statements from [**GitHub Actions**](https://github.com/features/actions)
+ > with its `.yml` files, and for the config files from several data orchestration
+ > framework tools, based on my experience as a Data Engineer. :grimacing:
+ >
+ > Other workflow tools that I am interested in, and from which I picked some
+ > interesting features to implement in this package:
+ >
+ > - [Google **Workflows**](https://cloud.google.com/workflows)
+ > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
+
+ ## :round_pushpin: Installation
+
+ This project needs the `ddeutil` and `ddeutil-io` extension namespace packages.
+ If you want to install this package with the application add-ons, you should add
+ `api` to the installation;
+
+ | Use-case       | Install Optional        | Support            |
+ |----------------|-------------------------|--------------------|
+ | Python         | `ddeutil-workflow`      | :heavy_check_mark: |
+ | FastAPI Server | `ddeutil-workflow[api]` | :heavy_check_mark: |
+
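For example, with pip (standard extras syntax; the package name and the `api` extra come from the metadata above):

```shell
pip install ddeutil-workflow            # core orchestration only
pip install "ddeutil-workflow[api]"     # with the FastAPI server add-on
```
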
+ ## :beers: Usage
+
+ These are examples that use a workflow file for running common Data Engineering
+ use-cases.
+
+ > [!IMPORTANT]
+ > I recommend you use the `hook` stage for all actions that you want to
+ > orchestrate as workflow activity, because it can take dynamic input
+ > arguments with the same hook function, which makes you spend less time
+ > maintaining your data workflows. A sketch of such a hook function follows
+ > the example below.
+
+ ```yaml
+ run-py-local:
+
+   # Validate model that is used to parse this template file.
+   type: Workflow
+   on:
+     # If this workflow is deployed to a schedule, it will run every 5 minutes
+     # with the Asia/Bangkok timezone.
+     - cronjob: '*/5 * * * *'
+       timezone: "Asia/Bangkok"
+   params:
+     # Incoming execution parameters will be validated against these types. It
+     # allows setting default values or templating.
+     source-extract: str
+     run-date: datetime
+   jobs:
+     getting-api-data:
+       stages:
+         - name: "Retrieve API Data"
+           id: retrieve-api
+           uses: tasks/get-api-with-oauth-to-s3@requests
+           with:
+             # Arguments of the source data that you want to retrieve.
+             method: post
+             url: https://finances/open-data/currency-pairs/
+             body:
+               resource: ${{ params.source-extract }}
+
+               # You can use filtering like a Jinja template, but this
+               # package does not use Jinja itself.
+               filter: ${{ params.run-date | fmt(fmt='%Y%m%d') }}
+             auth:
+               type: bearer
+               keys: ${API_ACCESS_REFRESH_TOKEN}
+
+             # Arguments of the target data that you want to land.
+             writing_mode: flatten
+             aws_s3_path: my-data/open-data/${{ params.source-extract }}
+
+             # This authentication code should be implemented in your custom hook
+             # function. The template allows you to use environment variables.
+             aws_access_client_id: ${AWS_ACCESS_CLIENT_ID}
+             aws_access_client_secret: ${AWS_ACCESS_CLIENT_SECRET}
+ ```
+
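The `uses: tasks/get-api-with-oauth-to-s3@requests` line above points at a registered hook function. A minimal sketch of such a registration, assuming the `tag` decorator from this package's hook registry (`ddeutil/workflow/hook.py`); the function body and its keyword arguments are illustrative only:

```python
# A hypothetical module on the WORKFLOW_CORE_REGISTRY path, e.g. tasks/__init__.py.
from ddeutil.workflow import tag


@tag("requests", alias="get-api-with-oauth-to-s3")
def get_api_with_oauth_to_s3(method: str, url: str, body: dict, **kwargs) -> dict:
    """Call the source API and land the payload on S3 (illustrative body)."""
    ...  # call the API with `method`/`url`/`body`, then write to the S3 path
    return {"records": 0}
```

The payoff is the one named in the note above: the same function serves many workflows, and only the `with:` arguments in the YAML change per use-case.
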
+ The above workflow template is the main executor pipeline that you want to run. If you
+ want to schedule this workflow, you want its parameters to change dynamically based on
+ the execution time, such as `run-date`, which should follow the workflow's running date.
+
+ So, this package provides the `Schedule` template for this action.
+
+ ```yaml
+ schedule-run-local-wf:
+
+   # Validate model that is used to parse this template file.
+   type: Schedule
+   workflows:
+
+     # Map an existing workflow that you want to deploy with the scheduler
+     # application. It allows you to pass release parameters that dynamically
+     # change depending on the current context of the scheduler application's
+     # release time.
+     - name: run-py-local
+       params:
+         source-extract: "USD-THB"
+         asat-dt: "${{ release.logical_date }}"
+ ```
+
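A hedged sketch of picking this entry up from Python, assuming the `Schedule` model in this package's `scheduler.py` exposes a `from_loader` constructor analogous to `Workflow`'s (the constructor name is an assumption):

```python
from ddeutil.workflow import Schedule

# Load the schedule template by its YAML key (API assumed to mirror
# Workflow.from_loader; check scheduler.py for the real constructor).
schedule = Schedule.from_loader(name="schedule-run-local-wf")
```
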
+ ## :cookie: Configuration
+
+ The main configuration is used to dynamically change this application's behavior
+ toward your objective. If any configuration value is not set yet, a default value
+ is used and no error is raised.
+
+ > [!IMPORTANT]
+ > The config value that you set in the environment should combine the
+ > prefix, component, and name as `WORKFLOW_{component}_{name}` (upper case).
+
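For example, in a shell (the component/name pairs and values below are taken from the table that follows):

```shell
export WORKFLOW_CORE_TIMEZONE="Asia/Bangkok"   # Core component, TIMEZONE name
export WORKFLOW_LOG_ENABLE_WRITE=true          # Log component, ENABLE_WRITE name
export WORKFLOW_APP_MAX_PROCESS=2              # App component, MAX_PROCESS name
```
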
+ | Name                         | Component | Default                           | Description                                                                                                     |
+ |:-----------------------------|:---------:|:----------------------------------|:----------------------------------------------------------------------------------------------------------------|
+ | **ROOT_PATH**                | Core      | `.`                               | The root path of the workflow application.                                                                      |
+ | **REGISTRY**                 | Core      | `.`                               | A list of importable strings for the hook stage.                                                                |
+ | **REGISTRY_FILTER**          | Core      | `ddeutil.workflow.templates`      | A list of importable strings for the filter template.                                                           |
+ | **CONF_PATH**                | Core      | `conf`                            | The config path that keeps all template `.yaml` files.                                                          |
+ | **TIMEZONE**                 | Core      | `Asia/Bangkok`                    | A timezone string value that will be passed to the `ZoneInfo` object.                                           |
+ | **STAGE_DEFAULT_ID**         | Core      | `true`                            | A flag that enables a default stage ID, used to catch an execution output.                                      |
+ | **STAGE_RAISE_ERROR**        | Core      | `false`                           | A flag that makes all stages raise StageException from stage execution.                                         |
+ | **JOB_DEFAULT_ID**           | Core      | `false`                           | A flag that enables a default job ID, used to catch an execution output. The ID used will be a sequence number. |
+ | **JOB_RAISE_ERROR**          | Core      | `true`                            | A flag that makes all jobs raise JobException from job strategy execution.                                      |
+ | **MAX_NUM_POKING**           | Core      | `4`                               | The maximum number of poking iterations.                                                                        |
+ | **MAX_JOB_PARALLEL**         | Core      | `2`                               | The maximum number of jobs that are able to run in parallel in the workflow executor.                           |
+ | **MAX_JOB_EXEC_TIMEOUT**     | Core      | `600`                             | The maximum job execution timeout, in seconds.                                                                  |
+ | **MAX_CRON_PER_WORKFLOW**    | Core      | `5`                               | The maximum number of `on` cron values per workflow.                                                            |
+ | **MAX_QUEUE_COMPLETE_HIST**  | Core      | `16`                              | The maximum completion history kept in the workflow queue.                                                      |
+ | **GENERATE_ID_SIMPLE_MODE**  | Core      | `true`                            | A flag that enables generating IDs with the `md5` algorithm.                                                    |
+ | **PATH**                     | Log       | `./logs`                          | The log path where the workflow saves its logs.                                                                 |
+ | **DEBUG_MODE**               | Log       | `true`                            | A flag that enables logging with debug-level mode.                                                              |
+ | **ENABLE_WRITE**             | Log       | `true`                            | A flag that enables the logging object to save logs to its destination.                                         |
+ | **MAX_PROCESS**              | App       | `2`                               | The maximum number of process workers that run in the scheduler app module.                                     |
+ | **MAX_SCHEDULE_PER_PROCESS** | App       | `100`                             | The number of schedules per process that run in parallel.                                                       |
+ | **STOP_BOUNDARY_DELTA**      | App       | `'{"minutes": 5, "seconds": 20}'` | A time delta value, in JSON string format, used to stop the scheduler app.                                      |
+
+ **API Application**:
+
+ | Environment               | Component | Default | Description                                                                               |
+ |:--------------------------|:---------:|---------|-------------------------------------------------------------------------------------------|
+ | **ENABLE_ROUTE_WORKFLOW** | API       | `true`  | A flag that enables the workflow route, to manage manual execution and workflow logging. |
+ | **ENABLE_ROUTE_SCHEDULE** | API       | `true`  | A flag that enables running the scheduler.                                                |
+
+ ## :rocket: Deployment
+
+ This package is able to run as an application service that receives manual triggers
+ from the master node via REST API, or to be used as a scheduler background service,
+ like a crontab job, but via the Python API.
+
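For the scheduler-style route, a minimal sketch via the Python API, assuming the `poke` method on `Workflow` that this package's workflow module provides for releasing due `on` schedules (parameter values are illustrative):

```python
from ddeutil.workflow import Workflow

# Poke the workflow so any `on` schedule that is due gets released
# (method name assumed from this package's workflow module).
Workflow.from_loader(name="run-py-local").poke(
    params={"source-extract": "USD-THB"},
)
```
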
+ ### API Server
+
+ ```shell
+ (venv) $ uvicorn src.ddeutil.workflow.api:app \
+     --host 127.0.0.1 \
+     --port 80 \
+     --no-access-log
+ ```
+
+ > [!NOTE]
+ > If this package is already deployed, it is able to use multiprocessing;
+ > `uvicorn ddeutil.workflow.api:app --host 127.0.0.1 --port 80 --workers 4`
+
+ ### Docker Container
+
+ Create the Docker image;
+
+ ```shell
+ $ docker build -t ddeutil-workflow:latest -f .container/Dockerfile .
+ ```
+
+ Run the above Docker image;
+
+ ```shell
+ $ docker run -i ddeutil-workflow:latest
+ ```
+
+ ## :speech_balloon: Contribute
+
+ I do not think this project will go around the world, because it has a specific purpose,
+ and you can write your own code without this project as a dependency for a long-term
+ solution. So, for now, you can open [a GitHub issue on this project :raised_hands:](https://github.com/ddeutils/ddeutil-workflow/issues)
+ to fix a bug or request a new feature if you want it.
@@ -0,0 +1,25 @@
+ ddeutil/workflow/__about__.py,sha256=msVKiLUg4jRVo_KJlghj1cc0zwX_olhWZZkqcWYz16E,28
+ ddeutil/workflow/__cron.py,sha256=uA8XcbY_GwA9rJSHaHUaXaJyGDObJN0ZeYlJSinL8y8,26880
+ ddeutil/workflow/__init__.py,sha256=dghn2lFl3Own4Pyq7SFHu-FMymOgLontJ6aCfxea9h4,1606
+ ddeutil/workflow/__types.py,sha256=CK1jfzyHP9P-MB0ElhpJZ59ZFGJC9MkQuAop5739_9k,4304
+ ddeutil/workflow/conf.py,sha256=7lj_Im9jsa95fWUo19Q4-ZAcHa8Pu1HW-vaLgvrjNUM,17559
+ ddeutil/workflow/cron.py,sha256=OLgniUxmrn65gzckk-uTmE2Pk1enJJyjYUKVeBbDQz0,7522
+ ddeutil/workflow/exceptions.py,sha256=XUnpJSuxOyataClP0w_gpYjzn-NIwZK2BHro-J7Yw24,895
+ ddeutil/workflow/hook.py,sha256=vgiJVbgm4aVl-tt_HVhHn-65UXCojzGLapdOPkoX9QA,5406
+ ddeutil/workflow/job.py,sha256=XcewyALsLYYq94ycF6mkj3Ydr6if683z7t1oBqEVInE,24290
+ ddeutil/workflow/params.py,sha256=svCjmFgEhim8yFJVjZhFmKP8JqTDHQ5EPhwJHVuDGno,5289
+ ddeutil/workflow/result.py,sha256=k4pcj5KjbEcEPymsEUXeGY4gyLMfPkMTO6YDrAtfk7Q,3408
+ ddeutil/workflow/scheduler.py,sha256=OlrnBZvVttoymeY1g-on9icEMU729OWISJReeX3jAKI,20452
+ ddeutil/workflow/stage.py,sha256=wn8CARTvFJY4ZK1SwjzH8sKoMRz_eIeSGUMgnDWNi6g,24031
+ ddeutil/workflow/templates.py,sha256=bVU_8gnMQmdhhw3W28ZqwmpEaOx10Nx_aauqiLS0lqg,10807
+ ddeutil/workflow/utils.py,sha256=8LTqpvRPfrEYxsxhwszk6GKkyjrswxnwF3r_9vE8szw,6059
+ ddeutil/workflow/workflow.py,sha256=vuy0Q3ceslBth04qbslXrp5NAQQ7XfpOochwgORzQ4Q,42349
+ ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
+ ddeutil/workflow/api/api.py,sha256=hmH45GtpyZ-kbiqQNmnHgjwEiCiDDXLKTGvcNa5nFos,4041
+ ddeutil/workflow/api/repeat.py,sha256=zyvsrXKk-3-_N8ZRZSki0Mueshugum2jtqctEOp9QSc,4927
+ ddeutil/workflow/api/route.py,sha256=v96jNbgjM1cJ2MpVSRWs2kgRqF8DQElEBdRZrVFEpEw,8578
+ ddeutil_workflow-0.0.29.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ ddeutil_workflow-0.0.29.dist-info/METADATA,sha256=UXsjCGddPiksHRAByDfUcsYAsGIqAbL1qJ87uQKWCVQ,14801
+ ddeutil_workflow-0.0.29.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ ddeutil_workflow-0.0.29.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ ddeutil_workflow-0.0.29.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.7.0)
+ Generator: setuptools (75.8.0)
  Root-Is-Purelib: true
  Tag: py3-none-any