ddeutil-workflow 0.0.42__py3-none-any.whl → 0.0.44__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -35,10 +35,10 @@ from .__types import DictData, TupleStr
  from .conf import Loader, SimLoad, dynamic, get_logger
  from .cron import On
  from .exceptions import JobException, WorkflowException
- from .job import Job, TriggerState
+ from .job import Job
  from .logs import Audit, get_audit
  from .params import Param
- from .result import Result, Status
+ from .result import FAILED, SKIP, SUCCESS, WAIT, Result, Status
  from .reusables import has_template, param2template
  from .utils import (
      gen_id,
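
The import hunk above drives most of the changes that follow: `TriggerState` is gone from `job.py`, and `result.py` now exports `FAILED`, `SKIP`, `SUCCESS`, and `WAIT` alongside the `Status` enum. A minimal sketch of the new calling convention, assuming (as the later hunks suggest, e.g. `status: Status = FAILED`) that the constants are plain `Status` values:

```python
from ddeutil.workflow.result import FAILED, SUCCESS, Result, Status

def finish(result: Result, ok: bool) -> Result:
    # 0.0.42 spelled this result.catch(status=Status.SUCCESS, ...);
    # 0.0.44 uses the bare constants, as in every hunk below.
    status: Status = SUCCESS if ok else FAILED
    return result.catch(status=status, context={})
```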
@@ -673,7 +673,7 @@ class Workflow(BaseModel):
  )

  return result.catch(
-     status=Status.SUCCESS,
+     status=SUCCESS,
      context={
          "params": params,
          "release": {
@@ -804,7 +804,7 @@ class Workflow(BaseModel):
  result.trace.info(
      f"[POKING]: {self.name!r} does not have any schedule to run."
  )
- return result.catch(status=Status.SUCCESS, context={"outputs": []})
+ return result.catch(status=SUCCESS, context={"outputs": []})

  # NOTE: Create the current date that change microsecond to 0
  current_date: datetime = datetime.now(
@@ -850,7 +850,7 @@ class Workflow(BaseModel):
  result.trace.info(
      f"[POKING]: {self.name!r} does not have any queue."
  )
- return result.catch(status=Status.SUCCESS, context={"outputs": []})
+ return result.catch(status=SUCCESS, context={"outputs": []})

  # NOTE: Start create the thread pool executor for running this poke
  # process.
@@ -912,7 +912,7 @@ class Workflow(BaseModel):
  context.append(future.result())

  return result.catch(
-     status=Status.SUCCESS,
+     status=SUCCESS,
      context={"outputs": context},
  )

@@ -981,17 +981,17 @@ class Workflow(BaseModel):
      ).context,
      to=params,
  )
- except JobException as err:
-     result.trace.error(f"[WORKFLOW]: {err.__class__.__name__}: {err}")
+ except JobException as e:
+     result.trace.error(f"[WORKFLOW]: {e.__class__.__name__}: {e}")
      if raise_error:
          raise WorkflowException(
-             f"Get job execution error {job_id}: JobException: {err}"
+             f"Get job execution error {job_id}: JobException: {e}"
          ) from None
      raise NotImplementedError(
          "Handle error from the job execution does not support yet."
      ) from None

- return result.catch(status=Status.SUCCESS, context=params)
+ return result.catch(status=SUCCESS, context=params)

  def execute(
      self,
@@ -1042,16 +1042,12 @@ class Workflow(BaseModel):
  )

  result.trace.info(f"[WORKFLOW]: Start Execute: {self.name!r} ...")
-
- # NOTE: It should not do anything if it does not have job.
  if not self.jobs:
      result.trace.warning(
          f"[WORKFLOW]: {self.name!r} does not have any jobs"
      )
-     return result.catch(status=Status.SUCCESS, context=params)
+     return result.catch(status=SUCCESS, context=params)

- # NOTE: Create a job queue that keep the job that want to run after
- # its dependency condition.
  jq: Queue = Queue()
  for job_id in self.jobs:
      jq.put(job_id)
@@ -1065,7 +1061,7 @@ class Workflow(BaseModel):
  # }
  #
  context: DictData = self.parameterize(params)
- status: Status = Status.SUCCESS
+ status: Status = SUCCESS
  try:
      if (
          dynamic(
@@ -1090,9 +1086,9 @@ class Workflow(BaseModel):
          timeout=timeout,
          event=event,
      )
- except WorkflowException as err:
-     status = Status.FAILED
-     context.update({"errors": err.to_dict()})
+ except WorkflowException as e:
+     status: Status = FAILED
+     context.update({"errors": e.to_dict()})

  return result.catch(status=status, context=context)

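Together with the `status: Status = SUCCESS` hunk just above, this shows the reporting pattern `execute` now uses: assume success, downgrade to `FAILED` when a `WorkflowException` escapes, and stash the details under an `"errors"` key. A hedged sketch of the same shape, where `run_jobs` is a hypothetical stand-in for the internal job loop:

```python
from ddeutil.workflow.exceptions import WorkflowException
from ddeutil.workflow.result import FAILED, SUCCESS, Status

def execute_with_status(run_jobs, context: dict) -> Status:
    status: Status = SUCCESS
    try:
        run_jobs()  # hypothetical stand-in for the job-queue loop
    except WorkflowException as e:
        status = FAILED
        context.update({"errors": e.to_dict()})  # to_dict() as shown in the hunk
    return status
```
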
@@ -1142,31 +1138,22 @@ class Workflow(BaseModel):
  job_id: str = job_queue.get()
  job: Job = self.jobs[job_id]

- if (check := job.check_needs(context["jobs"])).is_waiting():
+ if (check := job.check_needs(context["jobs"])) == WAIT:
      job_queue.task_done()
      job_queue.put(job_id)
      time.sleep(0.15)
      continue
- elif check == TriggerState.failed: # pragma: no cov
+ elif check == FAILED: # pragma: no cov
      raise WorkflowException(
-         "Check job trigger rule was failed."
+         f"Validate job trigger rule was failed with "
+         f"{job.trigger_rule.value!r}."
      )
- elif check == TriggerState.skipped: # pragma: no cov
+ elif check == SKIP: # pragma: no cov
      result.trace.info(f"[JOB]: Skip job: {job_id!r}")
      job.set_outputs({"SKIP": {"skipped": True}}, to=context)
      job_queue.task_done()
      continue

- # NOTE: Start workflow job execution with deep copy context data
- # before release.
- #
- # Context:
- # ---
- # {
- #     'params': <input-params>,
- #     'jobs': { <job's-id>: ... },
- # }
- #
  futures.append(
      executor.submit(
          self.execute_job,
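
`Job.check_needs()` previously returned a `TriggerState` with an `.is_waiting()` helper; in 0.0.44 the hunk compares its result directly against the `WAIT`, `FAILED`, and `SKIP` constants. A sketch of that dispatch pulled out of the loop, with the requeue callable left hypothetical:

```python
from ddeutil.workflow.exceptions import WorkflowException
from ddeutil.workflow.job import Job
from ddeutil.workflow.result import FAILED, SKIP, WAIT

def dispatch(job: Job, job_id: str, context: dict, requeue) -> None:
    check = job.check_needs(context["jobs"])
    if check == WAIT:
        requeue(job_id)  # hypothetical: put the job back and retry later
    elif check == FAILED:
        raise WorkflowException(
            f"Validate job trigger rule was failed with "
            f"{job.trigger_rule.value!r}."
        )
    elif check == SKIP:
        # Record the skip in the shared context and drop the job.
        job.set_outputs({"SKIP": {"skipped": True}}, to=context)
```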
@@ -1177,19 +1164,14 @@ class Workflow(BaseModel):
          ),
      )

-     # NOTE: Mark this job queue done.
      job_queue.task_done()

  if not_timeout_flag:
-
-     # NOTE: Wait for all items to finish processing by `task_done()`
-     # method.
      job_queue.join()
-
      for future in as_completed(futures, timeout=thread_timeout):
-         if err := future.exception():
-             result.trace.error(f"[WORKFLOW]: {err}")
-             raise WorkflowException(str(err))
+         if e := future.exception():
+             result.trace.error(f"[WORKFLOW]: {e}")
+             raise WorkflowException(str(e))

          future.result()

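Aside from the `err` → `e` rename, the harvest loop is unchanged: each completed future is checked for a stored exception before its result is consumed. A self-contained illustration of the pattern (the `work` task is made up):

```python
from concurrent.futures import ThreadPoolExecutor, as_completed

def work(n: int) -> int:
    return n * n  # made-up stand-in for Workflow.execute_job

with ThreadPoolExecutor(max_workers=2) as executor:
    futures = [executor.submit(work, n) for n in range(4)]
    for future in as_completed(futures, timeout=10):
        if e := future.exception():
            raise RuntimeError(str(e))  # mirrors the WorkflowException re-raise
        print(future.result())
```
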
@@ -1235,78 +1217,74 @@ class Workflow(BaseModel):
      "max_job_exec_timeout", f=timeout, extras=self.extras
  )
  event: Event = event or Event()
- future: Future | None = None
  result.trace.debug(f"[WORKFLOW]: Run {self.name!r} with non-threading.")
-
- executor = ThreadPoolExecutor(
+ with ThreadPoolExecutor(
      max_workers=1,
      thread_name_prefix="wf_exec_non_threading_",
- )
+ ) as executor:
+     future: Future | None = None

- while not job_queue.empty() and (
-     not_timeout_flag := ((time.monotonic() - ts) < timeout)
- ):
-     job_id: str = job_queue.get()
-     job: Job = self.jobs[job_id]
+     while not job_queue.empty() and (
+         not_timeout_flag := ((time.monotonic() - ts) < timeout)
+     ):
+         job_id: str = job_queue.get()
+         job: Job = self.jobs[job_id]

-     if (check := job.check_needs(context["jobs"])).is_waiting():
-         job_queue.task_done()
-         job_queue.put(job_id)
-         time.sleep(0.075)
-         continue
-     elif check == TriggerState.failed: # pragma: no cov
-         raise WorkflowException("Check job trigger rule was failed.")
-     elif check == TriggerState.skipped: # pragma: no cov
-         result.trace.info(f"[JOB]: Skip job: {job_id!r}")
-         job.set_outputs({"SKIP": {"skipped": True}}, to=context)
-         job_queue.task_done()
-         continue
+         if (check := job.check_needs(context["jobs"])) == WAIT:
+             job_queue.task_done()
+             job_queue.put(job_id)
+             time.sleep(0.075)
+             continue
+         elif check == FAILED:
+             raise WorkflowException(
+                 f"Validate job trigger rule was failed with "
+                 f"{job.trigger_rule.value!r}."
+             )
+         elif check == SKIP: # pragma: no cov
+             result.trace.info(f"[JOB]: Skip job: {job_id!r}")
+             job.set_outputs({"SKIP": {"skipped": True}}, to=context)
+             job_queue.task_done()
+             continue

-     # NOTE: Start workflow job execution with deep copy context data
-     # before release. This job execution process will run until
-     # done before checking all execution timeout or not.
-     #
-     # {
-     #     'params': <input-params>,
-     #     'jobs': {},
-     # }
-     if future is None:
-         future: Future = executor.submit(
-             self.execute_job,
-             job_id=job_id,
-             params=context,
-             result=result,
-             event=event,
-         )
-         result.trace.debug(f"[WORKFLOW]: Make future: {future}")
-         time.sleep(0.025)
-     elif future.done():
-         if err := future.exception():
-             result.trace.error(f"[WORKFLOW]: {err}")
-             raise WorkflowException(str(err))
-
-         future = None
-         job_queue.put(job_id)
-     elif future.running():
-         time.sleep(0.075)
-         job_queue.put(job_id)
-     else: # pragma: no cov
-         job_queue.put(job_id)
-         result.trace.debug(
-             f"Execution non-threading does not handle case: {future} "
-             f"that not running."
-         )
+         if future is None:
+             future: Future = executor.submit(
+                 self.execute_job,
+                 job_id=job_id,
+                 params=context,
+                 result=result,
+                 event=event,
+             )
+             time.sleep(0.025)
+         elif future.done():
+             if e := future.exception():
+                 result.trace.error(f"[WORKFLOW]: {e}")
+                 raise WorkflowException(str(e))

-     job_queue.task_done()
+             future = None
+             job_queue.put(job_id)
+         elif future.running():
+             time.sleep(0.075)
+             job_queue.put(job_id)
+         else: # pragma: no cov
+             job_queue.put(job_id)
+             result.trace.debug(
+                 f"Execution non-threading does not handle case: {future} "
+                 f"that not running."
+             )
+
+         job_queue.task_done()

- if not_timeout_flag:
-     job_queue.join()
-     executor.shutdown()
-     return context
+     if not_timeout_flag:
+         job_queue.join()
+         return context
+
+     result.trace.error(
+         f"[WORKFLOW]: Execution: {self.name!r} was timeout."
+     )
+     event.set()
+     if future:
+         future.cancel()

- result.trace.error(f"[WORKFLOW]: Execution: {self.name!r} was timeout.")
- event.set()
- executor.shutdown()
  raise WorkflowException(f"Execution: {self.name!r} was timeout.")

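The main refactor in this hunk swaps a manually managed executor for a `with ThreadPoolExecutor(...) as executor:` block, removing the duplicated `executor.shutdown()` calls: `__exit__` shuts the pool down on every path, including the timeout path that now also cancels the pending future. A minimal demonstration of why the context manager is the safer shape (the failing task is made up):

```python
from concurrent.futures import Future, ThreadPoolExecutor

def flaky() -> int:
    raise RuntimeError("boom")  # made-up task for the demo

# No explicit shutdown() needed on either the success or the error path.
with ThreadPoolExecutor(max_workers=1) as executor:
    future: Future = executor.submit(flaky)
    if e := future.exception():  # blocks until the task settles
        print(f"worker failed: {e}")
```
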
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.42
+ Version: 0.0.44
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -22,7 +22,7 @@ Classifier: Programming Language :: Python :: 3.13
  Requires-Python: >=3.9.13
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: ddeutil>=0.4.6
+ Requires-Dist: ddeutil[checksum]>=0.4.6
  Requires-Dist: ddeutil-io[toml,yaml]>=0.2.10
  Requires-Dist: pydantic==2.11.1
  Requires-Dist: python-dotenv==1.1.0
@@ -212,7 +212,7 @@ execution time such as `run-date` should change base on that workflow running da
  ```python
  from ddeutil.workflow import Workflow, Result

- workflow: Workflow = Workflow.from_loader('run-py-local')
+ workflow: Workflow = Workflow.from_conf('run-py-local')
  result: Result = workflow.execute(
      params={"source-extract": "USD-THB", "asat-dt": "2024-01-01"}
  )
@@ -246,7 +246,7 @@ from ddeutil.workflow import Schedule

  (
      Schedule
-     .from_loader("schedule-run-local-wf")
+     .from_conf("schedule-run-local-wf")
      .pending(stop=None)
  )
  ```
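
Both README snippets track the same rename: the `from_loader` constructor is now `from_conf` on `Workflow` and `Schedule`. A migration sketch, assuming a `run-py-local` template exists under the configured `conf` path:

```python
from ddeutil.workflow import Result, Workflow

# 0.0.42: Workflow.from_loader('run-py-local')
workflow: Workflow = Workflow.from_conf('run-py-local')
result: Result = workflow.execute(
    params={"source-extract": "USD-THB", "asat-dt": "2024-01-01"}
)
```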
@@ -261,31 +261,31 @@ it will use default value and do not raise any error to you.
  > The config value that you will set on the environment should combine with
  > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).

- | Name | Component | Default | Description |
- |:-----|:---------:|:--------|:------------|
- | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
- | **REGISTRY_CALLER** | Core | `.` | List of importable string for the call stage. |
- | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
- | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
- | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
- | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
- | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
- | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
- | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
- | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
- | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
- | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
- | **TRACE_PATH** | Log | `./logs` | The log path of the workflow saving log. |
- | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
- | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
- | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
- | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
- | **TRACE_ENABLE_WRITE** | Log | `false` | |
- | **AUDIT_PATH** | Log | `./audits` | |
- | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
- | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
- | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
- | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
+ | Name | Component | Default | Override | Description |
+ |:-----|:---------:|:--------|:--------:|:------------|
+ | **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
+ | **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
+ | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
+ | **CONF_PATH** | Core | `conf` | No | The config path that keep all template `.yaml` files. |
+ | **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
+ | **STAGE_DEFAULT_ID** | Core | `true` | No | A flag that enable default stage ID that use for catch an execution output. |
+ | **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
+ | **JOB_DEFAULT_ID** | Core | `false` | No | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+ | **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
+ | **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
+ | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
+ | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | No | A flog that enable generating ID with `md5` algorithm. |
+ | **DEBUG_MODE** | Log | `true` | No | A flag that enable logging with debug level mode. |
+ | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | No | |
+ | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | No | |
+ | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | No | |
+ | **TRACE_PATH** | Log | `./logs` | No | The log path of the workflow saving log. |
+ | **TRACE_ENABLE_WRITE** | Log | `false` | No | |
+ | **AUDIT_PATH** | Log | `./audits` | No | |
+ | **AUDIT_ENABLE_WRITE** | Log | `true` | No | A flag that enable logging object saving log to its destination. |
+ | **MAX_PROCESS** | App | `2` | No | The maximum process worker number that run in scheduler app module. |
+ | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | No | A schedule per process that run parallel. |
+ | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | No | A time delta value that use to stop scheduler app in json string format. |

  **API Application**:

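Per the quoted naming rule, a config value is set through an environment variable named `WORKFLOW_{component}_{name}` in upper case. A small example using one of the Core rows above (the value shown is illustrative, not a new default):

```python
import os

# STAGE_RAISE_ERROR belongs to the Core component, so the variable is:
os.environ["WORKFLOW_CORE_STAGE_RAISE_ERROR"] = "true"
# Shell equivalent: export WORKFLOW_CORE_STAGE_RAISE_ERROR=true
```
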
@@ -1,19 +1,19 @@
- ddeutil/workflow/__about__.py,sha256=B3KiwXw9ATZmMG1vER6qdPImMLkQPPjYkRvYepuIhF4,28
+ ddeutil/workflow/__about__.py,sha256=babneIKueqHV5Z7O62u5nVOWXWHGZSyS5egKApTAA1I,28
  ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
- ddeutil/workflow/__init__.py,sha256=cYWwG2utpsYvdwqvkFSRWi_Q6gylDgNQBcIWcF5NFs4,1861
+ ddeutil/workflow/__init__.py,sha256=m7ZTCuUOarcTKJuXOyuaXd5WTIO7NTkqCeCrNX3d5i8,1943
  ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4316
  ddeutil/workflow/conf.py,sha256=lDzWiVSNlNAhTzxbNIhIbQAIF1ggbmetAp0yn2fgnsc,12385
  ddeutil/workflow/cron.py,sha256=80SijzMdDOBxTWRsiF-Fmuz7Ym7leY0XT2lzRAPGdXc,8781
- ddeutil/workflow/exceptions.py,sha256=fO37f9p7lOjIJgVOpKE_1X44yJTwBepyukZV9a7NNm4,1241
- ddeutil/workflow/job.py,sha256=vsayKMzwKDpjchgYQnbshZHnp-vuM9CobpFWhUJETRU,30315
+ ddeutil/workflow/exceptions.py,sha256=uLNxzav3HRcr4vaZnvbUIF_eTR6UXXZNaxroMWFOUL4,1418
+ ddeutil/workflow/job.py,sha256=NgEPgMAUL2mqDctLpdoEVx4g0ZsTj4RmkQluCh_ZUdM,30614
  ddeutil/workflow/logs.py,sha256=RkM5o_JPoWhFY7NrbYAARZQWjLC62YB_FYzTTcyDp8U,19816
- ddeutil/workflow/params.py,sha256=Mv-D2DY5inm1ug0lsgCPDkO5wT_AUhc5XEF5jxgDx6U,8036
- ddeutil/workflow/result.py,sha256=ynZB0g_vEEXn24034J-hatjNWDBmRAj38S8SqGRM-8I,4029
+ ddeutil/workflow/params.py,sha256=xCtFEh0-G-G-f8y_SXxyf31bU6Ox5p5Z-WbBFXrjy8M,9960
+ ddeutil/workflow/result.py,sha256=iwkUzOubxhLCuO-ngWEWL6t-CpYBpINIIO_ubg4kz14,4701
  ddeutil/workflow/reusables.py,sha256=AtZO83HDFu1uK_azUinv5d8jsA36f2i3n_tqMrolbvc,17529
- ddeutil/workflow/scheduler.py,sha256=wFEgcnxtgF-8y5otv8RqT1MuBttZl7mu-bBu5ffwV_Y,27534
- ddeutil/workflow/stages.py,sha256=prw1-za1zwYehbrjeAnoJ79GxpfTqdKLsI2PY0OuSlY,48417
+ ddeutil/workflow/scheduler.py,sha256=_MDsEHbBVOeF-381U8DfIMDyca_nG3XNXmgX4229_EU,27437
+ ddeutil/workflow/stages.py,sha256=cvSNt4IjbE4O9llCIL2bojnl4W50GHNa1ANQ4oGefUo,47438
  ddeutil/workflow/utils.py,sha256=sblje9qOtejCHVt8EVrbC0KY98vKqvxccaR5HIkRiTA,7363
- ddeutil/workflow/workflow.py,sha256=Y1D5arh2KSobkIZGJ1fWSTe15heURi9OhhdfIr0jHyo,50591
+ ddeutil/workflow/workflow.py,sha256=NxuSQqoNvDTKiOVLambTBD847BJW-748dm7YgfjGPoA,49731
  ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
  ddeutil/workflow/api/api.py,sha256=b-bMg0aRsEqt8Qb2hNUtamEt2Fq2CgNotF2oXSAdDu8,5226
  ddeutil/workflow/api/log.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
@@ -23,8 +23,8 @@ ddeutil/workflow/api/routes/job.py,sha256=YVta083i8vU8-o4WdKFwDpfdC9vN1dZ6goZSmN
  ddeutil/workflow/api/routes/logs.py,sha256=TeRDrEelbKS2Hu_EovgLh0bOdmSv9mfnrIZsrE7uPD4,5353
  ddeutil/workflow/api/routes/schedules.py,sha256=rUWBm5RgLS1PNBHSWwWXJ0l-c5mYWfl9os0BA9_OTEw,4810
  ddeutil/workflow/api/routes/workflows.py,sha256=ctgQGxXfpIV6bHFDM9IQ1_qaQHT6n5-HjJ1-D4GKWpc,4527
- ddeutil_workflow-0.0.42.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
- ddeutil_workflow-0.0.42.dist-info/METADATA,sha256=TJp1M40eLXYOInkTpl_XhFOWGHLd0hIHktXQXiFsmEw,18853
- ddeutil_workflow-0.0.42.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- ddeutil_workflow-0.0.42.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
- ddeutil_workflow-0.0.42.dist-info/RECORD,,
+ ddeutil_workflow-0.0.44.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ ddeutil_workflow-0.0.44.dist-info/METADATA,sha256=PwXbwLjDtntF2uyvDbbARjc5lvgR1yYYkDvGWNVqb4A,19134
+ ddeutil_workflow-0.0.44.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ ddeutil_workflow-0.0.44.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ ddeutil_workflow-0.0.44.dist-info/RECORD,,