ddeutil-workflow 0.0.53__py3-none-any.whl → 0.0.54__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/utils.py CHANGED
@@ -24,6 +24,7 @@ from .__types import DictData, Matrix
 
 T = TypeVar("T")
 UTC: Final[ZoneInfo] = ZoneInfo("UTC")
+NEWLINE: Final[str] = "\n\t| ...\t"
 
 
 def replace_sec(dt: datetime) -> datetime:
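The new `NEWLINE` constant is a tab-indented separator that `workflow.py` (later in this diff) uses to lay out multi-line handler messages in trace logs, as in `f"[WORKFLOW]: Handler:{NEWLINE}{e}"`. A minimal, standalone sketch of how a message renders with it:

```python
# Standalone sketch; the value matches the constant added above.
NEWLINE = "\n\t| ...\t"

err = "Job 'first-job' raise JobException: stage failed"  # hypothetical message
print(f"[WORKFLOW]: Handler:{NEWLINE}{err}")
# [WORKFLOW]: Handler:
# <tab>| ...<tab>Job 'first-job' raise JobException: stage failed
```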
@@ -200,7 +201,7 @@ def filter_func(value: T) -> T:
 def cross_product(matrix: Matrix) -> Iterator[DictData]:
     """Iterator of products value from matrix.
 
-    :param matrix:
+    :param matrix: (Matrix)
 
     :rtype: Iterator[DictData]
     """
@@ -223,7 +224,7 @@ def batch(iterable: Iterator[Any] | range, n: int) -> Iterator[Any]:
     ['G']
 
     :param iterable:
-    :param n:
+    :param n: (int) A number of returning batch size.
 
     :rtype: Iterator[Any]
     """
@@ -247,8 +248,8 @@ def cut_id(run_id: str, *, num: int = 6) -> str:
     >>> cut_id(run_id='20240101081330000000T1354680202')
     '202401010813680202'
 
-    :param run_id: A running ID That want to cut
-    :param num:
+    :param run_id: (str) A running ID That want to cut.
+    :param num: (int) A number of cutting length.
 
     :rtype: str
     """
ddeutil/workflow/workflow.py CHANGED
@@ -4,11 +4,11 @@
 # license information.
 # ------------------------------------------------------------------------------
 # [x] Use dynamic config
-"""A Workflow module that is the core module of this package. It keeps Release
-and Workflow Pydantic models.
+"""Workflow module is the core module of this Workflow package. It keeps
+Release, ReleaseQueue, and Workflow Pydantic models.
 
-    I will implement timeout on the workflow execution layer only because the
-main propose of this package in Workflow model.
+This package implement timeout strategy on the workflow execution layer only
+because the main propose of this package is using Workflow to be orchestrator.
 """
 from __future__ import annotations
 
@@ -43,9 +43,10 @@ from .exceptions import JobException, UtilException, WorkflowException
 from .job import Job
 from .logs import Audit, get_audit
 from .params import Param
-from .result import FAILED, SKIP, SUCCESS, WAIT, Result, Status
+from .result import FAILED, SKIP, SUCCESS, WAIT, Result
 from .reusables import has_template, param2template
 from .utils import (
+    NEWLINE,
     gen_id,
     get_dt_now,
     reach_next_minute,
@@ -462,7 +463,7 @@ class Workflow(BaseModel):
         if isinstance(on, str):
             on: list[str] = [on]
         if any(not isinstance(i, (dict, str)) for i in on):
-            raise TypeError("The ``on`` key should be list of str or dict")
+            raise TypeError("The `on` key should be list of str or dict")
 
         # NOTE: Pass on value to SimLoad and keep on model object to the on
         #   field.
@@ -602,8 +603,8 @@ class Workflow(BaseModel):
             ...     "jobs": {}
             ... }
 
-        :param params: A parameter mapping that receive from workflow execution.
-        :type params: DictData
+        :param params: (DictData) A parameter data that receive from workflow
+            execute method.
 
         :raise WorkflowException: If parameter value that want to validate does
             not include the necessary parameter that had required flag.
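As the docstring's example shows, `parameterize` validates the caller's input against the `params` field and wraps it in the context that every job execution receives. A hedged usage sketch (`wf-run-common` is a hypothetical template name, and `Workflow.from_conf` is assumed to be the loader at this version):

```python
from ddeutil.workflow import Workflow

workflow = Workflow.from_conf("wf-run-common")
context = workflow.parameterize({"name": "foo"})
# Per the docstring above, the returned context has the shape:
#   {"params": {"name": "foo"}, "jobs": {}}
# where "jobs" is filled in as each job finishes.
```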
@@ -623,7 +624,7 @@ class Workflow(BaseModel):
                 f"{', '.join(check_key)}."
             )
 
-        # NOTE: Mapping type of param before adding it to the ``params`` key.
+        # NOTE: Mapping type of param before adding it to the `params` key.
         return {
             "params": (
                 params
@@ -969,22 +970,19 @@ class Workflow(BaseModel):
             execution to the target job object via job's ID.
 
             This execution is the minimum level of execution of this workflow
-        model. It different with ``self.execute`` because this method run only
+        model. It different with `self.execute` because this method run only
         one job and return with context of this job data.
 
         :raise WorkflowException: If execute with not exist job's ID.
         :raise WorkflowException: If the job execution raise JobException.
 
-        :param job_id: A job ID that want to execute.
-        :param params: A params that was parameterized from workflow execution.
-        :param result: (Result) A result object for keeping context and status
-            data.
-        :param event: (Event) An event manager that pass to the
-            PoolThreadExecutor.
+        :param job_id: A job ID.
+        :param params: (DictData) A parameter data.
+        :param result: (Result) A Result instance for return context and status.
+        :param event: (Event) An Event manager instance that use to cancel this
+            execution if it forces stopped by parent execution.
 
         :rtype: Result
-        :return: Return the result object that receive the job execution result
-            context.
         """
         if result is None:  # pragma: no cov
             result: Result = Result(run_id=gen_id(self.name, unique=True))
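`execute_job` now documents `event` as a cancellation signal from the parent execution rather than a pass-through to the thread pool. A hedged caller-side sketch (the workflow and job names are placeholders, and `Workflow.from_conf` is assumed to be the loader at this version):

```python
from threading import Event

from ddeutil.workflow import Workflow

workflow = Workflow.from_conf("wf-run-common")  # hypothetical template name
event = Event()  # set() this from the parent to force-cancel the job

rs = workflow.execute_job(
    job_id="first-job",                         # hypothetical job ID
    params=workflow.parameterize({"name": "foo"}),
    event=event,
)
print(rs.status, rs.context["jobs"].get("first-job"))
```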
@@ -1020,13 +1018,12 @@ class Workflow(BaseModel):
         except (JobException, UtilException) as e:
             result.trace.error(f"[WORKFLOW]: {e.__class__.__name__}: {e}")
             raise WorkflowException(
-                f"Get job execution error {job_id}: JobException: {e}"
+                f"Job {job_id!r} raise {e.__class__.__name__}: {e}"
             ) from None
 
         if rs.status == FAILED:
-            error_msg: str = (
-                f"Workflow job, {job.id}, failed without raise error."
-            )
+            error_msg: str = f"Workflow job, {job.id!r}, return FAILED status."
+            result.trace.warning(f"[WORKFLOW]: {error_msg}")
             return result.catch(
                 status=FAILED,
                 context={
@@ -1042,37 +1039,42 @@ class Workflow(BaseModel):
         *,
         run_id: str | None = None,
         parent_run_id: str | None = None,
-        timeout: int = 600,
         result: Result | None = None,
-        max_job_parallel: int = 2,
         event: Event | None = None,
+        timeout: int = 3600,
+        max_job_parallel: int = 2,
     ) -> Result:
         """Execute workflow with passing a dynamic parameters to all jobs that
-        included in this workflow model with ``jobs`` field.
+        included in this workflow model with `jobs` field.
 
             The result of execution process for each job and stages on this
         workflow will keep in dict which able to catch out with all jobs and
         stages by dot annotation.
 
-            For example, when I want to use the output from previous stage, I
-        can access it with syntax:
+            For example with non-strategy job, when I want to use the output
+        from previous stage, I can access it with syntax:
+
+            ... ${job-id}.stages.${stage-id}.outputs.${key}
+            ... ${job-id}.stages.${stage-id}.errors.${key}
 
-            ... ${job-name}.stages.${stage-id}.outputs.${key}
-            ... ${job-name}.stages.${stage-id}.errors.${key}
+            But example for strategy job:
+
+            ... ${job-id}.strategies.${strategy-id}.stages.${stage-id}.outputs.${key}
+            ... ${job-id}.strategies.${strategy-id}.stages.${stage-id}.errors.${key}
 
         :param params: An input parameters that use on workflow execution that
-            will parameterize before using it. Default is None.
-        :param run_id: A workflow running ID for this job execution.
-        :param parent_run_id: A parent workflow running ID for this release.
+            will parameterize before using it.
+        :param run_id: (str | None) A workflow running ID.
+        :param parent_run_id: (str | None) A parent workflow running ID.
+        :param result: (Result) A Result instance for return context and status.
+        :param event: (Event) An Event manager instance that use to cancel this
+            execution if it forces stopped by parent execution.
         :param timeout: (int) A workflow execution time out in second unit that
             use for limit time of execution and waiting job dependency. This
             value does not force stop the task that still running more than this
-            limit time. (default: 0)
-        :param result: (Result) A result object for keeping context and status
-            data.
-        :param max_job_parallel: (int) The maximum threads of job execution.
-        :param event: (Event) An event manager that pass to the
-            PoolThreadExecutor.
+            limit time. (Default: 60 * 60 seconds)
+        :param max_job_parallel: (int) The maximum workers that use for job
+            execution in `PoolThreadExecutor` object. (Default: 2 workers)
 
         :rtype: Result
         """
  """
@@ -1084,95 +1086,28 @@ class Workflow(BaseModel):
             id_logic=self.name,
             extras=self.extras,
         )
-
+        context: DictData = self.parameterize(params)
         result.trace.info(f"[WORKFLOW]: Execute: {self.name!r} ...")
         if not self.jobs:
-            result.trace.warning(
-                f"[WORKFLOW]: {self.name!r} does not have any jobs"
-            )
-            return result.catch(status=SUCCESS, context=params)
+            result.trace.warning(f"[WORKFLOW]: {self.name!r} does not set jobs")
+            return result.catch(status=SUCCESS, context=context)
 
-        jq: Queue = Queue()
+        job_queue: Queue = Queue()
         for job_id in self.jobs:
-            jq.put(job_id)
-
-        # NOTE: Create data context that will pass to any job executions
-        #   on this workflow.
-        #
-        #   {
-        #       'params': <input-params>,
-        #       'jobs': {},
-        #   }
-        #
-        context: DictData = self.parameterize(params)
-        status: Status = SUCCESS
-        try:
-            if (
-                dynamic(
-                    "max_job_parallel", f=max_job_parallel, extras=self.extras
-                )
-                == 1
-            ):
-                self.__exec_non_threading(
-                    result=result,
-                    context=context,
-                    ts=ts,
-                    job_queue=jq,
-                    timeout=timeout,
-                    event=event,
-                )
-            else:
-                self.__exec_threading(
-                    result=result,
-                    context=context,
-                    ts=ts,
-                    job_queue=jq,
-                    timeout=timeout,
-                    event=event,
-                )
-        except (WorkflowException, JobException) as e:
-            status: Status = FAILED
-            context.update({"errors": e.to_dict()})
-
-        return result.catch(status=status, context=context)
-
-    def __exec_threading(
-        self,
-        result: Result,
-        context: DictData,
-        ts: float,
-        job_queue: Queue,
-        *,
-        timeout: int = 600,
-        thread_timeout: int = 1800,
-        event: Event | None = None,
-    ) -> DictData:
-        """Workflow execution by threading strategy that use multithreading.
-
-            If a job need dependency, it will check dependency job ID from
-        context data before allow it run.
-
-        :param result: (Result) A result model.
-        :param context: A context workflow data that want to downstream passing.
-        :param ts: A start timestamp that use for checking execute time should
-            time out.
-        :param job_queue: (Queue) A job queue object.
-        :param timeout: (int) A second value unit that bounding running time.
-        :param thread_timeout: A timeout to waiting all futures complete.
-        :param event: (Event) An event manager that pass to the
-            PoolThreadExecutor.
+            job_queue.put(job_id)
 
-        :rtype: DictData
-        """
         not_timeout_flag: bool = True
         timeout: int = dynamic(
             "max_job_exec_timeout", f=timeout, extras=self.extras
         )
         event: Event = event or Event()
-        result.trace.debug(f"... Run {self.name!r} with threading.")
+        result.trace.debug(f"... Run {self.name!r} with non-threading.")
+        max_job_parallel: int = dynamic(
+            "max_job_parallel", f=max_job_parallel, extras=self.extras
+        )
         with ThreadPoolExecutor(
-            max_workers=dynamic("max_job_parallel", extras=self.extras),
-            thread_name_prefix="wf_exec_threading_",
+            max_workers=max_job_parallel,
+            thread_name_prefix="wf_exec_non_threading_",
         ) as executor:
             futures: list[Future] = []
1113
 
@@ -1188,9 +1123,14 @@ class Workflow(BaseModel):
                     time.sleep(0.15)
                     continue
                 elif check == FAILED:  # pragma: no cov
-                    raise WorkflowException(
-                        f"Validate job trigger rule was failed with "
-                        f"{job.trigger_rule.value!r}."
+                    return result.catch(
+                        status=FAILED,
+                        context={
+                            "errors": WorkflowException(
+                                f"Validate job trigger rule was failed with "
+                                f"{job.trigger_rule.value!r}."
+                            ).to_dict()
+                        },
                     )
                 elif check == SKIP:  # pragma: no cov
                     result.trace.info(f"[JOB]: Skip job: {job_id!r}")
@@ -1198,119 +1138,42 @@ class Workflow(BaseModel):
                     job_queue.task_done()
                     continue
 
-                futures.append(
-                    executor.submit(
-                        self.execute_job,
-                        job_id=job_id,
-                        params=context,
-                        result=result,
-                        event=event,
-                    ),
-                )
-
-                job_queue.task_done()
-
-            if not_timeout_flag:
-                job_queue.join()
-                for future in as_completed(futures, timeout=thread_timeout):
-                    if e := future.exception():
-                        result.trace.error(f"[WORKFLOW]: {e}")
-                        raise WorkflowException(str(e))
-
-                    future.result()
-
-                return context
-
-            result.trace.error(
-                f"[WORKFLOW]: Execution: {self.name!r} was timeout."
-            )
-            event.set()
-            for future in futures:
-                future.cancel()
-
-            raise WorkflowException(f"Execution: {self.name!r} was timeout.")
-
-    def __exec_non_threading(
-        self,
-        result: Result,
-        context: DictData,
-        ts: float,
-        job_queue: Queue,
-        *,
-        timeout: int = 600,
-        event: Event | None = None,
-    ) -> DictData:
-        """Workflow execution with non-threading strategy that use sequential
-        job running and waiting previous job was run successful.
-
-            If a job need dependency, it will check dependency job ID from
-        context data before allow it run.
-
-        :param result: (Result) A result model.
-        :param context: A context workflow data that want to downstream passing.
-        :param ts: (float) A start timestamp that use for checking execute time
-            should time out.
-        :param timeout: (int) A second value unit that bounding running time.
-        :param event: (Event) An event manager that pass to the
-            PoolThreadExecutor.
-
-        :rtype: DictData
-        """
-        not_timeout_flag: bool = True
-        timeout: int = dynamic(
-            "max_job_exec_timeout", f=timeout, extras=self.extras
-        )
-        event: Event = event or Event()
-        result.trace.debug(f"... Run {self.name!r} with non-threading.")
-        with ThreadPoolExecutor(
-            max_workers=1,
-            thread_name_prefix="wf_exec_non_threading_",
-        ) as executor:
-            future: Optional[Future] = None
-
-            while not job_queue.empty() and (
-                not_timeout_flag := ((time.monotonic() - ts) < timeout)
-            ):
-                job_id: str = job_queue.get()
-                job: Job = self.job(name=job_id)
-
-                if (check := job.check_needs(context["jobs"])) == WAIT:
-                    job_queue.task_done()
-                    job_queue.put(job_id)
-                    time.sleep(0.075)
-                    continue
-                elif check == FAILED:
-                    raise WorkflowException(
-                        f"Validate job trigger rule was failed with "
-                        f"{job.trigger_rule.value!r}."
+                if max_job_parallel > 1:
+                    futures.append(
+                        executor.submit(
+                            self.execute_job,
+                            job_id=job_id,
+                            params=context,
+                            result=result,
+                            event=event,
+                        ),
                     )
-                elif check == SKIP:  # pragma: no cov
-                    result.trace.info(f"[JOB]: Skip job: {job_id!r}")
-                    job.set_outputs(output={"skipped": True}, to=context)
                     job_queue.task_done()
                     continue
 
-                if future is None:
-                    future: Future = executor.submit(
-                        self.execute_job,
-                        job_id=job_id,
-                        params=context,
-                        result=result,
-                        event=event,
+                if len(futures) < 1:
+                    futures.append(
+                        executor.submit(
+                            self.execute_job,
+                            job_id=job_id,
+                            params=context,
+                            result=result,
+                            event=event,
+                        )
                     )
                     time.sleep(0.025)
-                elif future.done() or future.cancelled():
+                elif (future := futures.pop(0)).done() or future.cancelled():
                     if e := future.exception():
                         result.trace.error(f"[WORKFLOW]: {e}")
                         raise WorkflowException(str(e))
-
-                    future = None
                     job_queue.put(job_id)
                 elif future.running() or "state=pending" in str(future):
                     time.sleep(0.075)
+                    futures.insert(0, future)
                     job_queue.put(job_id)
                 else:  # pragma: no cov
                     job_queue.put(job_id)
+                    futures.insert(0, future)
                     result.trace.warning(
                         f"... Execution non-threading not handle: {future}."
                     )
@@ -1319,23 +1182,35 @@ class Workflow(BaseModel):
 
             if not_timeout_flag:
                 job_queue.join()
-                if future:  # pragma: no cov
-                    if e := future.exception():
-                        result.trace.error(f"[WORKFLOW]: {e}")
-                        raise WorkflowException(str(e))
-
-                    future.result()
-
-                return context
+                for future in as_completed(futures):
+                    try:
+                        future.result()
+                    except WorkflowException as e:
+                        result.trace.error(f"[WORKFLOW]: Handler:{NEWLINE}{e}")
+                        return result.catch(
+                            status=FAILED,
+                            context={
+                                "errors": WorkflowException(str(e)).to_dict()
+                            },
+                        )
+                return result.catch(
+                    status=FAILED if "errors" in result.context else SUCCESS,
+                    context=context,
+                )
 
-            result.trace.error(
-                f"[WORKFLOW]: Execution: {self.name!r} was timeout."
-            )
+            result.trace.error(f"[WORKFLOW]: {self.name!r} was timeout.")
             event.set()
-            if future:
+            for future in futures:
                 future.cancel()
 
-            raise WorkflowException(f"Execution: {self.name!r} was timeout.")
+            return result.catch(
+                status=FAILED,
+                context={
+                    "errors": WorkflowException(
+                        f"{self.name!r} was timeout."
+                    ).to_dict()
+                },
+            )
 
 
 @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
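Net effect of these hunks: `execute` no longer raises `WorkflowException` on a failed trigger rule, a failed job, or a timeout; every path now returns a `Result` whose status is `FAILED` with the error packed into `context["errors"]`. A hedged caller-side sketch of the new contract (`wf-run-common` is a hypothetical template name, and `Workflow.from_conf` is assumed to be the loader at this version):

```python
from ddeutil.workflow import Workflow
from ddeutil.workflow.result import FAILED

workflow = Workflow.from_conf("wf-run-common")
rs = workflow.execute(params={"name": "foo"}, timeout=3600)

if rs.status == FAILED:
    # Failures and timeouts now arrive as data, not as raised exceptions.
    print(rs.context["errors"])
```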
{ddeutil_workflow-0.0.53.dist-info → ddeutil_workflow-0.0.54.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.53
+Version: 0.0.54
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
62
62
  > use the workflow stage to process any large volume data which use a lot of compute
63
63
  > resource :cold_sweat:.
64
64
 
65
- In my opinion, I think it should not create duplicate workflow codes if I can
66
- write with dynamic input parameters on the one template workflow that just change
67
- the input parameters per use-case instead.
68
- This way I can handle a lot of logical workflows in our orgs with only metadata
69
- configuration. It called **Metadata Driven Data Workflow**.
70
-
71
65
  ---
72
66
 
73
67
  **:pushpin: <u>Rules of This Workflow engine</u>**:
{ddeutil_workflow-0.0.53.dist-info → ddeutil_workflow-0.0.54.dist-info}/RECORD RENAMED
@@ -1,4 +1,4 @@
-ddeutil/workflow/__about__.py,sha256=fOQi-49Q8-qLVO7us5t2StkrubZvI2LQkyYSQi-3P88,28
+ddeutil/workflow/__about__.py,sha256=KTbXC0Wvt30Z6UCFcSzVAD697wqCc-kgyHEuDu_xEtY,28
 ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
 ddeutil/workflow/__init__.py,sha256=noE8LNRcgq32m9OnIFcQqh0P7PXWdp-SGmvBCYIXgf4,1338
 ddeutil/workflow/__main__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -6,26 +6,26 @@ ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4
 ddeutil/workflow/conf.py,sha256=80rgmJKFU7BlH5xTLnghGzGhE8C6LFAQykd9mjHSjo8,12528
 ddeutil/workflow/cron.py,sha256=WS2MInn0Sp5DKlZDZH5VFZ5AA0Q3_AnBnYEU4lZSv4I,9779
 ddeutil/workflow/exceptions.py,sha256=r4Jrf9qtVPALU4wh4bnb_OYqC-StqSQJEmFC-_QK934,1408
-ddeutil/workflow/job.py,sha256=Z1XP_9pj-RY64z3G4LYX-MppS99zQns9wtZy7zHuWbE,35262
+ddeutil/workflow/job.py,sha256=uoj6grIq1f0iV5RFtQaysyel6z9keYr-urkkhFzyhcI,33831
 ddeutil/workflow/logs.py,sha256=rsoBrUGQrooou18fg2yvPsB8NOaXnUA5ThQpBr_WVMg,26598
 ddeutil/workflow/params.py,sha256=FKY4Oo1Ze4QZKRfAk7rqKsi44YaJQAbqAtXM6vlO2hI,11392
 ddeutil/workflow/result.py,sha256=27nPQq9CETLCVczv4vvFEF9w2TllHZ_ROfyDoLFxRWM,5647
 ddeutil/workflow/reusables.py,sha256=iXcS7Gg-71qVX4ln0ILTDx03cTtUnj_rNoXHTVdVrxc,17636
 ddeutil/workflow/scheduler.py,sha256=4G5AogkmnsTKe7jKYSfU35qjubR82WQ8CLtEe9kqPTE,28304
-ddeutil/workflow/stages.py,sha256=ZsGh8Wd-NqdAZC5cyJ6wXuF-UHqoCcFFedXvyHssSqc,72473
-ddeutil/workflow/utils.py,sha256=zbVttaMFMRLuuBJdSJf7D9qtz8bOnQIBq-rHI3Eqy4M,7821
-ddeutil/workflow/workflow.py,sha256=2ZBNW3-vcP8bpKrK184wSCukq3wpT6G0z25Su5bapR0,50832
+ddeutil/workflow/stages.py,sha256=131pGqE5RhhDo9aLy6KTXGrvVLFT3UIwr9fS3tVqoZQ,80466
+ddeutil/workflow/utils.py,sha256=NZPvPPP_5g4cigFcD7tHjIKLtKMeYAcb3oUhNyhTpJ0,7947
+ddeutil/workflow/workflow.py,sha256=-7M3HdxOpuPpXeRF8oWhDh_S8anX3ivSoKlOsKnsh6c,46942
 ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
-ddeutil/workflow/api/api.py,sha256=CWtPLgOv2Jus9E7nzG5mG2Z32ZEkUK3JWQ2htZyMRpA,5244
-ddeutil/workflow/api/log.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
-ddeutil/workflow/api/repeat.py,sha256=uTtUFVLpiYYahXvCVx8sueRQ03K2Xw1id_gW3IMmX1U,5295
+ddeutil/workflow/api/api.py,sha256=xLrQ8yD7iOn-MkzaSxG-BADbdkqLikDna630oW3YEmc,5243
+ddeutil/workflow/api/logs.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
+ddeutil/workflow/api/utils.py,sha256=uTtUFVLpiYYahXvCVx8sueRQ03K2Xw1id_gW3IMmX1U,5295
 ddeutil/workflow/api/routes/__init__.py,sha256=qoGtOMyVgQ5nTUc8J8wH27A8isaxl3IFCX8qoyibeCY,484
-ddeutil/workflow/api/routes/job.py,sha256=oPwBVP0Mxwxv-bGPlfmxQQ9PcVl0ev9HoPzndpYDCCQ,1954
+ddeutil/workflow/api/routes/job.py,sha256=GbDFmToksKsvPHn9KPXhoOCznzEPRTe79jUn7ew0snE,1953
 ddeutil/workflow/api/routes/logs.py,sha256=U6vOni3wd-ZTOwd3yVdSOpgyRmNdcgfngU5KlLM3Cww,5383
 ddeutil/workflow/api/routes/schedules.py,sha256=EgUjyRGhsm6UNaMj5luh6TcY6l571sCHcla-BL1iOfY,4829
 ddeutil/workflow/api/routes/workflows.py,sha256=JcDOrn1deK8ztFRcMTNATQejG6KMA7JxZLVc4QeBsP4,4527
-ddeutil_workflow-0.0.53.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
-ddeutil_workflow-0.0.53.dist-info/METADATA,sha256=xNgYIlFQvS9VsF0agSPsYbJWin_s9c_llkRFnEUxyC0,19425
-ddeutil_workflow-0.0.53.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-ddeutil_workflow-0.0.53.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
-ddeutil_workflow-0.0.53.dist-info/RECORD,,
+ddeutil_workflow-0.0.54.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.54.dist-info/METADATA,sha256=uVgjeBlD6MEq6XVs4pPNEfhiTuf44TrUzpESmy_8ADw,19081
+ddeutil_workflow-0.0.54.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ddeutil_workflow-0.0.54.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.54.dist-info/RECORD,,
The remaining dist-info files (LICENSE, WHEEL, top_level.txt) were renamed for 0.0.54 without content changes; their sha256 entries in RECORD are identical across both versions.