ddeutil-workflow 0.0.43__py3-none-any.whl → 0.0.44__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/job.py +65 -50
- ddeutil/workflow/stages.py +9 -5
- ddeutil/workflow/workflow.py +70 -92
- {ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.44.dist-info}/METADATA +1 -1
- {ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.44.dist-info}/RECORD +9 -9
- {ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.44.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.44.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.44.dist-info}/top_level.txt +0 -0
ddeutil/workflow/__about__.py
CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.43"
+__version__: str = "0.0.44"
ddeutil/workflow/job.py
CHANGED
@@ -38,6 +38,7 @@ from .exceptions import (
     JobException,
     StageException,
     UtilException,
+    to_dict,
 )
 from .result import FAILED, SKIP, SUCCESS, WAIT, Result, Status
 from .reusables import has_template, param2template
@@ -415,6 +416,7 @@ class Job(BaseModel):
         need_exist: dict[str, Any] = {
             need: jobs[need] for need in self.needs if need in jobs
         }
+
         if len(need_exist) != len(self.needs):
             return WAIT
         elif all("skipped" in need_exist[job] for job in need_exist):
@@ -630,19 +632,6 @@ def local_execute_strategy(
     result: Result = Result(run_id=gen_id(job.id or "not-set", unique=True))

     strategy_id: str = gen_id(strategy)
-
-    # PARAGRAPH:
-    #
-    #     Create strategy execution context and update a matrix and copied
-    #     of params. So, the context value will have structure like;
-    #
-    #     {
-    #         "params": { ... },      <== Current input params
-    #         "jobs": { ... },        <== Current input params
-    #         "matrix": { ... }       <== Current strategy value
-    #         "stages": { ... }       <== Catching stage outputs
-    #     }
-    #
     context: DictData = copy.deepcopy(params)
     context.update({"matrix": strategy, "stages": {}})
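The removed comment documented the shape of that per-strategy context. As a quick illustration of the two lines that remain (the values here are placeholders, not real workflow data):

```python
import copy

# Placeholder inputs standing in for the params passed to local_execute_strategy.
params = {"params": {"name": "demo"}, "jobs": {}}
strategy = {"python-version": "3.11"}  # one generated matrix combination

# Mirrors the two kept lines: deep-copy the params, then attach the current
# matrix value and an empty "stages" holder for catching stage outputs.
context = copy.deepcopy(params)
context.update({"matrix": strategy, "stages": {}})

# context == {"params": {...}, "jobs": {...}, "matrix": {...}, "stages": {}}
```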
@@ -650,7 +639,6 @@ def local_execute_strategy(
     result.trace.info(f"[JOB]: Execute Strategy ID: {strategy_id}")
     result.trace.info(f"[JOB]: ... Matrix: {strategy_id}")

-    # IMPORTANT: The stage execution only run sequentially one-by-one.
     for stage in job.stages:

         if stage.is_skipped(params=context):
@@ -674,34 +662,30 @@ def local_execute_strategy(
             },
         )

-        # PARAGRAPH:
-        #
-        #     This step will add the stage result to `stages` key in that
-        #     stage id. It will have structure like;
-        #
-        #     {
-        #         "params": { ... },
-        #         "jobs": { ... },
-        #         "matrix": { ... },
-        #         "stages": { { "stage-id-01": { "outputs": { ... } } }, ... }
-        #     }
-        #
-        # IMPORTANT:
-        #     This execution change all stage running IDs to the current job
-        #     running ID, but it still trac log to the same parent running ID
-        #     (with passing `run_id` and `parent_run_id` to the stage
-        #     execution arguments).
-        #
         try:
-            stage.set_outputs(
-                stage.handler_execute(
-                    params=context,
-                    run_id=result.run_id,
-                    parent_run_id=result.parent_run_id,
-                    event=event,
-                ).context,
-                to=context,
+            rs: Result = stage.handler_execute(
+                params=context,
+                run_id=result.run_id,
+                parent_run_id=result.parent_run_id,
+                event=event,
             )
+            stage.set_outputs(rs.context, to=context)
+            if rs.status == FAILED:
+                error_msg: str = (
+                    f"Job strategy was break because it has a stage, "
+                    f"{stage.iden}, failed without raise error."
+                )
+                return result.catch(
+                    status=FAILED,
+                    context={
+                        strategy_id: {
+                            "matrix": strategy,
+                            "stages": context.pop("stages", {}),
+                            "errors": JobException(error_msg).to_dict(),
+                        },
+                    },
+                )
+
         except (StageException, UtilException) as err:
             result.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
             do_raise: bool = dynamic(
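The substantive change in this hunk: a stage that reports FAILED in its Result without raising now breaks the strategy loop and is recorded as a job error. A minimal sketch of that control flow, using simplified stand-ins for the package's Result and stage types:

```python
from dataclasses import dataclass, field
from typing import Any

FAILED = "FAILED"
SUCCESS = "SUCCESS"

@dataclass
class Result:
    """Simplified stand-in for ddeutil.workflow.result.Result."""
    status: str = SUCCESS
    context: dict[str, Any] = field(default_factory=dict)

def handler_execute(ok: bool) -> Result:
    # Stand-in for stage.handler_execute(...): a failing stage may report
    # FAILED in its Result without raising any exception.
    return Result(status=SUCCESS if ok else FAILED, context={"outputs": {}})

context: dict[str, Any] = {"stages": {}}
for iden, ok in [("stage-01", True), ("stage-02", False), ("stage-03", True)]:
    rs = handler_execute(ok)
    context["stages"][iden] = rs.context   # stage.set_outputs(rs.context, to=context)
    if rs.status == FAILED:                # new in 0.0.44
        print(f"break: stage {iden!r} failed without raise error")
        break
# stage-03 is never reached once stage-02 reports FAILED.
```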
@@ -746,8 +730,8 @@ def local_execute(
     raise_error: bool | None = None,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
-    execution. It will generate matrix values at the first
-    multithread on this metrics to the `stages` field of this job.
+    execution or itself execution. It will generate matrix values at the first
+    step and run multithread on this metrics to the `stages` field of this job.

     This method does not raise any JobException if it runs with
     multi-threading strategy.
@@ -798,7 +782,7 @@ def local_execute(
             raise_error=raise_error,
         )

-        return result.catch(status=SUCCESS)
+        return result.catch(status=result.status)

     fail_fast_flag: bool = job.strategy.fail_fast
     ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
@@ -818,8 +802,6 @@ def local_execute(
             },
         )

-    # IMPORTANT: Start running strategy execution by multithreading because
-    # it will run by strategy values without waiting previous execution.
     with ThreadPoolExecutor(
         max_workers=job.strategy.max_parallel,
         thread_name_prefix="job_strategy_exec_",
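The removed comment described the fan-out that this `with ThreadPoolExecutor(...)` block performs. For reference, the standard-library pattern it builds on looks roughly like this; `run_strategy` and the matrix values are invented for the sketch:

```python
from concurrent.futures import FIRST_EXCEPTION, ThreadPoolExecutor, wait

def run_strategy(matrix: dict) -> dict:
    if matrix.get("boom"):
        raise RuntimeError(f"strategy {matrix} failed")
    return matrix

matrices = [{"i": 0}, {"i": 1, "boom": True}, {"i": 2}]

with ThreadPoolExecutor(max_workers=2, thread_name_prefix="job_strategy_exec_") as pool:
    futures = [pool.submit(run_strategy, m) for m in matrices]
    # Fail-fast mode: stop waiting at the first raised exception and cancel
    # whatever has not started yet; all-completed mode would drain them all.
    done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
    for f in not_done:
        f.cancel()
```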
@@ -885,6 +867,22 @@ def self_hosted_execute(
     event: Event | None = None,
     raise_error: bool | None = None,
 ) -> Result:  # pragma: no cov
+    """Self-Hosted job execution with passing dynamic parameters from the
+    workflow execution or itself execution. It will make request to the
+    self-hosted host url.
+
+    :param job: (Job) A job model that want to execute.
+    :param params: (DictData) An input parameters that use on job execution.
+    :param run_id: (str) A job running ID for this execution.
+    :param parent_run_id: (str) A parent workflow running ID for this release.
+    :param result: (Result) A result object for keeping context and status
+        data.
+    :param event: (Event) An event manager that pass to the PoolThreadExecutor.
+    :param raise_error: (bool) A flag that all this method raise error to the
+        strategy execution.
+
+    :rtype: Result
+    """
     result: Result = Result.construct_with_rs_or_id(
         result,
         run_id=run_id,
@@ -893,14 +891,31 @@ def self_hosted_execute(
     )

     if event and event.is_set():
-        return result.catch(
+        return result.catch(
+            status=FAILED,
+            context={
+                "errors": JobException(
+                    "Job self-hosted execution was canceled from event that "
+                    "had set before start execution."
+                ).to_dict()
+            },
+        )

     import requests

-    resp = requests.post(
-        job.runs_on.args.host,
-        data={"job": job.model_dump(), "params": params},
-    )
+    try:
+        resp = requests.post(
+            job.runs_on.args.host,
+            headers={"Auth": f"Barer {job.runs_on.args.token}"},
+            data={
+                "job": job.model_dump(),
+                "params": params,
+                "result": result.__dict__,
+                "raise_error": raise_error,
+            },
+        )
+    except requests.exceptions.RequestException as e:
+        return result.catch(status=FAILED, context={"errors": to_dict(e)})

     if resp.status_code != 200:
         do_raise: bool = dynamic(
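With the new `try`/`except`, a network failure against the self-hosted host becomes a FAILED result instead of an unhandled exception. The same pattern in a standalone sketch; the endpoint URL is hypothetical and `to_dict` is a simplified stand-in for the package's serializer:

```python
import requests  # third-party HTTP client used by the diff above

def to_dict(exc: Exception) -> dict:
    # Simplified stand-in for ddeutil.workflow.exceptions.to_dict.
    return {"name": exc.__class__.__name__, "message": str(exc)}

host = "http://localhost:9999/job/execute"  # hypothetical runner endpoint

try:
    resp = requests.post(host, json={"job": {}, "params": {}}, timeout=5)
except requests.exceptions.RequestException as e:
    # Mirrors the 0.0.44 behavior: connection errors are captured into the
    # result context rather than propagating out of the executor.
    print({"status": "FAILED", "context": {"errors": to_dict(e)}})
else:
    print(resp.status_code)
```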
ddeutil/workflow/stages.py
CHANGED
@@ -223,10 +223,11 @@ class BaseStage(BaseModel, ABC):
             ) from e

         errors: DictData = {"errors": to_dict(e)}
+        return (
+            self.set_outputs(errors, to=to)
+            if to is not None
+            else result.catch(status=FAILED, context=errors)
+        )

     def set_outputs(self, output: DictData, to: DictData) -> DictData:
         """Set an outputs from execution process to the received context. The
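The net effect of this hunk: when `handler_execute` catches a stage error, it either writes the error dict into the caller's context (when `to` is given) or folds it into its own Result. A reduced sketch of that branch with stand-in types:

```python
FAILED = "FAILED"

def to_dict(exc: Exception) -> dict:
    # Simplified stand-in for ddeutil.workflow.exceptions.to_dict.
    return {"name": exc.__class__.__name__, "message": str(exc)}

def on_error(exc: Exception, to: dict | None, result: dict) -> dict:
    errors = {"errors": to_dict(exc)}
    # Same conditional as the diff: prefer writing into the caller's context
    # when one was handed in, otherwise catch onto the stage's own result.
    if to is not None:
        to.update(errors)  # stand-in for self.set_outputs(errors, to=to)
        return to
    result.update({"status": FAILED, **errors})  # stand-in for result.catch(...)
    return result

print(on_error(ValueError("boom"), to={"stages": {}}, result={}))
print(on_error(ValueError("boom"), to=None, result={}))
```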
@@ -326,7 +327,10 @@ class BaseAsyncStage(BaseStage):
         *,
         result: Result | None = None,
         event: Event | None = None,
-    ) -> Result: ...
+    ) -> Result:
+        raise NotImplementedError(
+            "Async Stage should implement `execute` method."
+        )

     @abstractmethod
     async def axecute(
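After this change, calling the synchronous `execute` on an async stage fails loudly instead of silently. A cut-down sketch of the subclassing contract; `EchoStage` is invented for illustration:

```python
import asyncio
from abc import ABC, abstractmethod

class BaseAsyncStage(ABC):
    """Cut-down stand-in for ddeutil.workflow.stages.BaseAsyncStage."""

    def execute(self, params: dict) -> dict:
        # As of 0.0.44 the sync entrypoint raises immediately; async stages
        # are driven through `axecute` instead.
        raise NotImplementedError("Async Stage should implement `execute` method.")

    @abstractmethod
    async def axecute(self, params: dict) -> dict: ...

class EchoStage(BaseAsyncStage):
    async def axecute(self, params: dict) -> dict:
        await asyncio.sleep(0)  # stand-in for real async stage work
        return {"outputs": {"echo": params}}

print(asyncio.run(EchoStage().axecute({"msg": "hi"})))
```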
ddeutil/workflow/workflow.py
CHANGED
@@ -981,11 +981,11 @@ class Workflow(BaseModel):
                 ).context,
                 to=params,
             )
-        except JobException as err:
-            result.trace.error(f"[WORKFLOW]: {err.__class__.__name__}: {err}")
+        except JobException as e:
+            result.trace.error(f"[WORKFLOW]: {e.__class__.__name__}: {e}")
             if raise_error:
                 raise WorkflowException(
-                    f"Get job execution error {job_id}: JobException: {err}"
+                    f"Get job execution error {job_id}: JobException: {e}"
                 ) from None
             raise NotImplementedError(
                 "Handle error from the job execution does not support yet."
@@ -1042,16 +1042,12 @@ class Workflow(BaseModel):
         )

         result.trace.info(f"[WORKFLOW]: Start Execute: {self.name!r} ...")
-
-        # NOTE: It should not do anything if it does not have job.
         if not self.jobs:
             result.trace.warning(
                 f"[WORKFLOW]: {self.name!r} does not have any jobs"
             )
             return result.catch(status=SUCCESS, context=params)

-        # NOTE: Create a job queue that keep the job that want to run after
-        # its dependency condition.
         jq: Queue = Queue()
         for job_id in self.jobs:
             jq.put(job_id)
@@ -1090,9 +1086,9 @@ class Workflow(BaseModel):
                 timeout=timeout,
                 event=event,
             )
-        except WorkflowException as err:
-            status = FAILED
-            context.update({"errors": err.to_dict()})
+        except WorkflowException as e:
+            status: Status = FAILED
+            context.update({"errors": e.to_dict()})

         return result.catch(status=status, context=context)
@@ -1149,7 +1145,8 @@ class Workflow(BaseModel):
                     continue
                 elif check == FAILED:  # pragma: no cov
                     raise WorkflowException(
-                        "
+                        f"Validate job trigger rule was failed with "
+                        f"{job.trigger_rule.value!r}."
                     )
                 elif check == SKIP:  # pragma: no cov
                     result.trace.info(f"[JOB]: Skip job: {job_id!r}")
@@ -1157,16 +1154,6 @@ class Workflow(BaseModel):
                     job_queue.task_done()
                     continue

-                # NOTE: Start workflow job execution with deep copy context data
-                # before release.
-                #
-                # Context:
-                # ---
-                # {
-                #     'params': <input-params>,
-                #     'jobs': { <job's-id>: ... },
-                # }
-                #
                 futures.append(
                     executor.submit(
                         self.execute_job,
@@ -1177,19 +1164,14 @@ class Workflow(BaseModel):
                     ),
                 )

-                # NOTE: Mark this job queue done.
                 job_queue.task_done()

             if not_timeout_flag:
-
-                # NOTE: Wait for all items to finish processing by `task_done()`
-                # method.
                 job_queue.join()
-
                 for future in as_completed(futures, timeout=thread_timeout):
-                    if err := future.exception():
-                        result.trace.error(f"[WORKFLOW]: {err}")
-                        raise WorkflowException(str(err))
+                    if e := future.exception():
+                        result.trace.error(f"[WORKFLOW]: {e}")
+                        raise WorkflowException(str(e))

                     future.result()
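The rewritten loop uses the walrus operator to pull an exception out of each finished future before re-raising it wrapped. The same shape in a self-contained snippet; `work` and its failure are invented, and RuntimeError stands in for WorkflowException:

```python
from concurrent.futures import ThreadPoolExecutor, as_completed

def work(n: int) -> int:
    if n == 2:
        raise ValueError("job 2 exploded")
    return n * n

with ThreadPoolExecutor(max_workers=3) as pool:
    futures = [pool.submit(work, n) for n in range(4)]
    for future in as_completed(futures, timeout=10):
        # The walrus binds the exception (if any) from the finished future,
        # logs it, and re-raises it wrapped, exactly as in the diff above.
        if e := future.exception():
            print(f"[WORKFLOW]: {e}")
            raise RuntimeError(str(e))
        future.result()
```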
@@ -1235,78 +1217,74 @@ class Workflow(BaseModel):
             "max_job_exec_timeout", f=timeout, extras=self.extras
         )
         event: Event = event or Event()
-        future: Future | None = None
         result.trace.debug(f"[WORKFLOW]: Run {self.name!r} with non-threading.")
-
-        executor = ThreadPoolExecutor(
+        with ThreadPoolExecutor(
             max_workers=1,
             thread_name_prefix="wf_exec_non_threading_",
-        )
+        ) as executor:
+            future: Future | None = None

-        while not job_queue.empty() and (
-            not_timeout_flag := ((time.monotonic() - ts) < timeout)
-        ):
-            job_id: str = job_queue.get()
-            job: Job = self.jobs[job_id]
+            while not job_queue.empty() and (
+                not_timeout_flag := ((time.monotonic() - ts) < timeout)
+            ):
+                job_id: str = job_queue.get()
+                job: Job = self.jobs[job_id]

-            if (check := job.check_needs(context["jobs"])) == WAIT:
-                job_queue.task_done()
-                job_queue.put(job_id)
-                time.sleep(0.075)
-                continue
-            elif check == FAILED:
-                raise WorkflowException(
-                    f"Validate job trigger rule was failed with "
-                    f"{job.trigger_rule.value!r}."
-                )
-            elif check == SKIP:  # pragma: no cov
-                result.trace.info(f"[JOB]: Skip job: {job_id!r}")
-                job.set_outputs({"SKIP": {"skipped": True}}, to=context)
-                job_queue.task_done()
-                continue
+                if (check := job.check_needs(context["jobs"])) == WAIT:
+                    job_queue.task_done()
+                    job_queue.put(job_id)
+                    time.sleep(0.075)
+                    continue
+                elif check == FAILED:
+                    raise WorkflowException(
+                        f"Validate job trigger rule was failed with "
+                        f"{job.trigger_rule.value!r}."
+                    )
+                elif check == SKIP:  # pragma: no cov
+                    result.trace.info(f"[JOB]: Skip job: {job_id!r}")
+                    job.set_outputs({"SKIP": {"skipped": True}}, to=context)
+                    job_queue.task_done()
+                    continue

-            if future is None:
-                future: Future = executor.submit(
-                    self.execute_job,
-                    job_id=job_id,
-                    params=context,
-                    result=result,
-                    event=event,
-                )
-                time.sleep(0.025)
-            elif future.done():
-                if err := future.exception():
-                    result.trace.error(f"[WORKFLOW]: {err}")
-                    raise WorkflowException(str(err))
-
-                future = None
-                job_queue.put(job_id)
-            elif future.running():
-                time.sleep(0.075)
-                job_queue.put(job_id)
-            else:  # pragma: no cov
-                job_queue.put(job_id)
-                result.trace.debug(
-                    f"Execution non-threading does not handle case: {future} "
-                    f"that not running."
-                )
+                if future is None:
+                    future: Future = executor.submit(
+                        self.execute_job,
+                        job_id=job_id,
+                        params=context,
+                        result=result,
+                        event=event,
+                    )
+                    time.sleep(0.025)
+                elif future.done():
+                    if e := future.exception():
+                        result.trace.error(f"[WORKFLOW]: {e}")
+                        raise WorkflowException(str(e))
+
+                    future = None
+                    job_queue.put(job_id)
+                elif future.running():
+                    time.sleep(0.075)
+                    job_queue.put(job_id)
+                else:  # pragma: no cov
+                    job_queue.put(job_id)
+                    result.trace.debug(
+                        f"Execution non-threading does not handle case: {future} "
+                        f"that not running."
+                    )

-
+                job_queue.task_done()
+
+            if not_timeout_flag:
+                job_queue.join()
+                return context

-        if not_timeout_flag:
-            job_queue.join()
-            return context
+            result.trace.error(
+                f"[WORKFLOW]: Execution: {self.name!r} was timeout."
+            )
+            event.set()
+            if future:
+                future.cancel()

-        result.trace.error(f"[WORKFLOW]: Execution: {self.name!r} was timeout.")
-        event.set()
-        executor.shutdown()
         raise WorkflowException(f"Execution: {self.name!r} was timeout.")
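Stripped of workflow specifics, the rewritten non-threading path is a single-worker polling scheduler: submit at most one job at a time and re-queue anything fetched while it is still running. A self-contained sketch of that shape; `fake_job` and the job IDs are invented:

```python
import time
from concurrent.futures import Future, ThreadPoolExecutor
from queue import Queue

def fake_job(job_id: str) -> str:
    time.sleep(0.1)  # stand-in for self.execute_job(...)
    return job_id

job_queue: Queue = Queue()
for jid in ("first", "second"):
    job_queue.put(jid)

# One worker, at most one in-flight future: a job fetched while another is
# still running is pushed back and polled again after a short sleep.
with ThreadPoolExecutor(max_workers=1) as executor:
    future: Future | None = None
    while not job_queue.empty():
        job_id = job_queue.get()
        if future is None:
            future = executor.submit(fake_job, job_id)
            time.sleep(0.025)
        elif future.done():
            print("done:", future.result())
            future = None
            job_queue.put(job_id)  # re-queue: this job was not submitted
        elif future.running():
            time.sleep(0.075)
            job_queue.put(job_id)
        job_queue.task_done()
    if future:
        print("done:", future.result())
```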
{ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.44.dist-info}/RECORD
CHANGED
@@ -1,19 +1,19 @@
-ddeutil/workflow/__about__.py,sha256=
+ddeutil/workflow/__about__.py,sha256=babneIKueqHV5Z7O62u5nVOWXWHGZSyS5egKApTAA1I,28
 ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
 ddeutil/workflow/__init__.py,sha256=m7ZTCuUOarcTKJuXOyuaXd5WTIO7NTkqCeCrNX3d5i8,1943
 ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4316
 ddeutil/workflow/conf.py,sha256=lDzWiVSNlNAhTzxbNIhIbQAIF1ggbmetAp0yn2fgnsc,12385
 ddeutil/workflow/cron.py,sha256=80SijzMdDOBxTWRsiF-Fmuz7Ym7leY0XT2lzRAPGdXc,8781
 ddeutil/workflow/exceptions.py,sha256=uLNxzav3HRcr4vaZnvbUIF_eTR6UXXZNaxroMWFOUL4,1418
-ddeutil/workflow/job.py,sha256=
+ddeutil/workflow/job.py,sha256=NgEPgMAUL2mqDctLpdoEVx4g0ZsTj4RmkQluCh_ZUdM,30614
 ddeutil/workflow/logs.py,sha256=RkM5o_JPoWhFY7NrbYAARZQWjLC62YB_FYzTTcyDp8U,19816
 ddeutil/workflow/params.py,sha256=xCtFEh0-G-G-f8y_SXxyf31bU6Ox5p5Z-WbBFXrjy8M,9960
 ddeutil/workflow/result.py,sha256=iwkUzOubxhLCuO-ngWEWL6t-CpYBpINIIO_ubg4kz14,4701
 ddeutil/workflow/reusables.py,sha256=AtZO83HDFu1uK_azUinv5d8jsA36f2i3n_tqMrolbvc,17529
 ddeutil/workflow/scheduler.py,sha256=_MDsEHbBVOeF-381U8DfIMDyca_nG3XNXmgX4229_EU,27437
-ddeutil/workflow/stages.py,sha256=
+ddeutil/workflow/stages.py,sha256=cvSNt4IjbE4O9llCIL2bojnl4W50GHNa1ANQ4oGefUo,47438
 ddeutil/workflow/utils.py,sha256=sblje9qOtejCHVt8EVrbC0KY98vKqvxccaR5HIkRiTA,7363
-ddeutil/workflow/workflow.py,sha256=
+ddeutil/workflow/workflow.py,sha256=NxuSQqoNvDTKiOVLambTBD847BJW-748dm7YgfjGPoA,49731
 ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
 ddeutil/workflow/api/api.py,sha256=b-bMg0aRsEqt8Qb2hNUtamEt2Fq2CgNotF2oXSAdDu8,5226
 ddeutil/workflow/api/log.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
@@ -23,8 +23,8 @@ ddeutil/workflow/api/routes/job.py,sha256=YVta083i8vU8-o4WdKFwDpfdC9vN1dZ6goZSmN
 ddeutil/workflow/api/routes/logs.py,sha256=TeRDrEelbKS2Hu_EovgLh0bOdmSv9mfnrIZsrE7uPD4,5353
 ddeutil/workflow/api/routes/schedules.py,sha256=rUWBm5RgLS1PNBHSWwWXJ0l-c5mYWfl9os0BA9_OTEw,4810
 ddeutil/workflow/api/routes/workflows.py,sha256=ctgQGxXfpIV6bHFDM9IQ1_qaQHT6n5-HjJ1-D4GKWpc,4527
-ddeutil_workflow-0.0.43.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
-ddeutil_workflow-0.0.43.dist-info/METADATA,sha256=
-ddeutil_workflow-0.0.43.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-ddeutil_workflow-0.0.43.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
-ddeutil_workflow-0.0.43.dist-info/RECORD,,
+ddeutil_workflow-0.0.44.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.44.dist-info/METADATA,sha256=PwXbwLjDtntF2uyvDbbARjc5lvgR1yYYkDvGWNVqb4A,19134
+ddeutil_workflow-0.0.44.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ddeutil_workflow-0.0.44.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.44.dist-info/RECORD,,
{ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.44.dist-info}/WHEEL
File without changes
{ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.44.dist-info}/licenses/LICENSE
File without changes
{ddeutil_workflow-0.0.43.dist-info → ddeutil_workflow-0.0.44.dist-info}/top_level.txt
File without changes