ddeutil-workflow 0.0.69__py3-none-any.whl → 0.0.70__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/__about__.py CHANGED
@@ -1 +1 @@
- __version__: str = "0.0.69"
+ __version__: str = "0.0.70"
ddeutil/workflow/cli.py CHANGED
@@ -1,17 +1,21 @@
  import json
  from pathlib import Path
  from platform import python_version
- from typing import Annotated, Any, Optional
+ from typing import Annotated, Any, Literal, Optional, Union

  import typer
  import uvicorn
+ from pydantic import Field, TypeAdapter

  from .__about__ import __version__
  from .__types import DictData
  from .api import app as fastapp
  from .errors import JobError
+ from .event import Crontab
  from .job import Job
+ from .params import Param
  from .result import Result
+ from .workflow import Workflow

  app = typer.Typer(
      pretty_exceptions_enable=True,
@@ -129,5 +133,51 @@ def workflow_execute():
      """"""


+ WORKFLOW_TYPE = Literal["Workflow"]
+
+
+ class WorkflowSchema(Workflow):
+     """Override workflow model fields for generate JSON schema file."""
+
+     type: WORKFLOW_TYPE = Field(description="A type of workflow template.")
+     name: Optional[str] = Field(default=None, description="A workflow name.")
+     params: dict[str, Union[Param, str]] = Field(
+         default_factory=dict,
+         description="A parameters that need to use on this workflow.",
+     )
+     on: Union[list[Union[Crontab, str]], str] = Field(
+         default_factory=list,
+         description="A list of Crontab instance for this workflow schedule.",
+     )
+
+
+ CRONTAB_TYPE = Literal["Crontab"]
+
+
+ class CrontabSchema(Crontab):
+     """Override crontab model fields for generate JSON schema file."""
+
+     type: CRONTAB_TYPE = Field(description="A type of crontab template.")
+
+
+ @workflow_app.command(name="json-schema")
+ def workflow_json_schema(
+     output: Annotated[
+         Path,
+         typer.Option(help="An output file to export the JSON schema."),
+     ] = Path("./json-schema.json"),
+ ) -> None:
+     """Generate JSON schema file from the Workflow model."""
+     template = dict[str, Union[WorkflowSchema, CrontabSchema]]
+     json_schema = TypeAdapter(template).json_schema(by_alias=True)
+     template_schema: dict[str, str] = {
+         "$schema": "http://json-schema.org/draft-07/schema#",
+         "title": "Workflow Configuration Schema",
+         "version": "1.0.0",
+     }
+     with open(output, mode="w", encoding="utf-8") as f:
+         json.dump(template_schema | json_schema, f, indent=2)
+
+
  if __name__ == "__main__":
      app()
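The new `json-schema` command builds the schema by handing the whole top-level template mapping to pydantic's `TypeAdapter`, so one schema covers both workflow and crontab entries. A minimal standalone sketch of that pattern, using stand-in models rather than the real `WorkflowSchema`/`CrontabSchema` classes:

```python
# Minimal sketch of the TypeAdapter pattern above; DemoWorkflow and
# DemoCrontab are hypothetical stand-ins, not the package's classes.
from typing import Literal, Union

from pydantic import BaseModel, Field, TypeAdapter


class DemoWorkflow(BaseModel):
    type: Literal["Workflow"] = Field(description="A type of workflow template.")


class DemoCrontab(BaseModel):
    type: Literal["Crontab"] = Field(description="A type of crontab template.")


# A YAML config maps template names to either model, so the schema is
# generated for the whole mapping type at once.
adapter = TypeAdapter(dict[str, Union[DemoWorkflow, DemoCrontab]])
schema = adapter.json_schema(by_alias=True)
print(schema["additionalProperties"])  # -> {"anyOf": [{"$ref": ...}, {"$ref": ...}]}
```

Overriding `type` with a `Literal` field is what lets a schema-aware editor discriminate workflow entries from crontab entries inside a single YAML document.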
ddeutil/workflow/conf.py CHANGED
@@ -327,13 +327,13 @@ class YamlParser:
          *,
          ignore_filename: Optional[str] = None,
      ) -> bool:
-         """Check this file was ignored.
+         """Check this file was ignored from the `.confignore` format.

          :param file: (Path) A file path that want to check.
          :param path: (Path) A config path that want to read the config
              ignore file.
          :param ignore_filename: (str) An ignore filename. Default is
-             `.confignore` filename.
+             ``.confignore`` filename.

          :rtype: bool
          """
ddeutil/workflow/errors.py CHANGED
@@ -38,8 +38,14 @@ def to_dict(exception: Exception, **kwargs) -> ErrorData: # pragma: no cov


  class BaseError(Exception):
-     """Base Workflow exception class will implement the `refs` argument for
+     """Base Workflow exception class will implement the ``refs`` argument for
      making an error context to the result context.
+
+     Attributes:
+         refs: (:obj:str, optional)
+         context: (:obj:DictData)
+         params: (:obj:DictData)
+
      """

      def __init__(
ddeutil/workflow/job.py CHANGED
@@ -402,13 +402,19 @@ class Job(BaseModel):
          """
          # VALIDATE: Validate stage id should not duplicate.
          rs: list[str] = []
+         rs_raise: list[str] = []
          for stage in value:
              name: str = stage.iden
              if name in rs:
-                 raise ValueError(
-                     f"Stage name, {name!r}, should not be duplicate."
-                 )
+                 rs_raise.append(name)
+                 continue
              rs.append(name)
+
+         if rs_raise:
+             raise ValueError(
+                 f"Stage name, {', '.join(repr(s) for s in rs_raise)}, should "
+                 f"not be duplicate."
+             )
          return value

      @model_validator(mode="after")
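The validator now collects every duplicate stage name and raises once, instead of failing on the first hit. A standalone sketch of the same collect-then-raise pattern (the function name is illustrative):

```python
# Collect-then-raise, as in the stage validator above: report every
# duplicate name in a single error instead of only the first one found.
def check_duplicates(names: list[str]) -> list[str]:
    seen: list[str] = []
    dupes: list[str] = []
    for name in names:
        if name in seen:
            dupes.append(name)
            continue
        seen.append(name)
    if dupes:
        raise ValueError(
            f"Stage name, {', '.join(repr(s) for s in dupes)}, should "
            f"not be duplicate."
        )
    return seen


check_duplicates(["fetch", "clean", "fetch"])
# ValueError: Stage name, 'fetch', should not be duplicate.
```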
ddeutil/workflow/reusables.py CHANGED
@@ -44,7 +44,7 @@ from .errors import UtilError
  T = TypeVar("T")
  P = ParamSpec("P")

- # NOTE: Adjust logging level of the `asyncio` to INFO level.
+ # NOTE: Adjust logging level of the ``asyncio`` to INFO level.
  logging.getLogger("asyncio").setLevel(logging.INFO)


ddeutil/workflow/traces.py CHANGED
@@ -333,6 +333,124 @@ class BaseTrace(BaseModel, ABC): # pragma: no cov
              "Adjust make message method for this trace object before using."
          )

+     @abstractmethod
+     def _logging(
+         self,
+         message: str,
+         mode: str,
+         *,
+         is_err: bool = False,
+     ):
+         """Write trace log with append mode and logging this message with any
+         logging level.
+
+         :param message: (str) A message that want to log.
+         :param mode: (str)
+         :param is_err: (bool)
+         """
+         raise NotImplementedError(
+             "Logging action should be implement for making trace log."
+         )
+
+     def debug(self, message: str):
+         """Write trace log with append mode and logging this message with the
+         DEBUG level.
+
+         :param message: (str) A message that want to log.
+         """
+         self._logging(message, mode="debug")
+
+     def info(self, message: str) -> None:
+         """Write trace log with append mode and logging this message with the
+         INFO level.
+
+         :param message: (str) A message that want to log.
+         """
+         self._logging(message, mode="info")
+
+     def warning(self, message: str) -> None:
+         """Write trace log with append mode and logging this message with the
+         WARNING level.
+
+         :param message: (str) A message that want to log.
+         """
+         self._logging(message, mode="warning")
+
+     def error(self, message: str) -> None:
+         """Write trace log with append mode and logging this message with the
+         ERROR level.
+
+         :param message: (str) A message that want to log.
+         """
+         self._logging(message, mode="error", is_err=True)
+
+     def exception(self, message: str) -> None:
+         """Write trace log with append mode and logging this message with the
+         EXCEPTION level.
+
+         :param message: (str) A message that want to log.
+         """
+         self._logging(message, mode="exception", is_err=True)
+
+     @abstractmethod
+     async def _alogging(
+         self,
+         message: str,
+         mode: str,
+         *,
+         is_err: bool = False,
+     ) -> None:
+         """Async write trace log with append mode and logging this message with
+         any logging level.
+
+         :param message: (str) A message that want to log.
+         :param mode: (str)
+         :param is_err: (bool)
+         """
+         raise NotImplementedError(
+             "Async Logging action should be implement for making trace log."
+         )
+
+     async def adebug(self, message: str) -> None: # pragma: no cov
+         """Async write trace log with append mode and logging this message with
+         the DEBUG level.
+
+         :param message: (str) A message that want to log.
+         """
+         await self._alogging(message, mode="debug")
+
+     async def ainfo(self, message: str) -> None: # pragma: no cov
+         """Async write trace log with append mode and logging this message with
+         the INFO level.
+
+         :param message: (str) A message that want to log.
+         """
+         await self._alogging(message, mode="info")
+
+     async def awarning(self, message: str) -> None: # pragma: no cov
+         """Async write trace log with append mode and logging this message with
+         the WARNING level.
+
+         :param message: (str) A message that want to log.
+         """
+         await self._alogging(message, mode="warning")
+
+     async def aerror(self, message: str) -> None: # pragma: no cov
+         """Async write trace log with append mode and logging this message with
+         the ERROR level.
+
+         :param message: (str) A message that want to log.
+         """
+         await self._alogging(message, mode="error", is_err=True)
+
+     async def aexception(self, message: str) -> None: # pragma: no cov
+         """Async write trace log with append mode and logging this message with
+         the EXCEPTION level.
+
+         :param message: (str) A message that want to log.
+         """
+         await self._alogging(message, mode="exception", is_err=True)
+

  class ConsoleTrace(BaseTrace): # pragma: no cov
      """Console Trace log model."""
@@ -416,7 +534,7 @@ class ConsoleTrace(BaseTrace): # pragma: no cov
              f"{PrefixMsg.from_str(message).prepare(self.extras)}"
          )

-     def __logging(
+     def _logging(
          self, message: str, mode: str, *, is_err: bool = False
      ) -> None:
          """Write trace log with append mode and logging this message with any
@@ -433,47 +551,7 @@ class ConsoleTrace(BaseTrace): # pragma: no cov

          getattr(logger, mode)(msg, stacklevel=3, extra={"cut_id": self.cut_id})

-     def debug(self, message: str):
-         """Write trace log with append mode and logging this message with the
-         DEBUG level.
-
-         :param message: (str) A message that want to log.
-         """
-         self.__logging(message, mode="debug")
-
-     def info(self, message: str) -> None:
-         """Write trace log with append mode and logging this message with the
-         INFO level.
-
-         :param message: (str) A message that want to log.
-         """
-         self.__logging(message, mode="info")
-
-     def warning(self, message: str) -> None:
-         """Write trace log with append mode and logging this message with the
-         WARNING level.
-
-         :param message: (str) A message that want to log.
-         """
-         self.__logging(message, mode="warning")
-
-     def error(self, message: str) -> None:
-         """Write trace log with append mode and logging this message with the
-         ERROR level.
-
-         :param message: (str) A message that want to log.
-         """
-         self.__logging(message, mode="error", is_err=True)
-
-     def exception(self, message: str) -> None:
-         """Write trace log with append mode and logging this message with the
-         EXCEPTION level.
-
-         :param message: (str) A message that want to log.
-         """
-         self.__logging(message, mode="exception", is_err=True)
-
-     async def __alogging(
+     async def _alogging(
          self, message: str, mode: str, *, is_err: bool = False
      ) -> None:
          """Write trace log with append mode and logging this message with any
@@ -490,46 +568,6 @@ class ConsoleTrace(BaseTrace): # pragma: no cov

          getattr(logger, mode)(msg, stacklevel=3, extra={"cut_id": self.cut_id})

-     async def adebug(self, message: str) -> None: # pragma: no cov
-         """Async write trace log with append mode and logging this message with
-         the DEBUG level.
-
-         :param message: (str) A message that want to log.
-         """
-         await self.__alogging(message, mode="debug")
-
-     async def ainfo(self, message: str) -> None: # pragma: no cov
-         """Async write trace log with append mode and logging this message with
-         the INFO level.
-
-         :param message: (str) A message that want to log.
-         """
-         await self.__alogging(message, mode="info")
-
-     async def awarning(self, message: str) -> None: # pragma: no cov
-         """Async write trace log with append mode and logging this message with
-         the WARNING level.
-
-         :param message: (str) A message that want to log.
-         """
-         await self.__alogging(message, mode="warning")
-
-     async def aerror(self, message: str) -> None: # pragma: no cov
-         """Async write trace log with append mode and logging this message with
-         the ERROR level.
-
-         :param message: (str) A message that want to log.
-         """
-         await self.__alogging(message, mode="error", is_err=True)
-
-     async def aexception(self, message: str) -> None: # pragma: no cov
-         """Async write trace log with append mode and logging this message with
-         the EXCEPTION level.
-
-         :param message: (str) A message that want to log.
-         """
-         await self.__alogging(message, mode="exception", is_err=True)
-

  class FileTrace(ConsoleTrace): # pragma: no cov
      """File Trace dataclass that write file to the local storage."""
ddeutil/workflow/utils.py CHANGED
@@ -163,21 +163,23 @@ def gen_id(
      extras: DictData | None = None,
  ) -> str:
      """Generate running ID for able to tracking. This generates process use
-     `md5` algorithm function if `WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE` set
+     ``md5`` algorithm function if ``WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE`` set
      to false. But it will cut this hashing value length to 10 it the setting
      value set to true.

      Simple Mode:

-         ... 0000 00 00 00 00 00 000000 T 0000000000
-         ... year month day hour minute second microsecond sep simple-id
+         ... 0000 00 00 00 00 00 000000 T 0000000000
+         ... year month day hour minute second microsecond sep simple-id

      :param value: A value that want to add to prefix before hashing with md5.
-     :param sensitive: A flag that convert the value to lower case before hashing
-     :param unique: A flag that add timestamp at microsecond level to value
-         before hashing.
-     :param simple_mode: A flag for generate ID by simple mode.
-     :param extras: An extra parameter that use for override config value.
+     :param sensitive: (bool) A flag that enable to convert the value to lower
+         case before hashing that value before generate ID.
+     :param unique: (bool) A flag that add timestamp at microsecond level to
+         value before hashing.
+     :param simple_mode: (bool | None) A flag for generate ID by simple mode.
+     :param extras: (DictData) An extra parameter that use for override config
+         value.

      :rtype: str
      """
@@ -212,7 +214,8 @@ def default_gen_id() -> str:
  def make_exec(path: Union[Path, str]) -> None:
      """Change mode of file to be executable file.

-     :param path: A file path that want to make executable permission.
+     :param path: (Path | str) A file path that want to make executable
+         permission.
      """
      f: Path = Path(path) if isinstance(path, str) else path
      f.chmod(f.stat().st_mode | stat.S_IEXEC)
@@ -285,9 +288,14 @@ def dump_all(value: T, by_alias: bool = False) -> T: ... # pragma: no cov


  def dump_all(
-     value: Union[T, BaseModel], by_alias: bool = False
+     value: Union[T, BaseModel],
+     by_alias: bool = False,
  ) -> Union[T, DictData]:
-     """Dump all BaseModel object to dict."""
+     """Dump all nested BaseModel object to dict object.
+
+     :param value: (T | BaseModel)
+     :param by_alias: (bool)
+     """
      if isinstance(value, dict):
          return {k: dump_all(value[k], by_alias=by_alias) for k in value}
      elif isinstance(value, (list, tuple, set)):
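A short usage example for `dump_all`: models nested inside plain containers come back as plain dicts. The expected output assumes the `BaseModel` branch (not shown in this hunk) ends in a `model_dump()` call:

```python
# Illustrative use of the recursive dump shown above.
from pydantic import BaseModel

from ddeutil.workflow.utils import dump_all


class Item(BaseModel):
    name: str


payload = {"items": [Item(name="a"), Item(name="b")], "count": 2}
print(dump_all(payload))
# Presumably: {'items': [{'name': 'a'}, {'name': 'b'}], 'count': 2}
```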
ddeutil/workflow/workflow.py CHANGED
@@ -257,8 +257,10 @@ class Workflow(BaseModel):
              f"{self.name!r}."
          )

-         # NOTE: Set job ID to the job model.
-         self.jobs[job].id = job
+         # NOTE: Copy the job model and set job ID to the job model.
+         job_model = self.jobs[job].model_copy()
+         job_model.id = job
+         self.jobs[job] = job_model

          # VALIDATE: Validate workflow name should not dynamic with params
          # template.
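The copy matters when two job entries alias the same `Job` instance: mutating `id` in place would leak the last-written value across entries. A sketch of the hazard with a stand-in model:

```python
# Why copy before mutating: aliased model instances share in-place writes.
# DemoJob is a hypothetical stand-in for the real Job model.
from typing import Optional

from pydantic import BaseModel


class DemoJob(BaseModel):
    id: Optional[str] = None


shared = DemoJob()
jobs = {"first": shared, "second": shared}

jobs["first"].id = "first"
print(jobs["second"].id)  # -> "first" (the mutation leaked)

fixed = jobs["second"].model_copy()  # pydantic v2 shallow copy
fixed.id = "second"
jobs["second"] = fixed
print(jobs["first"].id, jobs["second"].id)  # -> first second
```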
@@ -771,3 +773,217 @@ class Workflow(BaseModel):
                  ).to_dict(),
              },
          )
+
+     def rerun(
+         self,
+         context: DictData,
+         *,
+         parent_run_id: Optional[str] = None,
+         event: Optional[Event] = None,
+         timeout: float = 3600,
+         max_job_parallel: int = 2,
+     ) -> Result:
+         """Re-Execute workflow with passing the error context data.
+
+         :param context: A context result that get the failed status.
+         :param parent_run_id: (Optional[str]) A parent workflow running ID.
+         :param event: (Event) An Event manager instance that use to cancel this
+             execution if it forces stopped by parent execution.
+         :param timeout: (float) A workflow execution time out in second unit
+             that use for limit time of execution and waiting job dependency.
+             This value does not force stop the task that still running more than
+             this limit time. (Default: 60 * 60 seconds)
+         :param max_job_parallel: (int) The maximum workers that use for job
+             execution in `ThreadPoolExecutor` object. (Default: 2 workers)
+
+         :rtype: Result
+         """
+         ts: float = time.monotonic()
+
+         result: Result = Result.construct_with_rs_or_id(
+             parent_run_id=parent_run_id,
+             id_logic=self.name,
+             extras=self.extras,
+         )
+         if context["status"] == SUCCESS:
+             result.trace.info(
+                 "[WORKFLOW]: Does not rerun because it already executed with "
+                 "success status."
+             )
+             return result.catch(status=SUCCESS, context=context)
+
+         err = context["errors"]
+         result.trace.info(f"[WORKFLOW]: Previous error: {err}")
+
+         event: Event = event or Event()
+         max_job_parallel: int = dynamic(
+             "max_job_parallel", f=max_job_parallel, extras=self.extras
+         )
+         result.trace.info(
+             f"[WORKFLOW]: Execute: {self.name!r} ("
+             f"{'parallel' if max_job_parallel > 1 else 'sequential'} jobs)"
+         )
+         if not self.jobs:
+             result.trace.warning(f"[WORKFLOW]: {self.name!r} does not set jobs")
+             return result.catch(status=SUCCESS, context=context)
+
+         # NOTE: Prepare the new context for rerun process.
+         jobs: DictData = context.get("jobs")
+         new_context: DictData = {
+             "params": context["params"].copy(),
+             "jobs": {j: jobs[j] for j in jobs if jobs[j]["status"] == SUCCESS},
+         }
+
+         total_job: int = 0
+         job_queue: Queue = Queue()
+         for job_id in self.jobs:
+
+             if job_id in new_context["jobs"]:
+                 continue
+
+             job_queue.put(job_id)
+             total_job += 1
+
+         if total_job == 0:
+             result.trace.warning("[WORKFLOW]: It does not have job to rerun.")
+             return result.catch(status=SUCCESS, context=context)
+
+         not_timeout_flag: bool = True
+         statuses: list[Status] = [WAIT] * total_job
+         skip_count: int = 0
+         sequence_statuses: list[Status] = []
+         timeout: float = dynamic(
+             "max_job_exec_timeout", f=timeout, extras=self.extras
+         )
+
+         result.catch(status=WAIT, context=new_context)
+         if event and event.is_set():
+             return result.catch(
+                 status=CANCEL,
+                 context={
+                     "errors": WorkflowCancelError(
+                         "Execution was canceled from the event was set before "
+                         "workflow execution."
+                     ).to_dict(),
+                 },
+             )
+
+         with ThreadPoolExecutor(max_job_parallel, "wf") as executor:
+             futures: list[Future] = []
+
+             while not job_queue.empty() and (
+                 not_timeout_flag := ((time.monotonic() - ts) < timeout)
+             ):
+                 job_id: str = job_queue.get()
+                 job: Job = self.job(name=job_id)
+                 if (check := job.check_needs(new_context["jobs"])) == WAIT:
+                     job_queue.task_done()
+                     job_queue.put(job_id)
+                     time.sleep(0.15)
+                     continue
+                 elif check == FAILED: # pragma: no cov
+                     return result.catch(
+                         status=FAILED,
+                         context={
+                             "status": FAILED,
+                             "errors": WorkflowError(
+                                 f"Validate job trigger rule was failed with "
+                                 f"{job.trigger_rule.value!r}."
+                             ).to_dict(),
+                         },
+                     )
+                 elif check == SKIP: # pragma: no cov
+                     result.trace.info(
+                         f"[JOB]: Skip job: {job_id!r} from trigger rule."
+                     )
+                     job.set_outputs(output={"status": SKIP}, to=new_context)
+                     job_queue.task_done()
+                     skip_count += 1
+                     continue
+
+                 if max_job_parallel > 1:
+                     futures.append(
+                         executor.submit(
+                             self.execute_job,
+                             job=job,
+                             params=new_context,
+                             result=result,
+                             event=event,
+                         ),
+                     )
+                     job_queue.task_done()
+                     continue
+
+                 if len(futures) < 1:
+                     futures.append(
+                         executor.submit(
+                             self.execute_job,
+                             job=job,
+                             params=new_context,
+                             result=result,
+                             event=event,
+                         )
+                     )
+                 elif (future := futures.pop(0)).done():
+                     if e := future.exception():
+                         sequence_statuses.append(get_status_from_error(e))
+                     else:
+                         st, _ = future.result()
+                         sequence_statuses.append(st)
+                     job_queue.put(job_id)
+                 elif future.cancelled():
+                     sequence_statuses.append(CANCEL)
+                     job_queue.put(job_id)
+                 elif future.running() or "state=pending" in str(future):
+                     futures.insert(0, future)
+                     job_queue.put(job_id)
+                 else: # pragma: no cov
+                     job_queue.put(job_id)
+                     futures.insert(0, future)
+                     result.trace.warning(
+                         f"[WORKFLOW]: ... Execution non-threading not "
+                         f"handle: {future}."
+                     )
+
+                 job_queue.task_done()
+
+             if not_timeout_flag:
+                 job_queue.join()
+                 for total, future in enumerate(as_completed(futures), start=0):
+                     try:
+                         statuses[total], _ = future.result()
+                     except WorkflowError as e:
+                         statuses[total] = get_status_from_error(e)
+
+                 # NOTE: Update skipped status from the job trigger.
+                 for i in range(skip_count):
+                     statuses[total + 1 + i] = SKIP
+
+                 # NOTE: Update status from none-parallel job execution.
+                 for i, s in enumerate(sequence_statuses, start=0):
+                     statuses[total + 1 + skip_count + i] = s
+
+                 return result.catch(
+                     status=validate_statuses(statuses), context=new_context
+                 )
+
+             event.set()
+             for future in futures:
+                 future.cancel()
+
+             result.trace.error(
+                 f"[WORKFLOW]: {self.name!r} was timeout because it use exec "
+                 f"time more than {timeout} seconds."
+             )
+
+             time.sleep(0.0025)
+
+         return result.catch(
+             status=FAILED,
+             context={
+                 "errors": WorkflowTimeoutError(
+                     f"{self.name!r} was timeout because it use exec time more "
+                     f"than {timeout} seconds."
+                 ).to_dict(),
+             },
+         )
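A hypothetical usage sketch for `rerun()`: execute, and on failure feed the caught context back in so jobs that already reached `SUCCESS` stay in the prepared context and are not queued again. The `Result.status`/`Result.context` attributes and the loading call are assumptions for illustration only:

```python
# Hypothetical rerun() usage; attribute names on Result and the loading
# API are assumptions, not confirmed by this diff.
from ddeutil.workflow import Workflow
from ddeutil.workflow.result import FAILED  # status constant location assumed

workflow = Workflow.from_conf("wf-example")
result = workflow.execute(params={"run-date": "2024-01-01"})

if result.status == FAILED:
    # Only jobs that did not succeed in the previous run re-execute.
    result = workflow.rerun(result.context, max_job_parallel=1)
```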
{ddeutil_workflow-0.0.69.dist-info → ddeutil_workflow-0.0.70.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.69
+ Version: 0.0.70
  Summary: Lightweight workflow orchestration with YAML template
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
{ddeutil_workflow-0.0.69.dist-info → ddeutil_workflow-0.0.70.dist-info}/RECORD RENAMED
@@ -1,30 +1,30 @@
- ddeutil/workflow/__about__.py,sha256=wwXPhnyUKhGfDURjazLxeC2yPB5gr3G3n0ZD73ZnhYY,28
+ ddeutil/workflow/__about__.py,sha256=hpI1C5z35dcwfGMtSFmLCLv6edKv0tP9m0z-1ytAu0Y,28
  ddeutil/workflow/__cron.py,sha256=BOKQcreiex0SAigrK1gnLxpvOeF3aca_rQwyz9Kfve4,28751
  ddeutil/workflow/__init__.py,sha256=HUy9XkBe7ttpUupJS4JDuj3aGp2QmJZfz8m2kHAIwdw,927
  ddeutil/workflow/__main__.py,sha256=Qd-f8z2Q2vpiEP2x6PBFsJrpACWDVxFKQk820MhFmHo,59
  ddeutil/workflow/__types.py,sha256=uNfoRbVmNK5O37UUMVnqcmoghD9oMS1q9fXC0APnjSI,4584
  ddeutil/workflow/audits.py,sha256=1pg4a5wdZCAKOqMr1Z_ofzRAFsDarN1BIJenWwn9xkg,11435
- ddeutil/workflow/cli.py,sha256=ems4Ewc2UGmjUwPjbobVsP3igfBZcz6Crf3PQM15w4E,3449
- ddeutil/workflow/conf.py,sha256=ct7CtebGhWfXk6509CZsKh45hkWMzTD5cVghmINa0Hk,13577
- ddeutil/workflow/errors.py,sha256=4DaKnyUm8RrUyQA5qakgW0ycSQLO7j-owyoh79LWQ5c,2893
+ ddeutil/workflow/cli.py,sha256=pFmSmkdQUIpmCbFcoyoBub2LAj3rtlFM3oToJq0C0Ac,5149
+ ddeutil/workflow/conf.py,sha256=KcvOlU0zzS53iK44X_T64qHSX9nr4EHGjGX31dsRiUE,13609
+ ddeutil/workflow/errors.py,sha256=O5rq80Sqj0QMeIsWXpRUhiFLTq0o8bwm5BQ4kuq6xmI,3013
  ddeutil/workflow/event.py,sha256=e3xcECfMvH6K8Tff9cjCXIItVJjOmlonAQ0l253l6T0,11110
- ddeutil/workflow/job.py,sha256=qcbKSOa39256nfJHL0vKJsHrelcRujX5KET2IEGS8dw,38995
+ ddeutil/workflow/job.py,sha256=kviOQeSUsx0Z7CL0foblTulg2m_l6a3M3SMRxg9RWeg,39151
  ddeutil/workflow/params.py,sha256=Pco3DyjptC5Jkx53dhLL9xlIQdJvNAZs4FLzMUfXpbQ,12402
  ddeutil/workflow/result.py,sha256=ctxNSaY9tZPHEAUgvDkjWWu2APeTmlZCf1Hb0XVbbFo,8173
- ddeutil/workflow/reusables.py,sha256=jPrOCbxagqRvRFGXJzIyDa1wKV5AZ4crZyJ10cldQP0,21620
+ ddeutil/workflow/reusables.py,sha256=LSn0XTkzGHf4ulOmWub29F0JZHt0NEyzrFd4ZFx_g_k,21622
  ddeutil/workflow/stages.py,sha256=kzMEMRTEuG52EOw51zyVO6LE-oiiqTIRUCk_OMcWZTM,106506
- ddeutil/workflow/traces.py,sha256=GranGEgizx-R6wDJjVjkoCNVHlgOc6-wHi9zD5DH3D8,24059
- ddeutil/workflow/utils.py,sha256=oKrhB-HOogeaO9RGXbe2vAs30A3rMMQxUd2B5pOw8zg,9131
- ddeutil/workflow/workflow.py,sha256=jcSwOTaigVgiHElmlM1iK3g6rDMDd5PGni0ZcIxgH2U,27859
+ ddeutil/workflow/traces.py,sha256=DrKzxgp9FJUJd_oWYHiMtR1z8R7HeweAL4_Y8q0_pz8,25048
+ ddeutil/workflow/utils.py,sha256=65aMH2JKeeG7GLLoD0D5_0Cv55XV5EUd1Gn-Zz21hwo,9339
+ ddeutil/workflow/workflow.py,sha256=nPjNbn0UUKlUwkh7lHFRb5BSLARBOTZ0iv0TU_SMOKc,36205
  ddeutil/workflow/api/__init__.py,sha256=W3fe6_NLHSUzr4Tsu79w3pmvrYjpLeP3zBk4mtpPyqg,2843
  ddeutil/workflow/api/log_conf.py,sha256=WfS3udDLSyrP-C80lWOvxxmhd_XWKvQPkwDqKblcH3E,1834
  ddeutil/workflow/api/routes/__init__.py,sha256=JRaJZB0D6mgR17MbZo8yLtdYDtD62AA8MdKlFqhG84M,420
  ddeutil/workflow/api/routes/job.py,sha256=x809G5gCbJS257txj9eLLTbCbFK8ercXWzPDLuv5gEM,2953
  ddeutil/workflow/api/routes/logs.py,sha256=HiXw93PeIiaK_xJjM8lbD2ED1Il-W1iM51085nc7qmg,5286
  ddeutil/workflow/api/routes/workflows.py,sha256=D76cdLb2_9Dkfe2_8xt06CvPhAyJMqxyYkUgAs8Qlnw,4402
- ddeutil_workflow-0.0.69.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
- ddeutil_workflow-0.0.69.dist-info/METADATA,sha256=wXzNozF5KKoYdTX_JuFz3qOla_YCKjrXCTd7wFd-kb4,15207
- ddeutil_workflow-0.0.69.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- ddeutil_workflow-0.0.69.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
- ddeutil_workflow-0.0.69.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
- ddeutil_workflow-0.0.69.dist-info/RECORD,,
+ ddeutil_workflow-0.0.70.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ ddeutil_workflow-0.0.70.dist-info/METADATA,sha256=EddcIOkUz36vyIOG4GEOnBt2Zy_K8qI7_Gld5KTbeLs,15207
+ ddeutil_workflow-0.0.70.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ ddeutil_workflow-0.0.70.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
+ ddeutil_workflow-0.0.70.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ ddeutil_workflow-0.0.70.dist-info/RECORD,,