ddeutil-workflow 0.0.81__py3-none-any.whl → 0.0.82__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/traces.py CHANGED
@@ -29,7 +29,16 @@ from inspect import Traceback, currentframe, getframeinfo
 from pathlib import Path
 from threading import Lock, get_ident
 from types import FrameType
-from typing import Annotated, Any, ClassVar, Final, Literal, Optional, Union
+from typing import (
+    Annotated,
+    Any,
+    ClassVar,
+    Final,
+    Literal,
+    Optional,
+    TypeVar,
+    Union,
+)
 from zoneinfo import ZoneInfo
 
 from pydantic import BaseModel, Field, PrivateAttr
@@ -86,9 +95,10 @@ PREFIX_LOGS: Final[dict[str, dict]] = {
         "emoji": "⚙️",
         "desc": "logs from any usage from custom caller function.",
     },
+    "NESTED": {"emoji": "⛓️", "desc": "logs from stages module."},
     "STAGE": {"emoji": "🔗", "desc": "logs from stages module."},
-    "JOB": {"emoji": "⛓️", "desc": "logs from job module."},
-    "WORKFLOW": {"emoji": "🏃", "desc": "logs from workflow module."},
+    "JOB": {"emoji": "🏗", "desc": "logs from job module."},
+    "WORKFLOW": {"emoji": "👟", "desc": "logs from workflow module."},
     "RELEASE": {"emoji": "📅", "desc": "logs from release workflow method."},
     "POKING": {"emoji": "⏰", "desc": "logs from poke workflow method."},
     "AUDIT": {"emoji": "📌", "desc": "logs from audit model."},
@@ -427,6 +437,9 @@ class BaseHandler(BaseModel, ABC):
         self, metadata: list[Metadata], *, extra: Optional[DictData] = None
     ) -> None: ...
 
+    def pre(self) -> None:
+        """Pre-process of handler that will execute when start create trance."""
+
 
 class ConsoleHandler(BaseHandler):
     """Console Handler model."""
@@ -460,14 +473,20 @@ class FileHandler(BaseHandler):
     metadata_filename: ClassVar[str] = "metadata.txt"
 
     type: Literal["file"] = "file"
-    path: str = Field(description="A file path.")
+    path: str = Field(
+        description=(
+            "A file path that use to save all trace log files that include "
+            "stdout, stderr, and metadata."
+        )
+    )
     format: str = Field(
         default=(
             "{datetime} ({process:5d}, {thread:5d}) ({cut_id}) {message:120s} "
             "({filename}:{lineno})"
-        )
+        ),
+        description="A trace log format that write on stdout and stderr files.",
     )
-    buffer_size: int = 8192
+    buffer_size: int = Field(default=8192)
 
     # NOTE: Private attrs for the internal process.
     _lock: Lock = PrivateAttr(default_factory=Lock)
@@ -488,7 +507,9 @@ class FileHandler(BaseHandler):
             log_file.mkdir(parents=True)
         return log_file
 
-    def pre(self) -> None: ...
+    def pre(self) -> None:  # pragma: no cov
+        if not (p := Path(self.path)).exists():
+            p.mkdir(parents=True)
 
     def emit(
         self,
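
The new `pre()` body moves directory creation out of the emit path: the base log path is created once when the trace is constructed (see `auto_pre_process` in `get_trace` below) rather than being checked per write. A standalone sketch of the same guard:

```python
from pathlib import Path


def pre(path: str) -> None:
    # Mirrors FileHandler.pre() above: create the base trace-log directory
    # once, up front, so emit() never has to check for it per write.
    if not (p := Path(path)).exists():
        p.mkdir(parents=True)


pre("./logs/traces")  # creates the tree; later calls hit the exists() guard
```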
@@ -496,6 +517,7 @@ class FileHandler(BaseHandler):
         *,
         extra: Optional[DictData] = None,
     ) -> None:
+        """Emit trace log."""
         pointer: Path = self.pointer(metadata.pointer_id)
         std_file = "stderr" if metadata.error_flag else "stdout"
         with self._lock:
@@ -518,7 +540,9 @@ class FileHandler(BaseHandler):
         try:
             import aiofiles
         except ImportError as e:
-            raise ImportError("Async mode need aiofiles package") from e
+            raise ImportError(
+                "Async mode need to install `aiofiles` package first"
+            ) from e
 
         with self._lock:
             pointer: Path = self.pointer(metadata.pointer_id)
@@ -538,6 +562,7 @@ class FileHandler(BaseHandler):
     def flush(
         self, metadata: list[Metadata], *, extra: Optional[DictData] = None
     ) -> None:
+        """Flush logs."""
         with self._lock:
             pointer: Path = self.pointer(metadata[0].pointer_id)
             stdout_file = open(
@@ -613,7 +638,7 @@ class FileHandler(BaseHandler):
         """Find trace logs.
 
         Args:
-            path: A trace path that want to find.
+            path (Path | None, default None): A trace path that want to find.
         """
         for file in sorted(
             (path or Path(self.path)).glob("./run_id=*"),
@@ -634,6 +659,9 @@ class FileHandler(BaseHandler):
             run_id: A running ID of trace log.
             force_raise: Whether to raise an exception if not found.
             path: Optional path override.
+
+        Returns:
+            TraceData: A TranceData instance that already passed searching data.
         """
         base_path: Path = path or self.path
         file: Path = base_path / f"run_id={run_id}"
@@ -757,7 +785,8 @@ class SQLiteHandler(BaseHandler):  # pragma: no cov
         metadata: Metadata,
         *,
         extra: Optional[DictData] = None,
-    ) -> None: ...
+    ) -> None:
+        raise NotImplementedError("Does not implement async emit yet.")
 
     def flush(
         self, metadata: list[Metadata], *, extra: Optional[DictData] = None
@@ -1506,7 +1535,6 @@ class ElasticHandler(BaseHandler):  # pragma: no cov
         try:
             from elasticsearch import Elasticsearch
 
-            # Create client
             client = Elasticsearch(
                 hosts=es_hosts if isinstance(es_hosts, list) else [es_hosts],
                 basic_auth=(
@@ -1653,8 +1681,6 @@ class ElasticHandler(BaseHandler):  # pragma: no cov
 
         for hit in response["hits"]["hits"]:
             source = hit["_source"]
-
-            # Convert to TraceMeta
             trace_meta = Metadata(
                 run_id=source["run_id"],
                 parent_run_id=source["parent_run_id"],
@@ -1724,6 +1750,7 @@ class ElasticHandler(BaseHandler):  # pragma: no cov
         return TraceData(stdout="", stderr="")
 
 
+Handler = TypeVar("Handler", bound=BaseHandler)
 TraceHandler = Annotated[
     Union[
         ConsoleHandler,
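
The new `Handler` TypeVar (bounded by `BaseHandler`) complements the `TraceHandler` union: the union drives Pydantic's validation of configured handlers, while the TypeVar lets helper signatures preserve a concrete handler type. A hedged sketch of the pattern with stub classes; `first_of` is hypothetical, not package code:

```python
from typing import TypeVar


class BaseHandler: ...


class ConsoleHandler(BaseHandler): ...


Handler = TypeVar("Handler", bound=BaseHandler)


def first_of(handlers: list[BaseHandler], kind: type[Handler]) -> Handler:
    # Return the first handler of the requested concrete type; the return
    # type follows `kind`, so callers get ConsoleHandler, not BaseHandler.
    for handler in handlers:
        if isinstance(handler, kind):
            return handler
    raise LookupError(f"no handler of type {kind.__name__}")


console = first_of([ConsoleHandler()], ConsoleHandler)
```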
@@ -1866,7 +1893,7 @@ class BaseAsyncEmit(ABC):
             await self.amit(msg, level="exception")
 
 
-class TraceManager(BaseModel, BaseEmit, BaseAsyncEmit):
+class Trace(BaseModel, BaseEmit, BaseAsyncEmit):
     """Trace Manager model that keep all trance handler and emit log to its
     handler.
     """
@@ -1955,7 +1982,7 @@ class TraceManager(BaseModel, BaseEmit, BaseAsyncEmit):
         any logging level.
 
         Args:
-            msg: A message that want to log.
+            msg (str): A message that want to log.
             level (Level): A logging mode.
         """
         _msg: str = self.make_message(msg)
@@ -2005,10 +2032,12 @@ class TraceManager(BaseModel, BaseEmit, BaseAsyncEmit):
 def get_trace(
     run_id: str,
     *,
+    handlers: list[Union[DictData, Handler]] = None,
     parent_run_id: Optional[str] = None,
     extras: Optional[DictData] = None,
-) -> TraceManager:
-    """Get dynamic TraceManager instance from the core config.
+    auto_pre_process: bool = False,
+) -> Trace:
+    """Get dynamic Trace instance from the core config.
 
     This factory function returns the appropriate trace implementation based on
     configuration. It can be overridden by extras argument and accepts running ID
@@ -2017,16 +2046,27 @@ def get_trace(
     Args:
         run_id (str): A running ID.
         parent_run_id (str | None, default None): A parent running ID.
+        handlers:
         extras: An extra parameter that want to override the core
             config values.
+        auto_pre_process (bool, default False)
 
     Returns:
-        TraceManager: The appropriate trace instance.
+        Trace: The appropriate trace instance.
     """
-    handlers = dynamic("trace_handlers", extras=extras)
-    return TraceManager(
-        run_id=run_id,
-        parent_run_id=parent_run_id,
-        handlers=handlers,
-        extras=extras or {},
+    handlers: list[DictData] = dynamic(
+        "trace_handlers", f=handlers, extras=extras
+    )
+    trace = Trace.model_validate(
+        {
+            "run_id": run_id,
+            "parent_run_id": parent_run_id,
+            "handlers": handlers,
+            "extras": extras or {},
+        }
     )
+    # NOTE: Start pre-process when start create trace.
+    if auto_pre_process:
+        for handler in trace.handlers:
+            handler.pre()
+    return trace
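
Together, the new `handlers` and `auto_pre_process` arguments let a caller pass handler configs directly and run each handler's `pre()` hook at construction time. A usage sketch, assuming the dict payload follows the `FileHandler` fields shown earlier (`type` and `path`); the run ID and path are illustrative:

```python
from ddeutil.workflow.traces import get_trace

# Override the configured trace handlers with an explicit file handler;
# auto_pre_process=True calls pre() on each handler, so FileHandler
# creates its base path before the first emit.
trace = get_trace(
    "20250728120159123456T9a1b2c3d4e",  # illustrative run ID
    handlers=[{"type": "file", "path": "./logs/traces"}],
    auto_pre_process=True,
)
trace.info("[WORKFLOW]: Trace handlers are ready.")
```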
ddeutil/workflow/utils.py CHANGED
@@ -218,7 +218,10 @@ def gen_id(
         hashing value length to 10 if simple mode is enabled.
 
     Simple Mode Format:
-        YYYYMMDDHHMMSSffffffTxxxxxxxxxx
+
+        The format of ID include full datetime and hashing identity.
+
+        YYYY MM DD HH MM SS ffffff T **********
         year month day hour minute second microsecond sep simple-id
 
     Args:
@@ -318,7 +321,7 @@ def cross_product(matrix: Matrix) -> Iterator[DictData]:
     )
 
 
-def cut_id(run_id: str, *, num: int = 6) -> str:
+def cut_id(run_id: str, *, num: int = 8) -> str:
     """Cut running ID to specified length.
 
     Example:
@@ -334,8 +337,8 @@ def cut_id(run_id: str, *, num: int = 6) -> str:
     """
     if "T" in run_id:
         dt, simple = run_id.split("T", maxsplit=1)
-        return dt[:12] + simple[-num:]
-    return run_id[:12] + run_id[-num:]
+        return dt[10:20] + simple[-num:]
+    return run_id[-num:]
 
 
 @overload
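
The rewritten `cut_id` drops the date prefix entirely: for T-format IDs it keeps `dt[10:20]` (the minute/second/microsecond slice of the 20-character datetime block documented in `gen_id`) plus the last `num` characters of the hash, and plain IDs keep only the hash tail. An illustration with a hypothetical run ID:

```python
def cut_id(run_id: str, *, num: int = 8) -> str:
    # Same logic as the 0.0.82 implementation above.
    if "T" in run_id:
        dt, simple = run_id.split("T", maxsplit=1)
        return dt[10:20] + simple[-num:]
    return run_id[-num:]


# dt = "20250728120159123456" -> dt[10:20] == "0159123456" (MMSSffffff)
print(cut_id("20250728120159123456T9a1b2c3d4e"))  # "01591234561b2c3d4e"
print(cut_id("9a1b2c3d4e"))                       # "1b2c3d4e"
```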
ddeutil/workflow/workflow.py CHANGED
@@ -30,19 +30,18 @@ from concurrent.futures import (
     as_completed,
 )
 from datetime import datetime
-from enum import Enum
 from pathlib import Path
 from queue import Queue
 from textwrap import dedent
 from threading import Event as ThreadEvent
-from typing import Any, Optional, Union
+from typing import Any, Literal, Optional, Union
 
 from pydantic import BaseModel, Field
 from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self
 
 from .__types import DictData
-from .audits import Audit, get_audit
+from .audits import NORMAL, RERUN, Audit, ReleaseType, get_audit
 from .conf import YamlParser, dynamic
 from .errors import WorkflowCancelError, WorkflowError, WorkflowTimeoutError
 from .event import Event
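
This import change pairs with the removal of the `ReleaseType` enum further down: the enum and its aliases now live in the audits module. A migration sketch for downstream code, assuming the enum moved over unchanged, as the new import line suggests:

```python
# 0.0.81 location (removed in this release):
#   from ddeutil.workflow.workflow import NORMAL, RERUN, ReleaseType
# 0.0.82 location, per the new import in workflow.py:
from ddeutil.workflow.audits import NORMAL, RERUN, ReleaseType

# The module-level names are plain aliases of the enum members.
assert NORMAL is ReleaseType.NORMAL
assert RERUN is ReleaseType.RERUN
```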
@@ -61,7 +60,7 @@ from .result import (
     validate_statuses,
 )
 from .reusables import has_template, param2template
-from .traces import TraceManager, get_trace
+from .traces import Trace, get_trace
 from .utils import (
     UTC,
     gen_id,
@@ -70,31 +69,6 @@ from .utils import (
 )
 
 
-class ReleaseType(str, Enum):
-    """Release type enumeration for workflow execution modes.
-
-    This enum defines the different types of workflow releases that can be
-    triggered, each with specific behavior and use cases.
-
-    Attributes:
-        NORMAL: Standard workflow release execution
-        RERUN: Re-execution of previously failed workflow
-        EVENT: Event-triggered workflow execution
-        FORCE: Forced execution bypassing normal conditions
-    """
-
-    NORMAL = "normal"
-    RERUN = "rerun"
-    EVENT = "event"
-    FORCE = "force"
-
-
-NORMAL = ReleaseType.NORMAL
-RERUN = ReleaseType.RERUN
-EVENT = ReleaseType.EVENT
-FORCE = ReleaseType.FORCE
-
-
 class Workflow(BaseModel):
     """Main workflow orchestration model for job and schedule management.
 
@@ -113,17 +87,6 @@ class Workflow(BaseModel):
         on (list[Crontab]): Schedule definitions using cron expressions
         jobs (dict[str, Job]): Collection of jobs within this workflow
 
-    Example:
-        Create and execute a workflow:
-
-        ```python
-        workflow = Workflow.from_conf('my-workflow')
-        result = workflow.execute({
-            'param1': 'value1',
-            'param2': 'value2'
-        })
-        ```
-
     Note:
         Workflows can be executed immediately or scheduled for background
         execution using the cron-like scheduling system.
@@ -134,6 +97,7 @@ class Workflow(BaseModel):
         description="An extra parameters that want to override config values.",
     )
     name: str = Field(description="A workflow name.")
+    type: Literal["Workflow"] = Field(default="workflow")
     desc: Optional[str] = Field(
         default=None,
         description=(
@@ -198,10 +162,10 @@ class Workflow(BaseModel):
             FileNotFoundError: If workflow configuration file not found
 
         Example:
-            >>> # Load from default config path
+            >>> # NOTE: Load from default config path
             >>> workflow = Workflow.from_conf('data-pipeline')
 
-            >>> # Load with custom path and extras
+            >>> # NOTE: Load with custom path and extras
             >>> workflow = Workflow.from_conf(
             ...     'data-pipeline',
             ...     path=Path('./custom-configs'),
@@ -211,7 +175,6 @@ class Workflow(BaseModel):
         load: YamlParser = YamlParser(name, path=path, extras=extras, obj=cls)
         data: DictData = copy.deepcopy(load.data)
         data["name"] = name
-
         if extras:
             data["extras"] = extras
 
@@ -287,7 +250,11 @@ class Workflow(BaseModel):
         return self.model_dump(by_alias=True)
 
     def md(self, author: Optional[str] = None) -> str:  # pragma: no cov
-        """Generate the markdown template."""
+        """Generate the markdown template from this Workflow model data.
+
+        Args:
+            author (str | None, default None): An author name.
+        """
 
         def align_newline(value: str) -> str:
             return value.rstrip("\n").replace("\n", "\n ")
@@ -437,7 +404,7 @@ class Workflow(BaseModel):
         override_log_name: Optional[str] = None,
         timeout: int = 600,
         audit_excluded: Optional[list[str]] = None,
-        audit: type[Audit] = None,
+        audit: Audit = None,
     ) -> Result:
         """Release the workflow which is executes workflow with writing audit
         log tracking. The method is overriding parameter with the release
@@ -471,6 +438,7 @@ class Workflow(BaseModel):
             method.
         """
         name: str = override_log_name or self.name
+        audit: Audit = audit or get_audit(extras=self.extras)
 
         # NOTE: Generate the parent running ID with not None value.
         if run_id:
@@ -481,7 +449,15 @@ class Workflow(BaseModel):
             parent_run_id: str = run_id
 
         context: DictData = {"status": WAIT}
-        trace: TraceManager = get_trace(
+        audit_data: DictData = {
+            "name": name,
+            "release": release,
+            "type": release_type,
+            "run_id": run_id,
+            "parent_run_id": parent_run_id,
+            "extras": self.extras,
+        }
+        trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         release: datetime = self.validate_release(dt=release)
@@ -498,6 +474,25 @@ class Workflow(BaseModel):
             },
             extras=self.extras,
         )
+
+        if release_type == NORMAL and audit.is_pointed(data=audit_data):
+            trace.info("[RELEASE]: Skip this release because it already audit.")
+            return Result(
+                run_id=run_id,
+                parent_run_id=parent_run_id,
+                status=SKIP,
+                context=catch(context, status=SKIP),
+                extras=self.extras,
+            )
+
+        if release_type == RERUN:
+            # TODO: It will load previous audit and use this data to run with
+            #   the `rerun` method.
+            raise NotImplementedError(
+                "Release does not support for rerun type yet. Please use the "
+                "`rerun` method instead."
+            )
+
         rs: Result = self.execute(
             params=values,
             run_id=parent_run_id,
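
These guards change `release()` semantics in 0.0.82: a NORMAL release whose audit pointer already exists returns SKIP without executing, and RERUN raises until the dedicated `rerun` path lands. A hedged sketch of what a caller sees; the workflow name and exact positional argument shape are illustrative, not confirmed by this diff:

```python
from datetime import datetime

from ddeutil.workflow import Workflow
from ddeutil.workflow.audits import NORMAL, RERUN

wf = Workflow.from_conf("data-pipeline")  # hypothetical config name
dt = datetime(2025, 7, 28, 12, 0)

first = wf.release(dt, {}, release_type=NORMAL)   # executes and writes audit
second = wf.release(dt, {}, release_type=NORMAL)  # returns SKIP: already audited

try:
    wf.release(dt, {}, release_type=RERUN)
except NotImplementedError:
    pass  # 0.0.82 points rerun users at the separate `rerun` method
```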
@@ -507,15 +502,10 @@ class Workflow(BaseModel):
         trace.info(f"[RELEASE]: End {name!r} : {release:%Y-%m-%d %H:%M:%S}")
         trace.debug(f"[RELEASE]: Writing audit: {name!r}.")
         (
-            (audit or get_audit(extras=self.extras)).save(
-                data={
-                    "name": name,
-                    "release": release,
-                    "type": release_type,
+            audit.save(
+                data=audit_data
+                | {
                     "context": context,
-                    "parent_run_id": parent_run_id,
-                    "run_id": run_id,
-                    "extras": self.extras,
                     "runs_metadata": (
                         (runs_metadata or {})
                         | rs.info
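
The save payload is now the precomputed `audit_data` merged with runtime fields via the dict union operator (PEP 584), which builds a new dict with the right-hand side winning on duplicate keys. For reference, with illustrative values:

```python
audit_data = {"name": "data-pipeline", "type": "normal", "run_id": "abc123"}
runtime = {"context": {"status": "success"}, "runs_metadata": {"execution_time": 1.2}}

payload = audit_data | runtime  # new dict; neither operand is mutated
assert payload["run_id"] == "abc123" and "context" in payload
```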
@@ -579,7 +569,7 @@ class Workflow(BaseModel):
         Returns:
             tuple[Status, DictData]: The pair of status and result context data.
         """
-        trace: TraceManager = get_trace(
+        trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         if event and event.is_set():
@@ -695,8 +685,8 @@ class Workflow(BaseModel):
         """
         ts: float = time.monotonic()
         parent_run_id: Optional[str] = run_id
-        run_id: str = gen_id(self.name, extras=self.extras)
-        trace: TraceManager = get_trace(
+        run_id: str = gen_id(self.name, unique=True, extras=self.extras)
+        trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
         context: DictData = self.parameterize(params)
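
`execute()` now asks `gen_id` for a unique ID, which carries the full timestamp in front of the hash and therefore matches the `YYYYMMDDHHMMSSffffffT...` shape that the new `cut_id` slices. A sketch, assuming the core config keeps simple-mode ID generation enabled:

```python
from ddeutil.workflow.utils import gen_id

# Assumption: simple-mode ID generation is enabled in the core config, so
# the result follows the documented YYYYMMDDHHMMSSffffffT<simple-id> shape.
run_id = gen_id("data-pipeline", unique=True)
print(run_id)  # e.g. "20250728120159123456T9a1b2c3d4e" (illustrative)
```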
@@ -733,6 +723,11 @@ class Workflow(BaseModel):
             )
             catch(context, status=WAIT)
             if event and event.is_set():
+                err_msg: str = (
+                    "Execution was canceled from the event was set "
+                    "before workflow execution."
+                )
+                trace.error(f"[WORKFLOW]: {err_msg}")
                 return Result(
                     run_id=run_id,
                     parent_run_id=parent_run_id,
@@ -740,12 +735,7 @@ class Workflow(BaseModel):
                     context=catch(
                         context,
                         status=CANCEL,
-                        updated={
-                            "errors": WorkflowCancelError(
-                                "Execution was canceled from the event was set "
-                                "before workflow execution."
-                            ).to_dict(),
-                        },
+                        updated={"errors": WorkflowCancelError(err_msg).to_dict()},
                     ),
                     info={"execution_time": time.monotonic() - ts},
                     extras=self.extras,
@@ -799,7 +789,7 @@ class Workflow(BaseModel):
             )
             elif check == SKIP:  # pragma: no cov
                 trace.info(
-                    f"[JOB]: Skip job: {job_id!r} from trigger rule."
+                    f"[JOB]: ⏭️ Skip job: {job_id!r} from trigger rule."
                 )
                 job.set_outputs(output={"status": SKIP}, to=context)
                 job_queue.task_done()
@@ -923,16 +913,17 @@ class Workflow(BaseModel):
     ) -> Result:  # pragma: no cov
         """Re-Execute workflow with passing the error context data.
 
-        :param context: A context result that get the failed status.
-        :param run_id: (Optional[str]) A workflow running ID.
-        :param event: (Event) An Event manager instance that use to cancel this
-            execution if it forces stopped by parent execution.
-        :param timeout: (float) A workflow execution time out in second unit
-            that use for limit time of execution and waiting job dependency.
-            This value does not force stop the task that still running more than
-            this limit time. (Default: 60 * 60 seconds)
-        :param max_job_parallel: (int) The maximum workers that use for job
-            execution in `ThreadPoolExecutor` object. (Default: 2 workers)
+        Args:
+            context: A context result that get the failed status.
+            run_id: (Optional[str]) A workflow running ID.
+            event: (Event) An Event manager instance that use to cancel this
+                execution if it forces stopped by parent execution.
+            timeout: (float) A workflow execution time out in second unit
+                that use for limit time of execution and waiting job dependency.
+                This value does not force stop the task that still running more
+                than this limit time. (Default: 60 * 60 seconds)
+            max_job_parallel: (int) The maximum workers that use for job
+                execution in `ThreadPoolExecutor` object. (Default: 2 workers)
 
         Returns
             Result: Return Result object that create from execution context with
@@ -941,7 +932,7 @@ class Workflow(BaseModel):
         ts: float = time.monotonic()
         parent_run_id: str = run_id
         run_id: str = gen_id(self.name, extras=self.extras)
-        trace: TraceManager = get_trace(
+        trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
        )
         if context["status"] == SUCCESS:
ddeutil_workflow-0.0.82.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.81
+Version: 0.0.82
 Summary: Lightweight workflow orchestration with YAML template
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
ddeutil_workflow-0.0.82.dist-info/RECORD ADDED
@@ -0,0 +1,35 @@
+ddeutil/workflow/__about__.py,sha256=GJmjvBMMhA0y8IwyPpPJLqLsuBWa6J90Pleir68QW5I,60
+ddeutil/workflow/__cron.py,sha256=avOagaHl9xXOmizeRWm13cOrty9Tw0vRjFq-xoEgpAY,29167
+ddeutil/workflow/__init__.py,sha256=elWSX2JPbjORH-CJIH_zR_nrdd8Xw95NjLK49R4Kqdg,3434
+ddeutil/workflow/__main__.py,sha256=Nqk5aO-HsZVKV2BmuJYeJEufJluipvCD9R1k2kMoJ3Y,8581
+ddeutil/workflow/__types.py,sha256=tA2vsr6mzTSzbWB1sb62c5GgxODlfVRz6FvgLNJtQao,4788
+ddeutil/workflow/audits.py,sha256=YPnWQYvhILPwPS6RhYKK0OG3lCeYFLuUNBxJvMlXf5w,26109
+ddeutil/workflow/conf.py,sha256=VfPmwaBYEgOj8bu4eim13ayZwJ4Liy7I702aQf7vS8g,17644
+ddeutil/workflow/errors.py,sha256=Rqtuf1MGxA-hKGP5wMAkaeeayst-u4P2dX6Fp_pzbsA,5678
+ddeutil/workflow/event.py,sha256=qm7QHw-Pozm6oIUzAIxpDkPzzVZVtHgJIUlIle0vEfQ,13943
+ddeutil/workflow/job.py,sha256=WHWOVz0ErOUfbN_aqpDeNmvBvpbKhcFlzcvJmlCpJuI,48430
+ddeutil/workflow/params.py,sha256=y9f6DEIyae1j4awbj3Kbeq75-U2UPFlKv9K57Hdo_Go,17188
+ddeutil/workflow/result.py,sha256=3Lpyv2Jn6T1Uc-lRbweDucSCoBr0ZByHjffKj14bj6s,9492
+ddeutil/workflow/reusables.py,sha256=SBLJSxR8ELoWJErBfSMZS3Rr1O_93T-fFBpfn2AvxuA,25007
+ddeutil/workflow/stages.py,sha256=CCR_D6yqVo74PuMxfqhi8GeeAq8sRbyxReHob6yxrjI,123708
+ddeutil/workflow/traces.py,sha256=YN4XuRfQK523cNy8EVgz2iPh6s6WB865K9JezCNdM7E,74637
+ddeutil/workflow/utils.py,sha256=vQwFu-wPK-lDiX2L8AZIahCkKEF6I0MCrZ1LlP8xkoQ,12011
+ddeutil/workflow/workflow.py,sha256=VN8i0mVyuCUw1kk_CqLkN-8dWLON9i4vTLbHp6lyS-s,42961
+ddeutil/workflow/api/__init__.py,sha256=5DzYL3ngceoRshh5HYCSVWChqNJSiP01E1bEd8XxPi0,4799
+ddeutil/workflow/api/log_conf.py,sha256=WfS3udDLSyrP-C80lWOvxxmhd_XWKvQPkwDqKblcH3E,1834
+ddeutil/workflow/api/routes/__init__.py,sha256=JRaJZB0D6mgR17MbZo8yLtdYDtD62AA8MdKlFqhG84M,420
+ddeutil/workflow/api/routes/job.py,sha256=-lbZ_hS9pEdSy6zeke5qrXEgdNxtQ2w9in7cHuM2Jzs,2536
+ddeutil/workflow/api/routes/logs.py,sha256=9jiYsw8kepud4n3NyXB7SAr2OoQwRn5uNb9kIZ58XJM,3806
+ddeutil/workflow/api/routes/workflows.py,sha256=0pEZEsIrscRFBXG9gf6nttKw0aNbcdw7NsAZKLoKWtk,4392
+ddeutil/workflow/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddeutil/workflow/plugins/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddeutil/workflow/plugins/providers/aws.py,sha256=61uIFBEWt-_D5Sui24qUPier1Hiqlw_RP_eY-rXBCKc,31551
+ddeutil/workflow/plugins/providers/az.py,sha256=o3dh011lEtmr7-d7FPZJPgXdT0ytFzKfc5xnVxSyXGU,34867
+ddeutil/workflow/plugins/providers/container.py,sha256=DSN0RWxMjTJN5ANheeMauDaPa3X6Z2E1eGUcctYkENw,22134
+ddeutil/workflow/plugins/providers/gcs.py,sha256=KgAOdMBvdbMLTH_z_FwVriBFtZfKEYx8_34jzUOVjTY,27460
+ddeutil_workflow-0.0.82.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.82.dist-info/METADATA,sha256=lB4PBeyneqHLwG0S4Lt_Xz8eCz4Lxn2bkDDHIFcBpxE,16087
+ddeutil_workflow-0.0.82.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ddeutil_workflow-0.0.82.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
+ddeutil_workflow-0.0.82.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.82.dist-info/RECORD,,