ddeutil-workflow 0.0.31__py3-none-any.whl → 0.0.33__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- ddeutil/workflow/workflow.py
+++ ddeutil/workflow/workflow.py
@@ -43,7 +43,8 @@ from typing_extensions import Self
 
  from .__cron import CronJob, CronRunner
  from .__types import DictData, TupleStr
- from .conf import Loader, Log, config, get_log, get_logger
+ from .audit import Audit, get_audit
+ from .conf import Loader, config, get_logger
  from .cron import On
  from .exceptions import JobException, WorkflowException
  from .job import Job
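This first hunk is the core rename of the release: audit logging moves out of `conf` into the new `audit` module (`ddeutil/workflow/audit.py` in the 0.0.33 RECORD below), with `Log`/`get_log` becoming `Audit`/`get_audit`. A minimal sketch of resolving the configured audit backend under the new names; only `Audit` and `get_audit` are taken from this diff, the rest is illustrative:

```python
# Sketch: resolve the configured audit backend via the new module layout.
from ddeutil.workflow.audit import Audit, get_audit

# Mirrors the `log or get_audit()` pattern used throughout the hunks below.
audit_cls: type[Audit] = get_audit()
print(audit_cls.__name__)
```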
@@ -485,7 +486,7 @@ class Workflow(BaseModel):
  params: DictData,
  *,
  run_id: str | None = None,
- log: type[Log] = None,
+ log: type[Audit] = None,
  queue: ReleaseQueue | None = None,
  override_log_name: str | None = None,
  ) -> Result:
@@ -515,7 +516,7 @@ class Workflow(BaseModel):
 
  :rtype: Result
  """
- log: type[Log] = log or get_log()
+ log: type[Audit] = log or get_audit()
  name: str = override_log_name or self.name
  run_id: str = run_id or gen_id(name, unique=True)
  rs_release: Result = Result(run_id=run_id)
@@ -562,15 +563,14 @@ class Workflow(BaseModel):
  # NOTE: Saving execution result to destination of the input log object.
  logger.debug(f"({cut_id(run_id)}) [LOG]: Writing log: {name!r}.")
  (
- log.model_validate(
- {
- "name": name,
- "release": release.date,
- "type": release.type,
- "context": rs.context,
- "parent_run_id": rs.parent_run_id,
- "run_id": rs.run_id,
- }
+ log(
+ name=name,
+ release=release.date,
+ type=release.type,
+ context=rs.context,
+ parent_run_id=rs.parent_run_id,
+ run_id=rs.run_id,
+ execution_time=rs.alive_time(),
  ).save(excluded=None)
  )
 
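Besides the rename, the write path changes shape in this hunk: the record is built with keyword arguments instead of `model_validate` on a dict, and 0.0.33 adds an `execution_time` field taken from `rs.alive_time()`. A hedged sketch of writing one audit record the same way; every field value below is a placeholder for what the diff pulls from the `Release` and `Result` objects:

```python
# Sketch: persist one audit record as the new release() body does.
# Placeholder values stand in for release.date, release.type, and the
# fields of the Result `rs` in the diff.
from datetime import datetime

from ddeutil.workflow.audit import get_audit

audit_cls = get_audit()
audit_cls(
    name="wf-example",
    release=datetime(2024, 1, 1),
    type="manual",
    context={},
    parent_run_id=None,
    run_id="wf-example-0001",
    execution_time=0.0,  # new in 0.0.33, from rs.alive_time()
).save(excluded=None)
```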
@@ -602,7 +602,7 @@ class Workflow(BaseModel):
  offset: float,
  end_date: datetime,
  queue: ReleaseQueue,
- log: type[Log],
+ log: type[Audit],
  *,
  force_run: bool = False,
  ) -> ReleaseQueue:
@@ -671,7 +671,7 @@ class Workflow(BaseModel):
  *,
  run_id: str | None = None,
  periods: int = 1,
- log: Log | None = None,
+ log: Audit | None = None,
  force_run: bool = False,
  timeout: int = 1800,
  ) -> list[Result]:
@@ -698,7 +698,7 @@ class Workflow(BaseModel):
  :rtype: list[Result]
  :return: A list of all results that return from ``self.release`` method.
  """
- log: type[Log] = log or get_log()
+ log: type[Audit] = log or get_audit()
  run_id: str = run_id or gen_id(self.name, unique=True)
 
  # VALIDATE: Check the periods value should gather than 0.
@@ -820,6 +820,7 @@ class Workflow(BaseModel):
  *,
  run_id: str | None = None,
  raise_error: bool = True,
+ result: Result | None = None,
  ) -> Result:
  """Job execution with passing dynamic parameters from the main workflow
  execution to the target job object via job's ID.
@@ -837,13 +838,18 @@ class Workflow(BaseModel):
  :param run_id: A workflow running ID for this job execution.
  :param raise_error: A flag that raise error instead catching to result
  if it gets exception from job execution.
+ :param result: (Result) A result object for keeping context and status
+ data.
 
  :rtype: Result
  :return: Return the result object that receive the job execution result
  context.
  """
- run_id: str = run_id or gen_id(self.name, unique=True)
- rs: Result = Result(run_id=run_id)
+ if result is None: # pragma: no cov
+ run_id: str = run_id or gen_id(self.name, unique=True)
+ result: Result = Result(run_id=run_id)
+ else:
+ run_id: str = result.run_id
 
  # VALIDATE: check a job ID that exists in this workflow or not.
  if job_id not in self.jobs:
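The pattern in this hunk recurs through the rest of the file: `execute_job` now accepts an existing `Result` and reuses its `run_id` (and trace) instead of generating fresh ones. A sketch of both call shapes; the workflow and job names are illustrative, and `from_loader` is assumed to exist on `Workflow` the same way the README shows it for `Schedule`:

```python
# Sketch: the two call shapes of execute_job after this change.
# "wf-example" and "first-job" are illustrative names.
from ddeutil.workflow import Workflow
from ddeutil.workflow.result import Result

workflow = Workflow.from_loader("wf-example")

# Old shape: a Result is created internally from run_id.
rs = workflow.execute_job("first-job", params={})

# New shape: hand in one Result so run_id and trace stay consistent.
shared = Result(run_id="manual-0001")
rs = workflow.execute_job("first-job", params={}, result=shared)
```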
@@ -852,9 +858,7 @@ class Workflow(BaseModel):
  f"workflow."
  )
 
- logger.info(
- f"({cut_id(run_id)}) [WORKFLOW]: Start execute job: {job_id!r}"
- )
+ result.trace.info(f"[WORKFLOW]: Start execute job: {job_id!r}")
 
  # IMPORTANT:
  # This execution change all job running IDs to the current workflow
@@ -868,10 +872,7 @@ class Workflow(BaseModel):
  to=params,
  )
  except JobException as err:
- logger.error(
- f"({cut_id(run_id)}) [WORKFLOW]: {err.__class__.__name__}: "
- f"{err}"
- )
+ result.trace.error(f"[WORKFLOW]: {err.__class__.__name__}: {err}")
  if raise_error:
  raise WorkflowException(
  f"Get job execution error {job_id}: JobException: {err}"
@@ -880,7 +881,7 @@ class Workflow(BaseModel):
  "Handle error from the job execution does not support yet."
  ) from None
 
- return rs.catch(status=0, context=params)
+ return result.catch(status=0, context=params)
 
  def execute(
  self,
@@ -888,6 +889,7 @@ class Workflow(BaseModel):
  *,
  run_id: str | None = None,
  timeout: int = 0,
+ result: Result | None = None,
  ) -> Result:
  """Execute workflow with passing a dynamic parameters to all jobs that
  included in this workflow model with ``jobs`` field.
@@ -907,31 +909,32 @@ class Workflow(BaseModel):
 
  :param run_id: A workflow running ID for this job execution.
  :type run_id: str | None (default: None)
- :param timeout: A workflow execution time out in second unit that use
+ :param timeout: (int) A workflow execution time out in second unit that use
  for limit time of execution and waiting job dependency. This value
  does not force stop the task that still running more than this limit
- time.
- :type timeout: int (default: 0)
+ time. (default: 0)
+ :param result: (Result) A result object for keeping context and status
+ data.
 
  :rtype: Result
  """
- run_id: str = run_id or gen_id(self.name, unique=True)
- logger.info(
- f"({cut_id(run_id)}) [WORKFLOW]: Start Execute: {self.name!r} ..."
- )
-
  # NOTE: I use this condition because this method allow passing empty
  # params and I do not want to create new dict object.
  ts: float = time.monotonic()
- rs: Result = Result(run_id=run_id)
+ if result is None: # pragma: no cov
+ result: Result = Result(
+ run_id=(run_id or gen_id(self.name, unique=True))
+ )
+
+ result.trace.info(f"[WORKFLOW]: Start Execute: {self.name!r} ...")
 
  # NOTE: It should not do anything if it does not have job.
  if not self.jobs:
- logger.warning(
- f"({cut_id(run_id)}) [WORKFLOW]: This workflow: {self.name!r} "
- f"does not have any jobs"
+ result.trace.warning(
+ f"[WORKFLOW]: This workflow: {self.name!r} does not have any "
+ f"jobs"
  )
- return rs.catch(status=0, context=params)
+ return result.catch(status=0, context=params)
 
  # NOTE: Create a job queue that keep the job that want to run after
  # its dependency condition.
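Seen together with the `execute_job` change, `execute` now threads one `Result` through the whole run: `result.trace` replaces the module-level `logger` calls, so every job in the run logs under a single run ID. A sketch under the same assumptions as above:

```python
# Sketch: drive a whole execution through one shared Result and trace.
# Workflow name and params are illustrative.
from ddeutil.workflow import Workflow
from ddeutil.workflow.result import Result

workflow = Workflow.from_loader("wf-example")
shared = Result(run_id="manual-0001")

rs = workflow.execute(params={"asat-dt": "2024-01-01"}, result=shared)
print(rs.run_id)  # execute catches into the shared Result, so this prints its run_id
```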
@@ -952,7 +955,7 @@ class Workflow(BaseModel):
  try:
  if config.max_job_parallel == 1:
  self.__exec_non_threading(
- run_id=run_id,
+ result=result,
  context=context,
  ts=ts,
  job_queue=jq,
@@ -960,7 +963,7 @@ class Workflow(BaseModel):
  )
  else:
  self.__exec_threading(
- run_id=run_id,
+ result=result,
  context=context,
  ts=ts,
  job_queue=jq,
@@ -974,11 +977,11 @@ class Workflow(BaseModel):
  "error_message": f"{err.__class__.__name__}: {err}",
  },
  )
- return rs.catch(status=status, context=context)
+ return result.catch(status=status, context=context)
 
  def __exec_threading(
  self,
- run_id: str,
+ result: Result,
  context: DictData,
  ts: float,
  job_queue: Queue,
@@ -991,6 +994,7 @@ class Workflow(BaseModel):
  If a job need dependency, it will check dependency job ID from
  context data before allow it run.
 
+ :param result: A result model.
  :param context: A context workflow data that want to downstream passing.
  :param ts: A start timestamp that use for checking execute time should
  time out.
@@ -1002,9 +1006,7 @@ class Workflow(BaseModel):
  """
  not_timeout_flag: bool = True
  timeout: int = timeout or config.max_job_exec_timeout
- logger.debug(
- f"({cut_id(run_id)}) [WORKFLOW]: Run {self.name!r} with threading."
- )
+ result.trace.debug(f"[WORKFLOW]: Run {self.name!r} with threading.")
 
  # IMPORTANT: The job execution can run parallel and waiting by
  # needed.
@@ -1041,6 +1043,7 @@ class Workflow(BaseModel):
  self.execute_job,
  job_id,
  params=context,
+ result=result,
  ),
  )
 
@@ -1055,7 +1058,7 @@ class Workflow(BaseModel):
 
  for future in as_completed(futures, timeout=thread_timeout):
  if err := future.exception():
- logger.error(f"({cut_id(run_id)}) [WORKFLOW]: {err}")
+ result.trace.error(f"[WORKFLOW]: {err}")
  raise WorkflowException(str(err))
 
  # NOTE: This getting result does not do anything.
@@ -1067,15 +1070,14 @@ class Workflow(BaseModel):
  future.cancel()
 
  # NOTE: Raise timeout error.
- logger.warning(
- f"({cut_id(run_id)}) [WORKFLOW]: Execution: {self.name!r} "
- f"was timeout."
+ result.trace.warning(
+ f"[WORKFLOW]: Execution: {self.name!r} was timeout."
  )
  raise WorkflowException(f"Execution: {self.name!r} was timeout.")
 
  def __exec_non_threading(
  self,
- run_id: str,
+ result: Result,
  context: DictData,
  ts: float,
  job_queue: Queue,
@@ -1088,6 +1090,7 @@ class Workflow(BaseModel):
  If a job need dependency, it will check dependency job ID from
  context data before allow it run.
 
+ :param result: A result model.
  :param context: A context workflow data that want to downstream passing.
  :param ts: A start timestamp that use for checking execute time should
  time out.
@@ -1097,10 +1100,7 @@ class Workflow(BaseModel):
  """
  not_timeout_flag: bool = True
  timeout: int = timeout or config.max_job_exec_timeout
- logger.debug(
- f"({cut_id(run_id)}) [WORKFLOW]: Run {self.name!r} with "
- f"non-threading."
- )
+ result.trace.debug(f"[WORKFLOW]: Run {self.name!r} with non-threading.")
 
  while not job_queue.empty() and (
  not_timeout_flag := ((time.monotonic() - ts) < timeout)
@@ -1123,7 +1123,7 @@ class Workflow(BaseModel):
  # 'params': <input-params>,
  # 'jobs': {},
  # }
- self.execute_job(job_id=job_id, params=context, run_id=run_id)
+ self.execute_job(job_id=job_id, params=context, result=result)
 
  # NOTE: Mark this job queue done.
  job_queue.task_done()
@@ -1137,9 +1137,8 @@ class Workflow(BaseModel):
  return context
 
  # NOTE: Raise timeout error.
- logger.warning(
- f"({cut_id(run_id)}) [WORKFLOW]: Execution: {self.name!r} "
- f"was timeout."
+ result.trace.warning(
+ f"[WORKFLOW]: Execution: {self.name!r} was timeout."
  )
  raise WorkflowException(f"Execution: {self.name!r} was timeout.")
 
@@ -1165,7 +1164,7 @@ class WorkflowTask:
  self,
  release: datetime | Release | None = None,
  run_id: str | None = None,
- log: type[Log] = None,
+ log: type[Audit] = None,
  queue: ReleaseQueue | None = None,
  ) -> Result:
  """Release the workflow task data.
@@ -1177,7 +1176,7 @@ class WorkflowTask:
 
  :rtype: Result
  """
- log: type[Log] = log or get_log()
+ log: type[Audit] = log or get_audit()
 
  if release is None:
 
@@ -1213,7 +1212,7 @@ class WorkflowTask:
  self,
  end_date: datetime,
  queue: ReleaseQueue,
- log: type[Log],
+ log: type[Audit],
  *,
  force_run: bool = False,
  ) -> ReleaseQueue:
@@ -1223,8 +1222,8 @@ class WorkflowTask:
  :param end_date: An end datetime object.
  :param queue: A workflow queue object.
  :param log: A log class that want to make log object.
- :param force_run: A flag that allow to release workflow if the log with
- that release was pointed.
+ :param force_run: (bool) A flag that allow to release workflow if the
+ log with that release was pointed.
 
  :rtype: ReleaseQueue
  """
@@ -1260,7 +1259,7 @@ class WorkflowTask:
  return queue
 
  def __repr__(self) -> str:
- """Override ___repr__ method."""
+ """Override the `__repr__` method."""
  return (
  f"{self.__class__.__name__}(alias={self.alias!r}, "
  f"workflow={self.workflow.name!r}, runner={self.runner!r}, "
--- ddeutil_workflow-0.0.31.dist-info/METADATA
+++ ddeutil_workflow-0.0.33.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: ddeutil-workflow
- Version: 0.0.31
+ Version: 0.0.33
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -92,7 +92,7 @@ flowchart LR
 
  subgraph Data Context
  D@{ shape: processes, label: "Logs" }
- E@{ shape: lin-cyl, label: "Metadata" }
+ E@{ shape: lin-cyl, label: "Audit<br>Logs" }
  end
 
  subgraph Git Context
@@ -110,7 +110,7 @@ flowchart LR
  E -.->|read| G
  ```
 
- > [!NOTE]
+ > [!WARNING]
  > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
  > with `.yml` files and all configs file from several data orchestration framework
  > tools from my experience on Data Engineer. :grimacing:
@@ -201,7 +201,8 @@ result: Result = workflow.execute(
  )
  ```
 
- So, this package provide the `Schedule` template for this action.
+ So, this package provide the `Schedule` template for this action, and you can dynamic
+ pass the parameters for changing align with that running time by the `release` prefix.
 
  ```yaml
  schedule-run-local-wf:
@@ -219,6 +220,20 @@ schedule-run-local-wf:
  asat-dt: "${{ release.logical_date }}"
  ```
 
+ The main method of the `Schedule` model that use to running is `pending`. If you
+ do not pass the `stop` date on this method, it will use config with `WORKFLOW_APP_STOP_BOUNDARY_DELTA`
+ key for generate this stop date.
+
+ ```python
+ from ddeutil.workflow import Schedule
+
+ (
+     Schedule
+     .from_loader("schedule-run-local-wf")
+     .pending(stop=None)
+ )
+ ```
+
  ## :cookie: Configuration
 
  The main configuration that use to dynamic changing this workflow engine for your
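The `WORKFLOW_APP_STOP_BOUNDARY_DELTA` key named in the new paragraph corresponds to the **STOP_BOUNDARY_DELTA** row (App component) in the configuration table below. A sketch of setting it explicitly before calling `pending(stop=None)`; the JSON value shown is just the documented default:

```python
# Sketch: control the auto-generated stop date for Schedule.pending().
# The value mirrors the documented STOP_BOUNDARY_DELTA default.
import os

os.environ["WORKFLOW_APP_STOP_BOUNDARY_DELTA"] = '{"minutes": 5, "seconds": 20}'

from ddeutil.workflow import Schedule

Schedule.from_loader("schedule-run-local-wf").pending(stop=None)
```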
@@ -229,29 +244,33 @@ it will use default value and do not raise any error to you.
  > The config value that you will set on the environment should combine with
  > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).
 
- | Name | Component | Default | Description |
- |:---|:---:|:---|:---|
- | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
- | **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
- | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
- | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
- | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
- | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
- | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
- | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
- | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
- | **MAX_NUM_POKING** | Core | `4` | . |
- | **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
- | **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
- | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
- | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
- | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
- | **PATH** | Log | `./logs` | The log path of the workflow saving log. |
- | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
- | **ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
- | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
- | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
- | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
+ | Name | Component | Default | Description |
+ |:---|:---:|:---|:---|
+ | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
+ | **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
+ | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
+ | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
+ | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
+ | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
+ | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
+ | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+ | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
+ | **MAX_NUM_POKING** | Core | `4` | . |
+ | **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
+ | **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
+ | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
+ | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
+ | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
+ | **PATH** | Log | `./logs` | The log path of the workflow saving log. |
+ | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
+ | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
+ | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
+ | **ENABLE_ROTATED_FILE** | Log | `false` | |
+ | **PATH** | Audit | `./logs` | |
+ | **ENABLE_WRITE** | Audit | `true` | A flag that enable logging object saving log to its destination. |
+ | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
+ | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
+ | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
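The new rows split the former Log component in two: `Log` keeps the trace-logging settings (format, rotation, debug mode) while the new `Audit` component owns the persisted audit records, matching the `audit` module split above. All rows still follow the `WORKFLOW_{component}_{name}` convention stated before the table; a sketch with illustrative values:

```python
# Sketch: table rows map onto WORKFLOW_{component}_{name} variables.
import os

os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"
os.environ["WORKFLOW_LOG_DEBUG_MODE"] = "true"
os.environ["WORKFLOW_AUDIT_PATH"] = "./logs"        # Audit component, new in this table
os.environ["WORKFLOW_AUDIT_ENABLE_WRITE"] = "true"  # moved here from Log
```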
 
  **API Application**:
 
--- /dev/null
+++ ddeutil_workflow-0.0.33.dist-info/RECORD
@@ -0,0 +1,26 @@
+ ddeutil/workflow/__about__.py,sha256=3CTb0QhYG3rUiN5ms0KRCjX1IdXetGlrmdY7SfsFOQQ,28
+ ddeutil/workflow/__cron.py,sha256=3i-wmjTlh0ADCzN9pLKaWHzJkXzC72aIBmVEQSbyCCE,26895
+ ddeutil/workflow/__init__.py,sha256=Mo7M7-PU_brXSZJ9JSylPoNi7-njZxD3jr3BenTaB9k,1705
+ ddeutil/workflow/__types.py,sha256=CK1jfzyHP9P-MB0ElhpJZ59ZFGJC9MkQuAop5739_9k,4304
+ ddeutil/workflow/audit.py,sha256=i1He3T44nPrUabpof_ACQB0azEKucXxjlG3lwOpN3hA,8289
+ ddeutil/workflow/conf.py,sha256=4ndpWsdvLS7_Ae0udxU0MX3Uow2PpTGxV4F06GZ5-KQ,12985
+ ddeutil/workflow/cron.py,sha256=j8EeoHst70toRfnD_frix41vrI-eLYVJkZ9yeJtpfnI,8871
+ ddeutil/workflow/exceptions.py,sha256=5ghT443VLq0IeU87loHNEqqrrrctklP7YfxwJ51ImWU,949
+ ddeutil/workflow/hook.py,sha256=MgZFlTGvaRSBrTouZGlxwYpKQoKDOT26PNhESeL3LY0,5469
+ ddeutil/workflow/job.py,sha256=QhB1fw17EgAyltLLComkmD4Ao3mHKHrF2h6A1He1TSQ,24584
+ ddeutil/workflow/params.py,sha256=LKR7jXyxTb5NVrFav_fl2y9xo3p7qL1S9h-i6CtvNwE,5851
+ ddeutil/workflow/result.py,sha256=XOs4VzZb3HJr2hXzA3rPPAB1fG2cWGMR-Btjo-qiFYE,4567
+ ddeutil/workflow/scheduler.py,sha256=MODsAYg0aP3AJX45O_PT7_XzIDA3_dGuR_8Q1LETv60,24430
+ ddeutil/workflow/stage.py,sha256=ct9kqFKxnS2eJZv-ZMjKP1LROivooBDVwcMm8dlGMjk,24705
+ ddeutil/workflow/templates.py,sha256=A0JgZFGkBv-AX-EskZj656nG5zFd3j1PpLpyXihf6Xg,10967
+ ddeutil/workflow/utils.py,sha256=rTDQKaaber7cRqzJjWpCP9OTbarti1UMKdLgH6VRjFM,6709
+ ddeutil/workflow/workflow.py,sha256=HfUKAZo7LMs6Mn1mGSw8-D8To4os-sbNuux9RjhSyCY,43687
+ ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
+ ddeutil/workflow/api/api.py,sha256=TfgJtu-yREsrRveLcqTjxoJszuhq21qKkl4oyQpSzvQ,3959
+ ddeutil/workflow/api/repeat.py,sha256=zyvsrXKk-3-_N8ZRZSki0Mueshugum2jtqctEOp9QSc,4927
+ ddeutil/workflow/api/route.py,sha256=XHPw9IKiVLPWYl937u09s0_Kd6rolWK9TbuIN0RFkfA,8625
+ ddeutil_workflow-0.0.33.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ ddeutil_workflow-0.0.33.dist-info/METADATA,sha256=syJ1Sk2H6sKxsVTLxJeQr5oIMb4oozPFQ_lfz5INEiU,18887
+ ddeutil_workflow-0.0.33.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ ddeutil_workflow-0.0.33.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ ddeutil_workflow-0.0.33.dist-info/RECORD,,
--- ddeutil_workflow-0.0.31.dist-info/RECORD
+++ /dev/null
@@ -1,25 +0,0 @@
- ddeutil/workflow/__about__.py,sha256=3Fv8LNxLB3UmoMSQqDZv556u3eR-OYBH8pw9kM6FeE4,28
- ddeutil/workflow/__cron.py,sha256=3i-wmjTlh0ADCzN9pLKaWHzJkXzC72aIBmVEQSbyCCE,26895
- ddeutil/workflow/__init__.py,sha256=pRIZIGwC7Xs8Ur7-jHPIAMLriD5If9zOPc-ZmKZS2XQ,1678
- ddeutil/workflow/__types.py,sha256=CK1jfzyHP9P-MB0ElhpJZ59ZFGJC9MkQuAop5739_9k,4304
- ddeutil/workflow/conf.py,sha256=6yGbSi69lsccYgnrwTzdjdPhU54hUop2e1GjBNres08,17663
- ddeutil/workflow/cron.py,sha256=j8EeoHst70toRfnD_frix41vrI-eLYVJkZ9yeJtpfnI,8871
- ddeutil/workflow/exceptions.py,sha256=5ghT443VLq0IeU87loHNEqqrrrctklP7YfxwJ51ImWU,949
- ddeutil/workflow/hook.py,sha256=MgZFlTGvaRSBrTouZGlxwYpKQoKDOT26PNhESeL3LY0,5469
- ddeutil/workflow/job.py,sha256=XcewyALsLYYq94ycF6mkj3Ydr6if683z7t1oBqEVInE,24290
- ddeutil/workflow/params.py,sha256=AJLiTaF6lG37SvzyniTCug5-TgZTmNCn5fJXb-CCcqM,5707
- ddeutil/workflow/result.py,sha256=8LItqF-Xe6pAAWkAsY_QFkKBOA0fEBh97I2og3CZsPc,3409
- ddeutil/workflow/scheduler.py,sha256=2Y_ewAP1iQKgD81i6H6fXuJLCVqLocumEmCG2SomEqg,22214
- ddeutil/workflow/stage.py,sha256=glAhvgvyQ98n2JLUBU8MUs48FEPkfU1fvh3Wwi9PSCg,24293
- ddeutil/workflow/templates.py,sha256=bVU_8gnMQmdhhw3W28ZqwmpEaOx10Nx_aauqiLS0lqg,10807
- ddeutil/workflow/utils.py,sha256=rTDQKaaber7cRqzJjWpCP9OTbarti1UMKdLgH6VRjFM,6709
- ddeutil/workflow/workflow.py,sha256=ET1otR5VcfnOMoNiW7EMb1_wIaxNw9yWsBXS5kVWG9s,43428
- ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
- ddeutil/workflow/api/api.py,sha256=Ma9R8yuQAhowG_hox-k53swFsf5IAvheEnSxNQ-8DaQ,4039
- ddeutil/workflow/api/repeat.py,sha256=zyvsrXKk-3-_N8ZRZSki0Mueshugum2jtqctEOp9QSc,4927
- ddeutil/workflow/api/route.py,sha256=v96jNbgjM1cJ2MpVSRWs2kgRqF8DQElEBdRZrVFEpEw,8578
- ddeutil_workflow-0.0.31.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
- ddeutil_workflow-0.0.31.dist-info/METADATA,sha256=-cDcCfcV-4_dU_91GSy5sdV-ZkEWbdk7wDmAJB24puU,15090
- ddeutil_workflow-0.0.31.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- ddeutil_workflow-0.0.31.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
- ddeutil_workflow-0.0.31.dist-info/RECORD,,