ddeutil-workflow 0.0.53__py3-none-any.whl → 0.0.55__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/utils.py CHANGED
@@ -24,6 +24,7 @@ from .__types import DictData, Matrix
 
 T = TypeVar("T")
 UTC: Final[ZoneInfo] = ZoneInfo("UTC")
+NEWLINE: Final[str] = "\n\t| ...\t"
 
 
 def replace_sec(dt: datetime) -> datetime:
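The new `NEWLINE` constant acts as a visual separator when a multi-line message is folded into a single trace entry (see the `Handler:{NEWLINE}{e}` call later in this diff). A minimal sketch of that joining behavior; the `fold` helper here is hypothetical, not part of the package:

```python
from typing import Final

NEWLINE: Final[str] = "\n\t| ...\t"


def fold(message: str) -> str:
    # Hypothetical helper: mark each continuation line of a multi-line
    # message with the "| ..." separator so it stays readable in a log.
    return NEWLINE.join(message.splitlines())


print(fold("WorkflowException\njob 'a' failed\njob 'b' skipped"))
# WorkflowException
#     | ...   job 'a' failed
#     | ...   job 'b' skipped
```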
@@ -200,7 +201,7 @@ def filter_func(value: T) -> T:
 def cross_product(matrix: Matrix) -> Iterator[DictData]:
     """Iterator of products value from matrix.
 
-    :param matrix:
+    :param matrix: (Matrix)
 
     :rtype: Iterator[DictData]
     """
@@ -223,7 +224,7 @@ def batch(iterable: Iterator[Any] | range, n: int) -> Iterator[Any]:
     ['G']
 
     :param iterable:
-    :param n:
+    :param n: (int) A number of returning batch size.
 
     :rtype: Iterator[Any]
     """
@@ -247,8 +248,8 @@ def cut_id(run_id: str, *, num: int = 6) -> str:
     >>> cut_id(run_id='20240101081330000000T1354680202')
     '202401010813680202'
 
-    :param run_id: A running ID That want to cut
-    :param num:
+    :param run_id: (str) A running ID That want to cut.
+    :param num: (int) A number of cutting length.
 
     :rtype: str
     """
ddeutil/workflow/workflow.py CHANGED
@@ -4,11 +4,11 @@
 # license information.
 # ------------------------------------------------------------------------------
 # [x] Use dynamic config
-"""A Workflow module that is the core module of this package. It keeps Release
-and Workflow Pydantic models.
+"""Workflow module is the core module of this Workflow package. It keeps
+Release, ReleaseQueue, and Workflow Pydantic models.
 
-I will implement timeout on the workflow execution layer only because the
-main propose of this package in Workflow model.
+This package implement timeout strategy on the workflow execution layer only
+because the main propose of this package is using Workflow to be orchestrator.
 """
 from __future__ import annotations
 
@@ -43,9 +43,10 @@ from .exceptions import JobException, UtilException, WorkflowException
 from .job import Job
 from .logs import Audit, get_audit
 from .params import Param
-from .result import FAILED, SKIP, SUCCESS, WAIT, Result, Status
+from .result import FAILED, SKIP, SUCCESS, WAIT, Result
 from .reusables import has_template, param2template
 from .utils import (
+    NEWLINE,
     gen_id,
     get_dt_now,
     reach_next_minute,
@@ -165,6 +166,7 @@ class ReleaseQueue:
     extras: DictData = Field(
         default_factory=dict,
         description="An extra parameters that want to override config values.",
+        repr=False,
     )
 
     @classmethod
@@ -212,15 +214,6 @@ class ReleaseQueue:
         """
         return len(self.queue) > 0
 
-    @property
-    def first_queue(self) -> Release:
-        """Check an input Release object is the first value of the
-        waiting queue.
-
-        :rtype: Release
-        """
-        return self.queue[0]
-
     def check_queue(self, value: Release | datetime) -> bool:
         """Check a Release value already exists in list of tracking
         queues.
@@ -239,16 +232,6 @@ class ReleaseQueue:
             or (value in self.complete)
         )
 
-    def remove_running(self, value: Release) -> Self:
-        """Remove Release in the running queue if it exists.
-
-        :rtype: Self
-        """
-        if value in self.running:
-            self.running.remove(value)
-
-        return self
-
     def mark_complete(self, value: Release) -> Self:
         """Push Release to the complete queue.
 
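`remove_running` was dropped in favor of inlining its two lines at the call site; the `release` hunk later in this diff shows the replacement. A runnable sketch of that inlined pattern, using a minimal stand-in rather than the real `ReleaseQueue`:

```python
from dataclasses import dataclass, field


# Minimal stand-in (not the real ReleaseQueue) to show the inlined pattern
# that replaces the removed remove_running helper.
@dataclass
class MiniQueue:
    running: list[str] = field(default_factory=list)
    complete: list[str] = field(default_factory=list)

    def mark_complete(self, value: str) -> "MiniQueue":
        self.complete.append(value)
        return self


queue = MiniQueue(running=["2024-01-01"])
release = "2024-01-01"
if release in queue.running:
    queue.running.remove(release)  # drop from the running queue if present
queue.mark_complete(release)       # then track the release as complete

assert queue.running == [] and queue.complete == ["2024-01-01"]
```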
@@ -462,7 +445,7 @@ class Workflow(BaseModel):
         if isinstance(on, str):
             on: list[str] = [on]
         if any(not isinstance(i, (dict, str)) for i in on):
-            raise TypeError("The ``on`` key should be list of str or dict")
+            raise TypeError("The `on` key should be list of str or dict")
 
         # NOTE: Pass on value to SimLoad and keep on model object to the on
         #   field.
@@ -602,8 +585,8 @@ class Workflow(BaseModel):
         ...     "jobs": {}
         ... }
 
-        :param params: A parameter mapping that receive from workflow execution.
-        :type params: DictData
+        :param params: (DictData) A parameter data that receive from workflow
+            execute method.
 
         :raise WorkflowException: If parameter value that want to validate does
             not include the necessary parameter that had required flag.
@@ -623,7 +606,7 @@ class Workflow(BaseModel):
                 f"{', '.join(check_key)}."
             )
 
-        # NOTE: Mapping type of param before adding it to the ``params`` key.
+        # NOTE: Mapping type of param before adding it to the `params` key.
         return {
             "params": (
                 params
@@ -701,10 +684,10 @@ class Workflow(BaseModel):
         if isinstance(release, datetime):
             release: Release = Release.from_dt(release, extras=self.extras)
 
-        result.trace.debug(
+        result.trace.info(
             f"[RELEASE]: Start {name!r} : {release.date:%Y-%m-%d %H:%M:%S}"
         )
-        self.execute(
+        rs: Result = self.execute(
             params=param2template(
                 params,
                 params={
@@ -723,7 +706,7 @@ class Workflow(BaseModel):
             parent_run_id=result.parent_run_id,
             timeout=timeout,
         )
-        result.trace.debug(
+        result.trace.info(
             f"[RELEASE]: End {name!r} : {release.date:%Y-%m-%d %H:%M:%S}"
         )
 
@@ -744,11 +727,12 @@ class Workflow(BaseModel):
         )
 
         if queue:
-            queue.remove_running(release)
+            if release in queue.running:
+                queue.running.remove(release)
             queue.mark_complete(release)
 
         return result.catch(
-            status=SUCCESS,
+            status=rs.status,
             context={
                 "params": params,
                 "release": {
@@ -969,22 +953,19 @@ class Workflow(BaseModel):
         execution to the target job object via job's ID.
 
         This execution is the minimum level of execution of this workflow
-        model. It different with ``self.execute`` because this method run only
+        model. It different with `self.execute` because this method run only
         one job and return with context of this job data.
 
         :raise WorkflowException: If execute with not exist job's ID.
         :raise WorkflowException: If the job execution raise JobException.
 
-        :param job_id: A job ID that want to execute.
-        :param params: A params that was parameterized from workflow execution.
-        :param result: (Result) A result object for keeping context and status
-            data.
-        :param event: (Event) An event manager that pass to the
-            PoolThreadExecutor.
+        :param job_id: A job ID.
+        :param params: (DictData) A parameter data.
+        :param result: (Result) A Result instance for return context and status.
+        :param event: (Event) An Event manager instance that use to cancel this
+            execution if it forces stopped by parent execution.
 
         :rtype: Result
-        :return: Return the result object that receive the job execution result
-            context.
         """
         if result is None:  # pragma: no cov
             result: Result = Result(run_id=gen_id(self.name, unique=True))
@@ -1020,13 +1001,12 @@ class Workflow(BaseModel):
         except (JobException, UtilException) as e:
             result.trace.error(f"[WORKFLOW]: {e.__class__.__name__}: {e}")
             raise WorkflowException(
-                f"Get job execution error {job_id}: JobException: {e}"
+                f"Job {job_id!r} raise {e.__class__.__name__}: {e}"
             ) from None
 
         if rs.status == FAILED:
-            error_msg: str = (
-                f"Workflow job, {job.id}, failed without raise error."
-            )
+            error_msg: str = f"Workflow job, {job.id!r}, return FAILED status."
+            result.trace.warning(f"[WORKFLOW]: {error_msg}")
             return result.catch(
                 status=FAILED,
                 context={
@@ -1042,37 +1022,45 @@ class Workflow(BaseModel):
         *,
         run_id: str | None = None,
         parent_run_id: str | None = None,
-        timeout: int = 600,
         result: Result | None = None,
-        max_job_parallel: int = 2,
         event: Event | None = None,
+        timeout: int = 3600,
+        max_job_parallel: int = 2,
     ) -> Result:
         """Execute workflow with passing a dynamic parameters to all jobs that
-        included in this workflow model with ``jobs`` field.
+        included in this workflow model with `jobs` field.
 
         The result of execution process for each job and stages on this
         workflow will keep in dict which able to catch out with all jobs and
         stages by dot annotation.
 
-        For example, when I want to use the output from previous stage, I
-        can access it with syntax:
+        For example with non-strategy job, when I want to use the output
+        from previous stage, I can access it with syntax:
 
-            ... ${job-name}.stages.${stage-id}.outputs.${key}
-            ... ${job-name}.stages.${stage-id}.errors.${key}
+            ... ${job-id}.stages.${stage-id}.outputs.${key}
+            ... ${job-id}.stages.${stage-id}.errors.${key}
 
-        :param params: An input parameters that use on workflow execution that
-            will parameterize before using it. Default is None.
-        :param run_id: A workflow running ID for this job execution.
-        :param parent_run_id: A parent workflow running ID for this release.
+        But example for strategy job:
+
+            ... ${job-id}.strategies.${strategy-id}.stages.${stage-id}.outputs.${key}
+            ... ${job-id}.strategies.${strategy-id}.stages.${stage-id}.errors.${key}
+
+        This method already handle all exception class that can raise from
+        the job execution. It will warp that error and keep it in the key `errors`
+        at the result context.
+
+        :param params: A parameter data that will parameterize before execution.
+        :param run_id: (str | None) A workflow running ID.
+        :param parent_run_id: (str | None) A parent workflow running ID.
+        :param result: (Result) A Result instance for return context and status.
+        :param event: (Event) An Event manager instance that use to cancel this
+            execution if it forces stopped by parent execution.
         :param timeout: (int) A workflow execution time out in second unit that
             use for limit time of execution and waiting job dependency. This
             value does not force stop the task that still running more than this
-            limit time. (default: 0)
-        :param result: (Result) A result object for keeping context and status
-            data.
-        :param max_job_parallel: (int) The maximum threads of job execution.
-        :param event: (Event) An event manager that pass to the
-            PoolThreadExecutor.
+            limit time. (Default: 60 * 60 seconds)
+        :param max_job_parallel: (int) The maximum workers that use for job
+            execution in `PoolThreadExecutor` object. (Default: 2 workers)
 
         :rtype: Result
         """
@@ -1084,95 +1072,28 @@ class Workflow(BaseModel):
             id_logic=self.name,
             extras=self.extras,
         )
-
+        context: DictData = self.parameterize(params)
         result.trace.info(f"[WORKFLOW]: Execute: {self.name!r} ...")
         if not self.jobs:
-            result.trace.warning(
-                f"[WORKFLOW]: {self.name!r} does not have any jobs"
-            )
-            return result.catch(status=SUCCESS, context=params)
+            result.trace.warning(f"[WORKFLOW]: {self.name!r} does not set jobs")
+            return result.catch(status=SUCCESS, context=context)
 
-        jq: Queue = Queue()
+        job_queue: Queue = Queue()
         for job_id in self.jobs:
-            jq.put(job_id)
-
-        # NOTE: Create data context that will pass to any job executions
-        #   on this workflow.
-        #
-        #   {
-        #       'params': <input-params>,
-        #       'jobs': {},
-        #   }
-        #
-        context: DictData = self.parameterize(params)
-        status: Status = SUCCESS
-        try:
-            if (
-                dynamic(
-                    "max_job_parallel", f=max_job_parallel, extras=self.extras
-                )
-                == 1
-            ):
-                self.__exec_non_threading(
-                    result=result,
-                    context=context,
-                    ts=ts,
-                    job_queue=jq,
-                    timeout=timeout,
-                    event=event,
-                )
-            else:
-                self.__exec_threading(
-                    result=result,
-                    context=context,
-                    ts=ts,
-                    job_queue=jq,
-                    timeout=timeout,
-                    event=event,
-                )
-        except (WorkflowException, JobException) as e:
-            status: Status = FAILED
-            context.update({"errors": e.to_dict()})
-
-        return result.catch(status=status, context=context)
-
-    def __exec_threading(
-        self,
-        result: Result,
-        context: DictData,
-        ts: float,
-        job_queue: Queue,
-        *,
-        timeout: int = 600,
-        thread_timeout: int = 1800,
-        event: Event | None = None,
-    ) -> DictData:
-        """Workflow execution by threading strategy that use multithreading.
-
-            If a job need dependency, it will check dependency job ID from
-        context data before allow it run.
+            job_queue.put(job_id)
 
-        :param result: (Result) A result model.
-        :param context: A context workflow data that want to downstream passing.
-        :param ts: A start timestamp that use for checking execute time should
-            time out.
-        :param job_queue: (Queue) A job queue object.
-        :param timeout: (int) A second value unit that bounding running time.
-        :param thread_timeout: A timeout to waiting all futures complete.
-        :param event: (Event) An event manager that pass to the
-            PoolThreadExecutor.
-
-        :rtype: DictData
-        """
         not_timeout_flag: bool = True
         timeout: int = dynamic(
             "max_job_exec_timeout", f=timeout, extras=self.extras
         )
         event: Event = event or Event()
-        result.trace.debug(f"... Run {self.name!r} with threading.")
+        result.trace.debug(f"... Run {self.name!r} with non-threading.")
+        max_job_parallel: int = dynamic(
+            "max_job_parallel", f=max_job_parallel, extras=self.extras
+        )
         with ThreadPoolExecutor(
-            max_workers=dynamic("max_job_parallel", extras=self.extras),
-            thread_name_prefix="wf_exec_threading_",
+            max_workers=max_job_parallel,
+            thread_name_prefix="wf_exec_non_threading_",
         ) as executor:
             futures: list[Future] = []
 
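This hunk collapses the separate threading and non-threading executors into one `ThreadPoolExecutor` loop whose worker count comes from `max_job_parallel`, so a single worker degenerates to sequential execution. A self-contained sketch of that design choice (the jobs here are trivial lambdas, not real workflow jobs):

```python
from concurrent.futures import Future, ThreadPoolExecutor


def run_jobs(job_ids: list[str], max_job_parallel: int = 2) -> list[str]:
    """Sketch: one executor serves both modes; a single worker makes the
    pool behave sequentially, removing the need for two code paths."""
    results: list[str] = []
    with ThreadPoolExecutor(
        max_workers=max_job_parallel,
        thread_name_prefix="wf_exec_non_threading_",
    ) as executor:
        futures: list[Future] = [
            executor.submit(lambda j=j: f"done:{j}") for j in job_ids
        ]
        for future in futures:
            results.append(future.result())
    return results


assert run_jobs(["a", "b"], max_job_parallel=1) == ["done:a", "done:b"]
```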
@@ -1181,16 +1102,20 @@ class Workflow(BaseModel):
             ):
                 job_id: str = job_queue.get()
                 job: Job = self.job(name=job_id)
-
                 if (check := job.check_needs(context["jobs"])) == WAIT:
                     job_queue.task_done()
                     job_queue.put(job_id)
                     time.sleep(0.15)
                     continue
                 elif check == FAILED:  # pragma: no cov
-                    raise WorkflowException(
-                        f"Validate job trigger rule was failed with "
-                        f"{job.trigger_rule.value!r}."
+                    return result.catch(
+                        status=FAILED,
+                        context={
+                            "errors": WorkflowException(
+                                f"Validate job trigger rule was failed with "
+                                f"{job.trigger_rule.value!r}."
+                            ).to_dict()
+                        },
                     )
                 elif check == SKIP:  # pragma: no cov
                     result.trace.info(f"[JOB]: Skip job: {job_id!r}")
@@ -1198,119 +1123,42 @@ class Workflow(BaseModel):
                     job_queue.task_done()
                     continue
 
-                futures.append(
-                    executor.submit(
-                        self.execute_job,
-                        job_id=job_id,
-                        params=context,
-                        result=result,
-                        event=event,
-                    ),
-                )
-
-                job_queue.task_done()
-
-            if not_timeout_flag:
-                job_queue.join()
-                for future in as_completed(futures, timeout=thread_timeout):
-                    if e := future.exception():
-                        result.trace.error(f"[WORKFLOW]: {e}")
-                        raise WorkflowException(str(e))
-
-                    future.result()
-
-                return context
-
-            result.trace.error(
-                f"[WORKFLOW]: Execution: {self.name!r} was timeout."
-            )
-            event.set()
-            for future in futures:
-                future.cancel()
-
-            raise WorkflowException(f"Execution: {self.name!r} was timeout.")
-
-    def __exec_non_threading(
-        self,
-        result: Result,
-        context: DictData,
-        ts: float,
-        job_queue: Queue,
-        *,
-        timeout: int = 600,
-        event: Event | None = None,
-    ) -> DictData:
-        """Workflow execution with non-threading strategy that use sequential
-        job running and waiting previous job was run successful.
-
-            If a job need dependency, it will check dependency job ID from
-        context data before allow it run.
-
-        :param result: (Result) A result model.
-        :param context: A context workflow data that want to downstream passing.
-        :param ts: (float) A start timestamp that use for checking execute time
-            should time out.
-        :param timeout: (int) A second value unit that bounding running time.
-        :param event: (Event) An event manager that pass to the
-            PoolThreadExecutor.
-
-        :rtype: DictData
-        """
-        not_timeout_flag: bool = True
-        timeout: int = dynamic(
-            "max_job_exec_timeout", f=timeout, extras=self.extras
-        )
-        event: Event = event or Event()
-        result.trace.debug(f"... Run {self.name!r} with non-threading.")
-        with ThreadPoolExecutor(
-            max_workers=1,
-            thread_name_prefix="wf_exec_non_threading_",
-        ) as executor:
-            future: Optional[Future] = None
-
-            while not job_queue.empty() and (
-                not_timeout_flag := ((time.monotonic() - ts) < timeout)
-            ):
-                job_id: str = job_queue.get()
-                job: Job = self.job(name=job_id)
-
-                if (check := job.check_needs(context["jobs"])) == WAIT:
-                    job_queue.task_done()
-                    job_queue.put(job_id)
-                    time.sleep(0.075)
-                    continue
-                elif check == FAILED:
-                    raise WorkflowException(
-                        f"Validate job trigger rule was failed with "
-                        f"{job.trigger_rule.value!r}."
+                if max_job_parallel > 1:
+                    futures.append(
+                        executor.submit(
+                            self.execute_job,
+                            job_id=job_id,
+                            params=context,
+                            result=result,
+                            event=event,
+                        ),
                     )
-                elif check == SKIP:  # pragma: no cov
-                    result.trace.info(f"[JOB]: Skip job: {job_id!r}")
-                    job.set_outputs(output={"skipped": True}, to=context)
                     job_queue.task_done()
                     continue
 
-                if future is None:
-                    future: Future = executor.submit(
-                        self.execute_job,
-                        job_id=job_id,
-                        params=context,
-                        result=result,
-                        event=event,
+                if len(futures) < 1:
+                    futures.append(
+                        executor.submit(
+                            self.execute_job,
+                            job_id=job_id,
+                            params=context,
+                            result=result,
+                            event=event,
+                        )
                     )
                     time.sleep(0.025)
-                elif future.done() or future.cancelled():
+                elif (future := futures.pop(0)).done() or future.cancelled():
                     if e := future.exception():
                         result.trace.error(f"[WORKFLOW]: {e}")
                         raise WorkflowException(str(e))
-
-                    future = None
                     job_queue.put(job_id)
                 elif future.running() or "state=pending" in str(future):
                     time.sleep(0.075)
+                    futures.insert(0, future)
                     job_queue.put(job_id)
                 else:  # pragma: no cov
                     job_queue.put(job_id)
+                    futures.insert(0, future)
                     result.trace.warning(
                         f"... Execution non-threading not handle: {future}."
                     )
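In single-worker mode the merged loop keeps at most one future in flight: it pops the future to inspect it and, if the job is still running, re-inserts it at the front and requeues the job ID; leftovers are drained afterward, mirroring the `as_completed` pass below. A simplified runnable sketch of that poll-and-requeue shape, with sleep delays standing in for real jobs:

```python
import time
from concurrent.futures import Future, ThreadPoolExecutor
from queue import Queue


def run_serial(delays: list[float]) -> int:
    """Sketch of the single-in-flight pattern: pop the lone future to
    inspect it; if still running, push it back to the front and requeue
    the job, then drain whatever is left in flight at the end."""
    q: Queue = Queue()
    for d in delays:
        q.put(d)
    done = 0
    with ThreadPoolExecutor(max_workers=1) as executor:
        futures: list[Future] = []
        while not q.empty():
            delay = q.get()
            if len(futures) < 1:
                futures.append(executor.submit(time.sleep, delay))
            elif (future := futures.pop(0)).done():
                done += 1
                q.put(delay)  # hand the job back; submit on a later pass
            else:
                futures.insert(0, future)  # still running: keep it in front
                q.put(delay)               # requeue the job for a later pass
                time.sleep(0.01)
        for future in futures:  # drain whatever is still in flight
            future.result()
            done += 1
    return done


print(run_serial([0.02, 0.02]))  # 2
```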
@@ -1319,23 +1167,35 @@ class Workflow(BaseModel):
 
         if not_timeout_flag:
             job_queue.join()
-            if future:  # pragma: no cov
-                if e := future.exception():
-                    result.trace.error(f"[WORKFLOW]: {e}")
-                    raise WorkflowException(str(e))
-
-                future.result()
-
-            return context
+            for future in as_completed(futures):
+                try:
+                    future.result()
+                except WorkflowException as e:
+                    result.trace.error(f"[WORKFLOW]: Handler:{NEWLINE}{e}")
+                    return result.catch(
+                        status=FAILED,
+                        context={
+                            "errors": WorkflowException(str(e)).to_dict()
+                        },
+                    )
+            return result.catch(
+                status=FAILED if "errors" in result.context else SUCCESS,
+                context=context,
+            )
 
-        result.trace.error(
-            f"[WORKFLOW]: Execution: {self.name!r} was timeout."
-        )
+        result.trace.error(f"[WORKFLOW]: {self.name!r} was timeout.")
         event.set()
-        if future:
+        for future in futures:
             future.cancel()
 
-        raise WorkflowException(f"Execution: {self.name!r} was timeout.")
+        return result.catch(
+            status=FAILED,
+            context={
+                "errors": WorkflowException(
+                    f"{self.name!r} was timeout."
+                ).to_dict()
+            },
+        )
 
 
 @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
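Both the handler and timeout paths above now return `result.catch(status=FAILED, context={"errors": ...})` instead of raising, so callers read failures from the context. A sketch of the error wrapping, assuming a `to_dict()` that serializes the exception; the exact key names are an assumption, not confirmed from the package's exceptions module:

```python
# Sketch: errors end up in the result context rather than propagating.
class WorkflowException(Exception):
    def to_dict(self) -> dict[str, str]:
        # Assumed serialization shape; the real to_dict may use other keys.
        return {"name": self.__class__.__name__, "message": str(self)}


context: dict = {"errors": WorkflowException("'wf' was timeout.").to_dict()}
print(context)
# {'errors': {'name': 'WorkflowException', 'message': "'wf' was timeout."}}
```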
@@ -1352,8 +1212,8 @@ class WorkflowTask:
     :param alias: (str) An alias name of Workflow model.
     :param workflow: (Workflow) A Workflow model instance.
     :param runner: (CronRunner)
-    :param values:
-    :param extras:
+    :param values: A value data that want to parameterize.
+    :param extras: An extra parameter that use to override core config values.
     """
 
     alias: str
{ddeutil_workflow-0.0.53.dist-info → ddeutil_workflow-0.0.55.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.53
+Version: 0.0.55
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -62,12 +62,6 @@ by a `.yaml` template.
 > use the workflow stage to process any large volume data which use a lot of compute
 > resource :cold_sweat:.
 
-In my opinion, I think it should not create duplicate workflow codes if I can
-write with dynamic input parameters on the one template workflow that just change
-the input parameters per use-case instead.
-This way I can handle a lot of logical workflows in our orgs with only metadata
-configuration. It called **Metadata Driven Data Workflow**.
-
 ---
 
 **:pushpin: <u>Rules of This Workflow engine</u>**:
@@ -127,12 +121,10 @@ flowchart LR
 
 > [!WARNING]
 > _**Disclaimer**_: I inspire the dynamic YAML statement from the [**GitHub Action**](https://github.com/features/actions),
-> and all configs pattern from several data orchestration framework tools from
-> my data engineering experience. :grimacing:
-
-> [!NOTE]
-> Other workflow orchestration tools that I interest and pick them to be inspiration
-> some for this package:
+> and my experience of data framework configs pattern. :grimacing:
+>
+> Other workflow orchestration services that I interest and pick them to be
+> this project inspiration:
 >
 > - [Google **Workflows**](https://cloud.google.com/workflows)
 > - [AWS **Step Functions**](https://aws.amazon.com/step-functions/)
ddeutil_workflow-0.0.55.dist-info/RECORD ADDED
@@ -0,0 +1,30 @@
+ddeutil/workflow/__about__.py,sha256=kfTS6Gqr5xkMNDVKm-NTS4vmeIl2Zj4oMBQGmq19LHA,28
+ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
+ddeutil/workflow/__init__.py,sha256=noE8LNRcgq32m9OnIFcQqh0P7PXWdp-SGmvBCYIXgf4,1338
+ddeutil/workflow/__main__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4316
+ddeutil/workflow/conf.py,sha256=80rgmJKFU7BlH5xTLnghGzGhE8C6LFAQykd9mjHSjo8,12528
+ddeutil/workflow/cron.py,sha256=WS2MInn0Sp5DKlZDZH5VFZ5AA0Q3_AnBnYEU4lZSv4I,9779
+ddeutil/workflow/exceptions.py,sha256=r4Jrf9qtVPALU4wh4bnb_OYqC-StqSQJEmFC-_QK934,1408
+ddeutil/workflow/job.py,sha256=aVRWLMLv5vYFbckT6AKYrMu29FzXYESOEzDHhFIpUyo,34159
+ddeutil/workflow/logs.py,sha256=rsoBrUGQrooou18fg2yvPsB8NOaXnUA5ThQpBr_WVMg,26598
+ddeutil/workflow/params.py,sha256=FKY4Oo1Ze4QZKRfAk7rqKsi44YaJQAbqAtXM6vlO2hI,11392
+ddeutil/workflow/result.py,sha256=rI0S8-HanFDk1l6_BsYRRamzSfzKUy7bkKJUae1w_aQ,5708
+ddeutil/workflow/reusables.py,sha256=iXcS7Gg-71qVX4ln0ILTDx03cTtUnj_rNoXHTVdVrxc,17636
+ddeutil/workflow/scheduler.py,sha256=hk3-9R63DZH9J0PWbtBzaD8rqHyiOf03vTvAJDgsxTA,28279
+ddeutil/workflow/stages.py,sha256=E5XoMVijjcvm_YK8AbiA8xGAQUphCPTtGazW-oLAdeI,82543
+ddeutil/workflow/utils.py,sha256=NZPvPPP_5g4cigFcD7tHjIKLtKMeYAcb3oUhNyhTpJ0,7947
+ddeutil/workflow/workflow.py,sha256=F3I_c0LHMoJFZIFt1a92sxFMlQVkE-cH--TLyX2rwuo,46799
+ddeutil/workflow/api/__init__.py,sha256=kY30dL8HPY8tY_GBmm7y_3OdoXzB1-EA2a96PLU0AQw,5278
+ddeutil/workflow/api/logs.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
+ddeutil/workflow/api/utils.py,sha256=uTtUFVLpiYYahXvCVx8sueRQ03K2Xw1id_gW3IMmX1U,5295
+ddeutil/workflow/api/routes/__init__.py,sha256=qoGtOMyVgQ5nTUc8J8wH27A8isaxl3IFCX8qoyibeCY,484
+ddeutil/workflow/api/routes/job.py,sha256=8X5VLDJH6PumyNIY6JGRNBsf2gWN0eG9DzxRPSh6n4I,2190
+ddeutil/workflow/api/routes/logs.py,sha256=U6vOni3wd-ZTOwd3yVdSOpgyRmNdcgfngU5KlLM3Cww,5383
+ddeutil/workflow/api/routes/schedules.py,sha256=14RnaJKEGMSJtncI1H_QQVZNBe_jDS40PPRO6qFc3i0,4805
+ddeutil/workflow/api/routes/workflows.py,sha256=GJu5PiXEylswrXylEImpncySjeU9chrvrtjhiMCw2RQ,4529
+ddeutil_workflow-0.0.55.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.55.dist-info/METADATA,sha256=sTjAMsv4yIgw-CKKHyr-sg_yrgnnDT_0UMF3E2TP-Js,19008
+ddeutil_workflow-0.0.55.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ddeutil_workflow-0.0.55.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.55.dist-info/RECORD,,