ddeutil-workflow 0.0.17__py3-none-any.whl → 0.0.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/__about__.py CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.17"
+__version__: str = "0.0.19"
ddeutil/workflow/__cron.py CHANGED
@@ -646,6 +646,7 @@ class CronJob:
 
         :param date: An initial date that want to mark as the start point.
        :param tz: A string timezone that want to change on runner.
+
        :rtype: CronRunner
        """
        return CronRunner(self, date, tz=tz)
ddeutil/workflow/__init__.py CHANGED
@@ -3,7 +3,11 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-from .conf import Config, FileLog, Loader
+from .conf import (
+    Config,
+    FileLog,
+    Loader,
+)
 from .exceptions import (
     JobException,
     ParamValueException,
@@ -12,14 +16,57 @@ from .exceptions import (
     WorkflowException,
 )
 from .job import Job, Strategy
-from .on import On, interval2crontab
+from .on import (
+    On,
+    YearOn,
+    interval2crontab,
+)
 from .scheduler import (
     Schedule,
+    ScheduleWorkflow,
     Workflow,
+    WorkflowTaskData,
+)
+from .stage import (
+    BashStage,
+    EmptyStage,
+    HookStage,
+    PyStage,
+    Stage,
+    TriggerStage,
+    handler_result,
 )
-from .stage import Stage, handler_result
 from .utils import (
+    FILTERS,
+    ChoiceParam,
+    DatetimeParam,
+    DefaultParam,
+    FilterFunc,
+    FilterRegistry,
+    IntParam,
     Param,
+    Result,
+    ReturnTagFunc,
+    StrParam,
+    TagFunc,
+    batch,
+    cross_product,
+    custom_filter,
     dash2underscore,
+    delay,
+    filter_func,
+    gen_id,
+    get_args_const,
+    get_diff_sec,
+    get_dt_now,
+    has_template,
+    make_exec,
+    make_filter_registry,
+    make_registry,
+    map_post_filter,
+    not_in_template,
     param2template,
+    queue2str,
+    str2template,
+    tag,
 )
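The package root now re-exports the full public surface. A minimal sketch of what downstream imports can look like after this change (names taken from the hunk above; the list is not exhaustive):

```python
# In 0.0.19 these names resolve from the package root rather than the
# ddeutil.workflow.scheduler / .stage / .on submodules.
from ddeutil.workflow import (
    Job,
    On,
    Schedule,
    ScheduleWorkflow,
    Stage,
    Workflow,
    WorkflowTaskData,
    YearOn,
)
```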
ddeutil/workflow/conf.py CHANGED
@@ -41,8 +41,8 @@ class Config:
     # NOTE: Core
     root_path: Path = Path(os.getenv("WORKFLOW_ROOT_PATH", "."))
     tz: ZoneInfo = ZoneInfo(env("WORKFLOW_CORE_TIMEZONE", "UTC"))
-    workflow_id_simple_mode: bool = str2bool(
-        os.getenv("WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE", "true")
+    gen_id_simple_mode: bool = str2bool(
+        os.getenv("WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE", "true")
     )
 
     # NOTE: Register
@@ -195,6 +195,7 @@ class SimLoad:
         :param conf: A config object.
         :param include:
         :param exclude:
+
         :rtype: Iterator[tuple[str, DictData]]
         """
         exclude: list[str] = exclude or []
@@ -247,12 +248,14 @@ class Loader(SimLoad):
         include: list[str] | None = None,
         exclude: list[str] | None = None,
         **kwargs,
-    ) -> DictData:
+    ) -> Iterator[tuple[str, DictData]]:
         """Override the find class method from the Simple Loader object.
 
         :param obj: A object that want to validate matching before return.
         :param include:
         :param exclude:
+
+        :rtype: Iterator[tuple[str, DictData]]
         """
         return super().finds(
             obj=obj, conf=Config(), include=include, exclude=exclude
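`Loader.finds` now matches the lazy signature of `SimLoad.finds`. A usage sketch, assuming `Workflow` is a valid model type to pass as the `obj` argument (parameter names come from the hunk above):

```python
from ddeutil.workflow.conf import Loader
from ddeutil.workflow.scheduler import Workflow

# finds() now yields (config-name, config-data) pairs lazily instead of
# returning a single DictData, so matches can be streamed.
for name, data in Loader.finds(Workflow, exclude=[]):
    print(name, sorted(data))
```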
@@ -268,6 +271,7 @@ def get_type(t: str, params: Config) -> AnyModelType:
     :param t: A importable type string.
     :param params: A config parameters that use registry to search this
         type.
+
     :rtype: AnyModelType
     """
     try:
@@ -366,6 +370,8 @@ class FileLog(BaseLog):
         workflow name.
 
         :param name: A workflow name that want to search release logging data.
+
+        :rtype: Iterator[Self]
         """
         pointer: Path = config.root_path / f"./logs/workflow={name}"
         if not pointer.exists():
@@ -387,6 +393,9 @@ class FileLog(BaseLog):
         workflow name and release values. If a release does not pass to an input
         argument, it will return the latest release from the current log path.
 
+        :param name:
+        :param release:
+
         :raise FileNotFoundError:
         :raise NotImplementedError:
 
@@ -411,21 +420,17 @@ class FileLog(BaseLog):
             return cls.model_validate(obj=json.load(f))
 
     @classmethod
-    def is_pointed(
-        cls,
-        name: str,
-        release: datetime,
-        *,
-        queue: list[datetime] | None = None,
-    ) -> bool:
-        """Check this log already point in the destination.
+    def is_pointed(cls, name: str, release: datetime) -> bool:
+        """Check the release log already pointed or created at the destination
+        log path.
 
         :param name: A workflow name.
         :param release: A release datetime.
-        :param queue: A list of queue of datetime that already run in the
-            future.
+
+        :rtype: bool
+        :return: Return False if the release log was not pointed or created.
         """
-        # NOTE: Check environ variable was set for real writing.
+        # NOTE: Return False if enable writing log flag does not set.
         if not config.enable_write_log:
             return False
 
@@ -434,9 +439,7 @@ class FileLog(BaseLog):
             name=name, release=release
         )
 
-        if not queue:
-            return pointer.exists()
-        return pointer.exists() or (release in queue)
+        return pointer.exists()
 
     def pointer(self) -> Path:
         """Return release directory path that was generated from model data.
ddeutil/workflow/job.py CHANGED
@@ -19,6 +19,7 @@ from concurrent.futures import (
     as_completed,
     wait,
 )
+from enum import Enum
 from functools import lru_cache
 from textwrap import dedent
 from threading import Event
@@ -198,6 +199,11 @@ class Strategy(BaseModel):
         return make(self.matrix, self.include, self.exclude)
 
 
+class TriggerRules(str, Enum):
+    all_success: str = "all_success"
+    all_failed: str = "all_failed"
+
+
 class Job(BaseModel):
     """Job Pydantic model object (group of stages).
 
@@ -245,6 +251,11 @@ class Job(BaseModel):
         default_factory=list,
         description="A list of Stage of this job.",
     )
+    trigger_rule: TriggerRules = Field(
+        default=TriggerRules.all_success,
+        description="A trigger rule of tracking needed jobs.",
+        serialization_alias="trigger-rule",
+    )
     needs: list[str] = Field(
         default_factory=list,
         description="A list of the job ID that want to run before this job.",
@@ -269,6 +280,7 @@ class Job(BaseModel):
         :rtype: DictData
         """
         dash2underscore("runs-on", values)
+        dash2underscore("trigger-rule", values)
         return values
 
     @field_validator("desc", mode="after")
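Because the alias passes through `dash2underscore`, a YAML-style `trigger-rule` key validates into the new enum field. A minimal sketch (stages and needs fall back to their default empty values):

```python
from ddeutil.workflow import Job

# The model validator above rewrites "trigger-rule" to trigger_rule
# before Pydantic field validation runs.
job: Job = Job.model_validate({"trigger-rule": "all_failed"})
assert job.trigger_rule == "all_failed"  # TriggerRules is a str-Enum
```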
@@ -553,8 +565,6 @@ class Job(BaseModel):
         # NOTE: Create event for cancel executor by trigger stop running event.
         event: Event = Event()
 
-        print("Job Run Fail-Fast:", self.strategy.fail_fast)
-
         # IMPORTANT: Start running strategy execution by multithreading because
         #   it will running by strategy values without waiting previous
         #   execution.
ddeutil/workflow/on.py CHANGED
@@ -190,6 +190,19 @@ class On(BaseModel):
         """
         return self.generate(start=start).next
 
+    # def pop(self, queue: list[datetime]) -> datetime:
+    #     """Pop the matching datetime value from list of datetime alias queue."""
+    #     for dt in queue:
+    #         if self.next(dt) == dt:
+    #             return dt
+    #
+    #     # NOTE: Add 1 second value to the current datetime for forcing crontab
+    #     #   runner generate the next datetime instead if current datetime be
+    #     #   valid because I already replaced second to zero before passing.
+    #     return datetime.now(tz=config.tz).replace(
+    #         second=0, microsecond=0
+    #     ) + timedelta(seconds=1)
+
 
 class YearOn(On):
     """Implement On Year Schedule Model for limit year matrix that use by some
ddeutil/workflow/scheduler.py CHANGED
@@ -12,8 +12,8 @@ from functools import wraps
 
 from starlette.concurrency import run_in_threadpool
 
+from .__cron import CronJob
 from .conf import config, get_logger
-from .cron import CronJob
 
 logger = get_logger("ddeutil.workflow")
 
@@ -218,6 +218,24 @@ class Workflow(BaseModel):
         """
         return dedent(value)
 
+    @field_validator("on", mode="after")
+    def __on_no_dup__(cls, value: list[On]) -> list[On]:
+        """Validate the on fields should not contain duplicate values and if it
+        contain every minute value, it should has only one on value."""
+        set_ons: set[str] = {str(on.cronjob) for on in value}
+        if len(set_ons) != len(value):
+            raise ValueError(
+                "The on fields should not contain duplicate on value."
+            )
+
+        # WARNING:
+        # if '* * * * *' in set_ons and len(set_ons) > 1:
+        #     raise ValueError(
+        #         "If it has every minute cronjob on value, it should has only "
+        #         "one value in the on field."
+        #     )
+        return value
+
     @model_validator(mode="after")
     def __validate_jobs_need_and_prepare_running_id(self) -> Self:
         """Validate each need job in any jobs should exists.
@@ -265,7 +283,7 @@ class Workflow(BaseModel):
         :type name: str
 
         :rtype: Job
-        :returns: A job model that exists on this workflow by input name.
+        :return: A job model that exists on this workflow by input name.
         """
         if name not in self.jobs:
             raise ValueError(
@@ -287,7 +305,13 @@ class Workflow(BaseModel):
 
         :param params: A parameter mapping that receive from workflow execution.
         :type params: DictData
+
+        :raise WorkflowException: If parameter value that want to validate does
+            not include the necessary parameter that had required flag.
+
         :rtype: DictData
+        :return: The parameter value that validate with its parameter fields and
+            adding jobs key to this parameter.
         """
         # VALIDATE: Incoming params should have keys that set on this workflow.
         if check_key := tuple(
@@ -315,7 +339,7 @@ class Workflow(BaseModel):
 
     def release(
         self,
-        on: On,
+        runner: CronRunner,
         params: DictData,
         queue: list[datetime],
         *,
@@ -330,41 +354,42 @@ class Workflow(BaseModel):
         This method allow workflow use log object to save the execution
         result to log destination like file log to local `/logs` directory.
 
-        :param on: An on schedule value.
+        I will add sleep with 0.15 seconds on every step that interact with
+        the queue object.
+
+        :param runner: A CronRunner instance.
         :param params: A workflow parameter that pass to execute method.
         :param queue: A list of release time that already running.
         :param waiting_sec: A second period value that allow workflow execute.
         :param sleep_interval: A second value that want to waiting until time
             to execute.
         :param log: A log object that want to save execution result.
+
         :rtype: Result
         """
         logger.debug(
-            f"({self.run_id}) [CORE]: {self.name!r}: {on.cronjob} : run with "
+            f"({self.run_id}) [CORE]: {self.name!r}: {runner.cron} : run with "
             f"queue id: {id(queue)}"
         )
         log: Log = log or FileLog
-        gen: CronRunner = on.generate(
-            datetime.now(tz=config.tz).replace(second=0, microsecond=0)
-            + timedelta(seconds=1)
-        )
-        cron_tz: ZoneInfo = gen.tz
+        cron_tz: ZoneInfo = runner.tz
 
         # NOTE: get next schedule time that generate from now.
-        next_time: datetime = gen.next
+        next_time: datetime = runner.next
 
         # NOTE: While-loop to getting next until it does not logger.
-        while log.is_pointed(self.name, next_time, queue=queue):
-            next_time: datetime = gen.next
+        while log.is_pointed(self.name, next_time) or (next_time in queue):
+            next_time: datetime = runner.next
 
         # NOTE: Heap-push this next running time to log queue list.
         heappush(queue, next_time)
+        time.sleep(0.15)
 
         # VALIDATE: Check the different time between the next schedule time and
         #   now that less than waiting period (second unit).
         if get_diff_sec(next_time, tz=cron_tz) > waiting_sec:
             logger.debug(
-                f"({self.run_id}) [CORE]: {self.name!r} : {on.cronjob} : "
+                f"({self.run_id}) [CORE]: {self.name!r} : {runner.cron} : "
                 f"Does not closely >> {next_time:%Y-%m-%d %H:%M:%S}"
             )
@@ -376,54 +401,51 @@ class Workflow(BaseModel):
             status=0,
             context={
                 "params": params,
-                "poking": {"skipped": [str(on.cronjob)], "run": []},
+                "release": {
+                    "status": "skipped",
+                    "cron": [str(runner.cron)],
+                },
             },
         )
 
         logger.debug(
-            f"({self.run_id}) [CORE]: {self.name!r} : {on.cronjob} : "
+            f"({self.run_id}) [CORE]: {self.name!r} : {runner.cron} : "
             f"Closely to run >> {next_time:%Y-%m-%d %H:%M:%S}"
         )
 
         # NOTE: Release when the time is nearly to schedule time.
         while (duration := get_diff_sec(next_time, tz=cron_tz)) > (
             sleep_interval + 5
-        ):
+        ):  # pragma: no cov
             logger.debug(
-                f"({self.run_id}) [CORE]: {self.name!r} : {on.cronjob} : "
+                f"({self.run_id}) [CORE]: {self.name!r} : {runner.cron} : "
                 f"Sleep until: {duration}"
             )
             time.sleep(sleep_interval)
 
-        time.sleep(0.5)
+        time.sleep(0.15)
 
-        # NOTE: Release parameter that use to change if params has
-        #   templating.
-        release_params: DictData = {
-            "release": {
-                "logical_date": next_time,
-            },
-        }
+        # NOTE: Release parameter that use to change if params has templating.
+        release_params: DictData = {"release": {"logical_date": next_time}}
 
-        # WARNING: Re-create workflow object that use new running workflow
-        #   ID.
-        runner: Self = self.get_running_id(run_id=self.new_run_id)
-        rs: Result = runner.execute(
+        # WARNING: Re-create workflow object that use new running workflow ID.
+        workflow: Self = self.get_running_id(run_id=self.new_run_id)
+        rs: Result = workflow.execute(
             params=param2template(params, release_params),
         )
         logger.debug(
-            f"({runner.run_id}) [CORE]: {self.name!r} : {on.cronjob} : "
+            f"({workflow.run_id}) [CORE]: {self.name!r} : {runner.cron} : "
             f"End release {next_time:%Y-%m-%d %H:%M:%S}"
         )
 
         # NOTE: Delete a copied workflow instance for saving memory.
-        del runner
+        del workflow
 
         rs.set_parent_run_id(self.run_id)
         rs_log: Log = log.model_validate(
             {
                 "name": self.name,
-                "on": str(on.cronjob),
+                "on": str(runner.cron),
                 "release": next_time,
                 "context": rs.context,
                 "parent_run_id": rs.run_id,
@@ -434,27 +456,31 @@ class Workflow(BaseModel):
         rs_log.save(excluded=None)
 
         queue.remove(next_time)
-        time.sleep(0.05)
+        time.sleep(0.15)
         return Result(
             status=0,
             context={
                 "params": params,
-                "poking": {"skipped": [], "run": [str(on.cronjob)]},
+                "release": {"status": "run", "cron": [str(runner.cron)]},
             },
         )
 
     def poke(
         self,
+        start_date: datetime | None = None,
         params: DictData | None = None,
         *,
         log: Log | None = None,
     ) -> list[Result]:
-        """Poke workflow with threading executor pool for executing with all its
-        schedules that was set on the `on` value. This method will observe its
-        schedule that nearing to run with the ``self.release()`` method.
+        """Poke workflow with the ``on`` field with threading executor pool for
+        executing with all its schedules that was set on the `on` value.
+        This method will observe its schedule that nearing to run with the
+        ``self.release()`` method.
 
+        :param start_date: A start datetime object.
         :param params: A parameters that want to pass to the release method.
         :param log: A log object that want to use on this poking process.
+
         :rtype: list[Result]
         """
         logger.info(
@@ -470,21 +496,32 @@ class Workflow(BaseModel):
         queue: list[datetime] = []
         results: list[Result] = []
 
+        start_date: datetime = start_date or datetime.now(tz=config.tz).replace(
+            second=0, microsecond=0
+        ) + timedelta(seconds=1)
+
         with ThreadPoolExecutor(
             max_workers=config.max_poking_pool_worker,
             thread_name_prefix="wf_poking_",
         ) as executor:
+
             futures: list[Future] = []
+
+            # NOTE: For-loop the on values that exists in this workflow object.
             for on in self.on:
                 futures.append(
                     executor.submit(
                         self.release,
-                        on,
+                        on.generate(start_date),
                         params=params,
                         log=log,
                         queue=queue,
                     )
                 )
+
+                # NOTE: Delay release date because it run so fast and making
+                #   queue object can not handle release date that will duplicate
+                #   by the cron runner object.
                 delay(second=0.15)
 
             # WARNING: This poking method does not allow to use fail-fast logic
@@ -492,7 +529,7 @@ class Workflow(BaseModel):
             for future in as_completed(futures):
                 results.append(future.result(timeout=60))
 
-        if len(queue) > 0:
+        if len(queue) > 0:  # pragma: no cov
             logger.error(
                 f"({self.run_id}) [POKING]: Log Queue does empty when poking "
                 f"process was finishing."
@@ -717,11 +754,11 @@ class Workflow(BaseModel):
                 return context
 
         # NOTE: Raise timeout error.
-        logger.warning(
+        logger.warning(  # pragma: no cov
             f"({self.run_id}) [WORKFLOW]: Execution of workflow, {self.name!r} "
             f", was timeout"
         )
-        raise WorkflowException(
+        raise WorkflowException(  # pragma: no cov
             f"Execution of workflow: {self.name} was timeout"
         )
 
@@ -765,7 +802,8 @@ class Workflow(BaseModel):
                 continue
 
             # NOTE: Start workflow job execution with deep copy context data
-            #   before release.
+            #   before release. This job execution process will running until
+            #   done before checking all execution timeout or not.
             #
             #   {
             #       'params': <input-params>,
@@ -783,10 +821,10 @@ class Workflow(BaseModel):
             return context
 
         # NOTE: Raise timeout error.
-        logger.warning(
+        logger.warning(  # pragma: no cov
             f"({self.run_id}) [WORKFLOW]: Execution of workflow was timeout"
         )
-        raise WorkflowException(
+        raise WorkflowException(  # pragma: no cov
             f"Execution of workflow: {self.name} was timeout"
         )
 
@@ -805,7 +843,7 @@ class ScheduleWorkflow(BaseModel):
     )
     params: DictData = Field(
         default_factory=dict,
-        description="A parameters that want to use to workflow execution.",
+        description="A parameters that want to use in workflow execution.",
     )
 
     @model_validator(mode="before")
@@ -825,19 +863,23 @@ class ScheduleWorkflow(BaseModel):
         data: DictData,
         externals: DictData | None = None,
     ) -> DictData:
-        """Bypass the on data to loaded config data.
+        """Bypass and prepare the on data to loaded config data.
+
+        :param data:
+        :param externals:
 
         :rtype: DictData
         """
         if on := data.pop("on", []):
 
             if isinstance(on, str):
-                on = [on]
+                on: list[str] = [on]
 
             if any(not isinstance(n, (dict, str)) for n in on):
                 raise TypeError("The ``on`` key should be list of str or dict")
 
-            # NOTE: Pass on value to Loader and keep on model object to on field
+            # NOTE: Pass on value to Loader and keep on model object to on
+            #   field.
             data["on"] = [
                 (
                     Loader(n, externals=(externals or {})).data
848
890
  ]
849
891
  return data
850
892
 
893
+ @field_validator("on", mode="after")
894
+ def __on_no_dup__(cls, value: list[On]) -> list[On]:
895
+ """Validate the on fields should not contain duplicate values and if it
896
+ contain every minute value, it should has only one on value."""
897
+ set_ons: set[str] = {str(on.cronjob) for on in value}
898
+ if len(set_ons) != len(value):
899
+ raise ValueError(
900
+ "The on fields should not contain duplicate on value."
901
+ )
902
+
903
+ # WARNING:
904
+ # if '* * * * *' in set_ons and len(set_ons) > 1:
905
+ # raise ValueError(
906
+ # "If it has every minute cronjob on value, it should has only "
907
+ # "one value in the on field."
908
+ # )
909
+ return value
910
+
851
911
 
852
912
  class Schedule(BaseModel):
853
913
  """Schedule Pydantic Model that use to run with scheduler package. It does
@@ -866,6 +926,15 @@ class Schedule(BaseModel):
         description="A list of ScheduleWorkflow models.",
     )
 
+    @field_validator("desc", mode="after")
+    def __dedent_desc__(cls, value: str) -> str:
+        """Prepare description string that was created on a template.
+
+        :param value: A description string value that want to dedent.
+        :rtype: str
+        """
+        return dedent(value)
+
     @classmethod
     def from_loader(
         cls,
902
971
  *,
903
972
  externals: DictData | None = None,
904
973
  ) -> list[WorkflowTaskData]:
905
- """Generate Task from the current datetime.
974
+ """Return the list of WorkflowTaskData object from the specific input
975
+ datetime that mapping with the on field.
906
976
 
907
977
  :param start_date: A start date that get from the workflow schedule.
908
978
  :param queue: A mapping of name and list of datetime for queue.
909
979
  :param running: A mapping of name and list of datetime for running.
910
980
  :param externals: An external parameters that pass to the Loader object.
981
+
911
982
  :rtype: list[WorkflowTaskData]
983
+ :return: Return the list of WorkflowTaskData object from the specific
984
+ input datetime that mapping with the on field.
912
985
  """
913
986
 
914
987
  # NOTE: Create pair of workflow and on.
915
988
  workflow_tasks: list[WorkflowTaskData] = []
916
- externals: DictData = externals or {}
989
+ extras: DictData = externals or {}
917
990
 
918
- for wfs in self.workflows:
919
- wf: Workflow = Workflow.from_loader(wfs.name, externals=externals)
991
+ for sch_wf in self.workflows:
992
+ wf: Workflow = Workflow.from_loader(sch_wf.name, externals=extras)
920
993
 
921
994
  # NOTE: Create default list of release datetime.
922
- queue[wfs.name]: list[datetime] = []
923
- running[wfs.name]: list[datetime] = []
995
+ queue[sch_wf.name]: list[datetime] = []
996
+ running[sch_wf.name]: list[datetime] = []
997
+
998
+ # IMPORTANT: Create the default 'on' value if it does not passing
999
+ # the on field to the Schedule object.
1000
+ ons: list[On] = wf.on.copy() if len(sch_wf.on) == 0 else sch_wf.on
924
1001
 
925
- # NOTE: Create default on if it does not passing on the Schedule.
926
- _ons: list[On] = wf.on.copy() if len(wfs.on) == 0 else wfs.on
1002
+ for on in ons:
1003
+ gen: CronRunner = on.generate(start_date)
1004
+ next_running_date = gen.next
927
1005
 
928
- for on in _ons:
929
- on_gen = on.generate(start_date)
930
- next_running_date = on_gen.next
931
- while next_running_date in queue[wfs.name]:
932
- next_running_date = on_gen.next
1006
+ while next_running_date in queue[sch_wf.name]:
1007
+ next_running_date = gen.next
933
1008
 
934
1009
  # NOTE: Push the next running date to queue list.
935
- heappush(queue[wfs.name], next_running_date)
1010
+ heappush(queue[sch_wf.name], next_running_date)
936
1011
 
937
1012
  workflow_tasks.append(
938
1013
  WorkflowTaskData(
939
1014
  workflow=wf,
940
1015
  on=on,
941
- params=wfs.params,
942
- queue=queue,
943
- running=running,
1016
+ params=sch_wf.params,
1017
+ queue=queue[sch_wf.name],
1018
+ running=running[sch_wf.name],
944
1019
  ),
945
1020
  )
946
1021
 
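A sketch of how the reworked `tasks` is consumed (the call shape matches `workflow_control` further down); the queue and running mappings stay keyed by workflow name, but each task now holds a reference to only its own workflow's list:

```python
from datetime import datetime, timedelta

from ddeutil.workflow import Schedule, WorkflowTaskData
from ddeutil.workflow.conf import config

# "schedule-wf" is a hypothetical schedule config name.
schedule: Schedule = Schedule.from_loader("schedule-wf")

queue: dict[str, list[datetime]] = {}
running: dict[str, list[datetime]] = {}
start_date: datetime = datetime.now(tz=config.tz) + timedelta(minutes=1)

tasks: list[WorkflowTaskData] = schedule.tasks(
    start_date, queue=queue, running=running
)
for task in tasks:
    # Same list object, so a heappush inside the task thread is visible
    # to the scheduler loop that owns the mapping.
    assert task.queue is queue[task.workflow.name]
```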
@@ -957,13 +1032,14 @@ def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:
 
     :param cancel_on_failure: A flag that allow to return the CancelJob or not
         it will raise.
-    :rtype: Callable[P, Optional[CancelJob]]
+
+    :rtype: DecoratorCancelJob
     """
 
     def decorator(func: ReturnCancelJob) -> ReturnCancelJob:
         try:
             # NOTE: Check the function that want to handle is method or not.
-            if inspect.ismethod(func):
+            if inspect.ismethod(func):  # pragma: no cov
 
                 @wraps(func)
                 def wrapper(self, *args, **kwargs):
@@ -977,7 +1053,7 @@ def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:
 
             return wrapper
 
-        except Exception as err:
+        except Exception as err:  # pragma: no cov
             logger.exception(err)
             if cancel_on_failure:
                 return CancelJob
@@ -990,13 +1066,15 @@
 class WorkflowTaskData:
     """Workflow task dataclass that use to keep mapping data and objects for
     passing in multithreading task.
+
+    This dataclass will be 1-1 mapping with workflow and on objects.
     """
 
     workflow: Workflow
     on: On
     params: DictData = field(compare=False, hash=False)
-    queue: dict[str, list[datetime]] = field(compare=False, hash=False)
-    running: dict[str, list[datetime]] = field(compare=False, hash=False)
+    queue: list[datetime] = field(compare=False, hash=False)
+    running: list[datetime] = field(compare=False, hash=False)
 
     @catch_exceptions(cancel_on_failure=True)
     def release(
@@ -1005,9 +1083,9 @@ class WorkflowTaskData:
         *,
         waiting_sec: int = 60,
         sleep_interval: int = 15,
-    ) -> None:
-        """Workflow release, it will use with the same logic of
-        `workflow.release` method.
+    ) -> None:  # pragma: no cov
+        """Workflow task release that use the same logic of `workflow.release`
+        method.
 
         :param log: A log object for saving result logging from workflow
             execution process.
@@ -1028,14 +1106,14 @@ class WorkflowTaskData:
         next_time: datetime = gen.next
 
         # NOTE: get next utils it does not running.
-        while log.is_pointed(wf.name, next_time, queue=self.running[wf.name]):
+        while log.is_pointed(wf.name, next_time) or (next_time in self.running):
             next_time: datetime = gen.next
 
         logger.debug(
             f"({wf.run_id}) [CORE]: {wf.name!r} : {on.cronjob} : "
             f"{next_time:%Y-%m-%d %H:%M:%S}"
         )
-        heappush(self.running[wf.name], next_time)
+        heappush(self.running, next_time)
 
         if get_diff_sec(next_time, tz=cron_tz) > waiting_sec:
             logger.debug(
@@ -1045,8 +1123,8 @@ class WorkflowTaskData:
 
             # NOTE: Add this next running datetime that not in period to queue
             #   and remove it to running.
-            self.running[wf.name].remove(next_time)
-            heappush(self.queue[wf.name], next_time)
+            self.running.remove(next_time)
+            heappush(self.queue, next_time)
 
             time.sleep(0.2)
             return
@@ -1107,7 +1185,7 @@ class WorkflowTaskData:
         rs_log.save(excluded=None)
 
         # NOTE: remove this release date from running
-        self.running[wf.name].remove(next_time)
+        self.running.remove(next_time)
 
         # IMPORTANT:
         #   Add the next running datetime to workflow queue
@@ -1116,13 +1194,13 @@ class WorkflowTaskData:
         )
         future_running_time: datetime = gen.next
         while (
-            future_running_time in self.running[wf.name]
-            or future_running_time in self.queue[wf.name]
+            future_running_time in self.running
+            or future_running_time in self.queue
             or future_running_time < finish_time
-        ):
+        ):  # pragma: no cov
             future_running_time: datetime = gen.next
 
-        heappush(self.queue[wf.name], future_running_time)
+        heappush(self.queue, future_running_time)
         logger.debug(f"[CORE]: {'-' * 100}")
 
     def __eq__(self, other) -> bool:
@@ -1134,7 +1212,7 @@ class WorkflowTaskData:
         return NotImplemented
 
 
-@catch_exceptions(cancel_on_failure=True)
+@catch_exceptions(cancel_on_failure=True)  # pragma: no cov
 def workflow_task(
     workflow_tasks: list[WorkflowTaskData],
     stop: datetime,
@@ -1233,7 +1311,7 @@ def workflow_task(
     logger.debug(f"[WORKFLOW]: {'=' * 100}")
 
 
-def workflow_monitor(threads: dict[str, Thread]) -> None:
+def workflow_monitor(threads: dict[str, Thread]) -> None:  # pragma: no cov
     """Workflow schedule for monitoring long running thread from the schedule
     control.
 
@@ -1255,7 +1333,7 @@ def workflow_control(
     schedules: list[str],
     stop: datetime | None = None,
     externals: DictData | None = None,
-) -> list[str]:
+) -> list[str]:  # pragma: no cov
     """Workflow scheduler control.
 
     :param schedules: A list of workflow names that want to schedule running.
@@ -1270,7 +1348,7 @@ def workflow_control(
             "Should install schedule package before use this module."
         ) from None
 
-    schedule: Scheduler = Scheduler()
+    scheduler: Scheduler = Scheduler()
     start_date: datetime = datetime.now(tz=config.tz)
 
     # NOTE: Design workflow queue caching.
@@ -1288,9 +1366,11 @@ def workflow_control(
     # NOTE: Create pair of workflow and on from schedule model.
     workflow_tasks: list[WorkflowTaskData] = []
     for name in schedules:
-        sch: Schedule = Schedule.from_loader(name, externals=externals)
+        schedule: Schedule = Schedule.from_loader(name, externals=externals)
+
+        # NOTE: Create a workflow task data instance from schedule object.
         workflow_tasks.extend(
-            sch.tasks(
+            schedule.tasks(
                 start_date_waiting,
                 queue=wf_queue,
                 running=wf_running,
@@ -1300,7 +1380,7 @@ def workflow_control(
     )
     # NOTE: This schedule job will start every minute at :02 seconds.
     (
-        schedule.every(1)
+        scheduler.every(1)
        .minutes.at(":02")
        .do(
            workflow_task,
@@ -1312,7 +1392,7 @@ def workflow_control(
        )
    )
    # NOTE: Checking zombie task with schedule job will start every 5 minute.
-    schedule.every(5).minutes.at(":10").do(
+    scheduler.every(5).minutes.at(":10").do(
        workflow_monitor,
        threads=thread_releases,
    ).tag("monitor")
@@ -1320,10 +1400,10 @@ def workflow_control(
     # NOTE: Start running schedule
     logger.info(f"[WORKFLOW]: Start schedule: {schedules}")
     while True:
-        schedule.run_pending()
+        scheduler.run_pending()
         time.sleep(1)
-        if not schedule.get_jobs("control"):
-            schedule.clear("monitor")
+        if not scheduler.get_jobs("control"):
+            scheduler.clear("monitor")
             logger.warning(
                 f"[WORKFLOW]: Workflow release thread: {thread_releases}"
             )
@@ -1343,7 +1423,7 @@ def workflow_runner(
     stop: datetime | None = None,
     externals: DictData | None = None,
     excluded: list[str] | None = None,
-) -> list[str]:
+) -> list[str]:  # pragma: no cov
     """Workflow application that running multiprocessing schedule with chunk of
     workflows that exists in config path.
 
@@ -1360,14 +1440,14 @@ def workflow_runner(
 
     The current workflow logic that split to process will be below diagram:
 
-        PIPELINES ==> process 01 ==> schedule 1 minute --> thread of release
-                                                           workflow task 01 01
-                                                       --> thread of release
-                                                           workflow task 01 02
-                  ==> process 02 ==> schedule 1 minute --> thread of release
-                                                           workflow task 02 01
-                                                       --> thread of release
-                                                           workflow task 02 02
+        PIPELINES ==> process 01 ==> schedule --> thread of release
+                                                  workflow task 01 01
+                                              --> thread of release
+                                                  workflow task 01 02
+                  ==> process 02 ==> schedule --> thread of release
+                                                  workflow task 02 01
+                                              --> thread of release
+                                                  workflow task 02 02
         ==> ...
     """
     excluded: list[str] = excluded or []
ddeutil/workflow/stage.py CHANGED
@@ -127,7 +127,6 @@ def handler_result(message: str | None = None) -> DecoratorResult:
                 logger.error(
                     f"({self.run_id}) [STAGE]: {err.__class__.__name__}: {err}"
                 )
-                print("Stage Raise error:", config.stage_raise_error)
                 if config.stage_raise_error:
                     # NOTE: If error that raise from stage execution course by
                     #   itself, it will return that error with previous
ddeutil/workflow/utils.py CHANGED
@@ -100,7 +100,7 @@ def gen_id(
     if not isinstance(value, str):
         value: str = str(value)
 
-    if config.workflow_id_simple_mode:
+    if config.gen_id_simple_mode:
         return hash_str(f"{(value if sensitive else value.lower())}", n=10) + (
             f"{datetime.now(tz=config.tz):%Y%m%d%H%M%S%f}" if unique else ""
         )
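The renamed config flag is what `gen_id` branches on. A sketch of the simple-mode behavior, assuming `hash_str(..., n=10)` yields a 10-character digest (the `sensitive` and `unique` keywords appear in the function body above; their defaults are not shown in this hunk):

```python
from ddeutil.workflow.utils import gen_id

# Simple mode hashes the value with md5 and truncates to 10 characters.
run_id: str = gen_id("my-workflow")
assert len(run_id) == 10

# unique=True appends a %Y%m%d%H%M%S%f timestamp after the hash.
unique_id: str = gen_id("my-workflow", unique=True)
assert unique_id.startswith(run_id)
```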
@@ -445,6 +445,7 @@ FILTERS: dict[str, callable] = {  # pragma: no cov
     "abs": abs,
     "str": str,
     "int": int,
+    "title": lambda x: x.title(),
     "upper": lambda x: x.upper(),
     "lower": lambda x: x.lower(),
     "rstr": [str, repr],
@@ -549,6 +550,30 @@
     return name.id, args, keywords
 
 
+def get_args_from_filter(
+    ft: str,
+    filters: dict[str, FilterRegistry],
+) -> tuple[str, FilterRegistry, list[Any], dict[Any, Any]]:  # pragma: no cov
+    """Get arguments and keyword-arguments from filter function calling string.
+    and validate it with the filter functions mapping dict.
+    """
+    func_name, _args, _kwargs = get_args_const(ft)
+    args: list[Any] = [arg.value for arg in _args]
+    kwargs: dict[Any, Any] = {k: v.value for k, v in _kwargs.items()}
+
+    if func_name not in filters:
+        raise UtilException(
+            f"The post-filter: {func_name} does not support yet."
+        )
+
+    if isinstance((f_func := filters[func_name]), list) and (args or kwargs):
+        raise UtilException(
+            "Chain filter function does not support for passing arguments."
+        )
+
+    return func_name, f_func, args, kwargs
+
+
 @custom_filter("fmt")  # pragma: no cov
 def datetime_format(value: datetime, fmt: str = "%Y-%m-%d %H:%M:%S") -> str:
     """Format datetime object to string with the format."""
@@ -573,28 +598,16 @@ def map_post_filter(
 
     :rtype: T
     """
-    for _filter in post_filter:
-        func_name, _args, _kwargs = get_args_const(_filter)
-        args: list = [arg.value for arg in _args]
-        kwargs: dict = {k: v.value for k, v in _kwargs.items()}
-
-        if func_name not in filters:
-            raise UtilException(
-                f"The post-filter: {func_name} does not support yet."
-            )
-
+    for ft in post_filter:
+        func_name, f_func, args, kwargs = get_args_from_filter(ft, filters)
         try:
-            if isinstance((f_func := filters[func_name]), list):
-                if args or kwargs:
-                    raise UtilException(
-                        "Chain filter function does not support for passing "
-                        "arguments."
-                    )
+            if isinstance(f_func, list):
                 for func in f_func:
-                    value: Any = func(value)
+                    value: T = func(value)
             else:
-                value: Any = f_func(value, *args, **kwargs)
-        except UtilException:
+                value: T = f_func(value, *args, **kwargs)
+        except UtilException as err:
+            logger.warning(str(err))
             raise
         except Exception as err:
             logger.warning(str(err))
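After the refactor, `map_post_filter` is a thin loop over `get_args_from_filter`. A sketch of applying the new `title` entry through the registry, assuming `make_filter_registry()` folds the built-in `FILTERS` mapping in:

```python
from ddeutil.workflow.utils import make_filter_registry, map_post_filter

filters = make_filter_registry()

# "title" is parsed by get_args_from_filter (no call arguments here) and
# dispatched to the lambda registered in FILTERS.
print(map_post_filter("hello world", ["title"], filters))
# -> "Hello World"
```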
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.17
+Version: 0.0.19
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -24,6 +24,7 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil >=0.4.3
 Requires-Dist: ddeutil-io[toml,yaml] >=0.2.3
+Requires-Dist: pydantic ==2.9.2
 Requires-Dist: python-dotenv ==1.0.1
 Requires-Dist: typer <1.0.0,==0.12.5
 Requires-Dist: schedule <2.0.0,==1.2.2
@@ -45,7 +46,7 @@ for easy to make a simple metadata driven for data workflow orchestration.
 It can to use for data operator by a `.yaml` template.
 
 > [!WARNING]
-> This package provide only orchestration workload task. That mean you should not
+> This package provide only orchestration workload. That mean you should not
 > use the workflow stage to process any large volume data which use lot of compute
 > resource. :cold_sweat:
 
@@ -57,10 +58,10 @@ configuration. It called **Metadata Driven Data Workflow**.
 
 **:pushpin: <u>Rules of This Workflow engine</u>**:
 
-1. Minimum frequency unit of scheduling is **1 minute** :warning:
+1. The Minimum frequency unit of scheduling is **1 minute** :warning:
 2. Can not re-run only failed stage and its pending downstream :rotating_light:
 3. All parallel tasks inside workflow engine use Multi-Threading
-   (Because Python 3.13 unlock GIL :unlock:)
+   (Python 3.13 unlock GIL :unlock:)
 
 > [!NOTE]
 > _Disclaimer_: I inspire the dynamic statement from the [**GitHub Action**](https://github.com/features/actions)
@@ -182,7 +183,7 @@ application. If any configuration values do not set yet, it will use default value
 and do not raise any error to you.
 
 | Environment | Component | Default | Description | Remark |
-|:----------------------------------------|-----------|----------------------------------|--------------------------------------------------------------------------------------------------------------------|--------|
+|:----------------------------------------|:----------|:---------------------------------|--------------------------------------------------------------------------------------------------------------------|--------|
 | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application. | |
 | `WORKFLOW_CORE_REGISTRY` | Core | src.ddeutil.workflow,tests.utils | List of importable string for the hook stage. | |
 | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | ddeutil.workflow.utils | List of importable string for the filter template. | |
@@ -194,7 +195,7 @@ and do not raise any error to you.
 | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
 | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
 | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
-| `WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE` | Core | true | . | |
+| `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
 | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
 | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
 | `WORKFLOW_APP_MAX_PROCESS` | Schedule | 2 | The maximum process worker number that run in scheduler app module. | |
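The variable rename is a breaking change for deployments that pinned the 0.0.17 name. A migration sketch, assuming the value is exported before `ddeutil.workflow` is imported (the `Config` object reads it at import time):

```python
import os

# 0.0.17 (no longer read):
#   os.environ["WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE"] = "true"
# 0.0.19 (maps to Config.gen_id_simple_mode):
os.environ["WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE"] = "true"
```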
@@ -0,0 +1,21 @@
+ddeutil/workflow/__about__.py,sha256=GGHkQYD3vWi0C2DiWlU64oocfEx-Bn3LsXBorKFxtlM,28
+ddeutil/workflow/__cron.py,sha256=KUCSdx30juyX6IE6Dal8T_qSudOiaD02r1SRHFJp7IM,25778
+ddeutil/workflow/__init__.py,sha256=HA0tjGBXJItNPsAqvhnFUXU0fP0K6iMMfMtJ37tRwcw,1385
+ddeutil/workflow/__types.py,sha256=yizLXzjQpBt_WPaof2pIyncitJvYeksw4Q1zYJeuCLA,3707
+ddeutil/workflow/api.py,sha256=vUT2RVS9sF3hvY-IrzAEnahxwq4ZFYP0G3xfctHbNsw,4701
+ddeutil/workflow/cli.py,sha256=baHhvtI8snbHYHeThoX401Cd6SMB2boyyCbCtTrIl3E,3278
+ddeutil/workflow/conf.py,sha256=3xJPHIQcY4Q7rJoe0V8CUVHiEt_kww_bmr1f6MhcyCM,15420
+ddeutil/workflow/exceptions.py,sha256=Uf1-Tn8rAzj0aiVHSqo4fBqO80W0za7UFZgKv24E-tg,706
+ddeutil/workflow/job.py,sha256=Ww1zjviDCfTVUC_q7e3HHJwk3KXEFZxzGROQXoi_JS8,24349
+ddeutil/workflow/on.py,sha256=slaNJr2RWBEmAmEUcW0S99qD45ENUUgAGka5XoZ6Yag,7937
+ddeutil/workflow/repeat.py,sha256=s0azh-f5JQeow7kpxM8GKlqgAmKL7oU6St3L4Ggx4cY,4925
+ddeutil/workflow/route.py,sha256=JALwOH6xKu5rnII7DgA1Lbp_E5ehCoBbOW_eKqB_Olk,6753
+ddeutil/workflow/scheduler.py,sha256=0xE3bjIMe4eguo24rotOt6JfTy78tgst_qe7csSlt4k,50477
+ddeutil/workflow/stage.py,sha256=6Ng3RiCSrnQ-FUsRRcuG2ClMD6ifiQlgyBFi6tohfxI,25455
+ddeutil/workflow/utils.py,sha256=ETzixrfrXhacAm06agnvI1E8UZKjKCkKempJnW9KKes,25581
+ddeutil_workflow-0.0.19.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.19.dist-info/METADATA,sha256=zTUO4MZi08V0upHMA8xmH8q7ms5M7Eot0lflQyGeaXw,13597
+ddeutil_workflow-0.0.19.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
+ddeutil_workflow-0.0.19.dist-info/entry_points.txt,sha256=0BVOgO3LdUdXVZ-CiHHDKxzEk2c8J30jEwHeKn2YCWI,62
+ddeutil_workflow-0.0.19.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.19.dist-info/RECORD,,
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (75.2.0)
+Generator: setuptools (75.3.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
@@ -1,21 +0,0 @@
-ddeutil/workflow/__about__.py,sha256=z3f1GAF3VbZK1m4FWAXXMsWplP_jSe-X-wVlshvlDWU,28
-ddeutil/workflow/__cron.py,sha256=ZiuV4ASkXvAyFJYxEb9PKiAFNYnUt4AJozu_kH3pI4U,25777
-ddeutil/workflow/__init__.py,sha256=RNKME4FPMAjqtrBR-IBwQVEKeoY5yBAiHYcZw0k9cI4,729
-ddeutil/workflow/__types.py,sha256=yizLXzjQpBt_WPaof2pIyncitJvYeksw4Q1zYJeuCLA,3707
-ddeutil/workflow/api.py,sha256=vUT2RVS9sF3hvY-IrzAEnahxwq4ZFYP0G3xfctHbNsw,4701
-ddeutil/workflow/cli.py,sha256=baHhvtI8snbHYHeThoX401Cd6SMB2boyyCbCtTrIl3E,3278
-ddeutil/workflow/conf.py,sha256=SV4GMtjUc-Bor9BPi0yOtTIsiZ0FImsoRbuJysUIE9w,15395
-ddeutil/workflow/exceptions.py,sha256=Uf1-Tn8rAzj0aiVHSqo4fBqO80W0za7UFZgKv24E-tg,706
-ddeutil/workflow/job.py,sha256=dW9NXR_bttDGLwelVi7qXXlLd96KX-TKG8xnHejA6u0,24041
-ddeutil/workflow/on.py,sha256=rneZB5HyFWTBWriGef999bovA3glQIK6LTgC996q9Gc,7334
-ddeutil/workflow/repeat.py,sha256=9uKku5uMcQgzY5fWyaJMwJ0wPFX0oTwmu7vXKdgB_ec,4923
-ddeutil/workflow/route.py,sha256=JALwOH6xKu5rnII7DgA1Lbp_E5ehCoBbOW_eKqB_Olk,6753
-ddeutil/workflow/scheduler.py,sha256=Oa6bZpphjlGp0mXdBuLMk1m6G-dezaBNQxQX-SB3WJ0,47032
-ddeutil/workflow/stage.py,sha256=fMv_oFkoqpfoewzPUMdl3-BQcrJ8SE53cF7es8yGxfs,25525
-ddeutil/workflow/utils.py,sha256=lpnqGGd_Rw7eZo2wDbZ-NZNItBooFooPjwM4_40Csh8,25152
-ddeutil_workflow-0.0.17.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
-ddeutil_workflow-0.0.17.dist-info/METADATA,sha256=btmCr-yjy4gzhnZppfXjANfPH-3tKUJFGon2aOMUK30,13574
-ddeutil_workflow-0.0.17.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
-ddeutil_workflow-0.0.17.dist-info/entry_points.txt,sha256=0BVOgO3LdUdXVZ-CiHHDKxzEk2c8J30jEwHeKn2YCWI,62
-ddeutil_workflow-0.0.17.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
-ddeutil_workflow-0.0.17.dist-info/RECORD,,