ddeutil-workflow 0.0.16__py3-none-any.whl → 0.0.17__py3-none-any.whl

ddeutil/workflow/job.py CHANGED
@@ -30,13 +30,12 @@ from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self

 from .__types import DictData, DictStr, Matrix, TupleStr
-from .conf import config
+from .conf import config, get_logger
 from .exceptions import (
     JobException,
     StageException,
     UtilException,
 )
-from .log import get_logger
 from .stage import Stage
 from .utils import (
     Result,
@@ -111,6 +110,7 @@ def make(
             all(inc.get(k) == v for k, v in m.items()) for m in [*final, *add]
         ):
             continue
+
         add.append(inc)

     # NOTE: Merge all matrix together.
@@ -273,11 +273,32 @@ class Job(BaseModel):

     @field_validator("desc", mode="after")
     def ___prepare_desc__(cls, value: str) -> str:
-        """Prepare description string that was created on a template."""
+        """Prepare description string that was created on a template.
+
+        :rtype: str
+        """
         return dedent(value)

+    @field_validator("stages", mode="after")
+    def __validate_stage_id__(cls, value: list[Stage]) -> list[Stage]:
+        """Validate a stage ID of all stage in stages field should not be
+        duplicate.
+
+        :rtype: list[Stage]
+        """
+        # VALIDATE: Validate stage id should not duplicate.
+        rs: list[str] = []
+        for stage in value:
+            name: str = stage.id or stage.name
+            if name in rs:
+                raise ValueError(
+                    "Stage name in jobs object should not be duplicate."
+                )
+            rs.append(name)
+        return value
+
     @model_validator(mode="after")
-    def __prepare_running_id__(self) -> Self:
+    def __prepare_running_id_and_stage_name__(self) -> Self:
         """Prepare the job running ID.

         :rtype: Self
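
The new `__validate_stage_id__` validator rejects duplicate stage identifiers at model-build time. A minimal sketch of what it catches, assuming `Job` accepts plain mappings in its `stages` field and that a bare stage only needs a `name` (both assumptions; only the validator logic comes from this hunk):

    from ddeutil.workflow.job import Job

    # Two stages share the name "Echo" and neither sets an explicit ``id``, so
    # ``stage.id or stage.name`` collides and the validator raises a ValueError
    # (surfaced by pydantic as a ValidationError, which subclasses ValueError).
    try:
        Job(stages=[{"name": "Echo"}, {"name": "Echo"}])
    except ValueError as err:
        print(err)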
@@ -355,7 +376,7 @@ class Job(BaseModel):
         to["jobs"][_id] = (
             {"strategies": output}
             if self.strategy.is_set()
-            else output[next(iter(output))]
+            else output.get(next(iter(output), "DUMMY"), {})
         )
         return to

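The replaced expression guards the no-output case: `next(iter(output))` on an empty dict raises `StopIteration`, while the new form degrades to an empty mapping. A standalone illustration in plain Python:

    output: dict = {}

    # next(iter(output)) would raise StopIteration here; the "DUMMY" default plus
    # dict.get(...) turns the missing-output case into an empty dict instead.
    print(output.get(next(iter(output), "DUMMY"), {}))  # -> {}
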
@@ -365,7 +386,6 @@ class Job(BaseModel):
         params: DictData,
         *,
         event: Event | None = None,
-        raise_error: bool = True,
     ) -> Result:
         """Job Strategy execution with passing dynamic parameters from the
         workflow execution to strategy matrix.
@@ -374,19 +394,20 @@ class Job(BaseModel):
         It different with ``self.execute`` because this method run only one
         strategy and return with context of this strategy data.

-        :raise JobException: If it has any error from StageException or
-            UtilException.
+        :raise JobException: If it has any error from ``StageException`` or
+            ``UtilException``.

         :param strategy: A metrix strategy value.
         :param params: A dynamic parameters.
         :param event: An manger event that pass to the PoolThreadExecutor.
-        :param raise_error: A flag that raise error instead catching to result
-            if it get exception from stage execution.
+
         :rtype: Result
         """
         strategy_id: str = gen_id(strategy)

-        # NOTE: Create strategy execution context and update a matrix and copied
+        # PARAGRAPH:
+        #
+        # Create strategy execution context and update a matrix and copied
         # of params. So, the context value will have structure like;
         #
         # {
@@ -405,14 +426,14 @@ class Job(BaseModel):
             # IMPORTANT: Change any stage running IDs to this job running ID.
             stage: Stage = stage.get_running_id(self.run_id)

-            _st_name: str = stage.id or stage.name
+            name: str = stage.id or stage.name

             if stage.is_skipped(params=context):
-                logger.info(f"({self.run_id}) [JOB]: Skip stage: {_st_name!r}")
+                logger.info(f"({self.run_id}) [JOB]: Skip stage: {name!r}")
                 continue

             logger.info(
-                f"({self.run_id}) [JOB]: Start execute the stage: {_st_name!r}"
+                f"({self.run_id}) [JOB]: Start execute the stage: {name!r}"
             )

             # NOTE: Logging a matrix that pass on this stage execution.
@@ -432,20 +453,20 @@ class Job(BaseModel):
                             # ---
                             # "stages": filter_func(context.pop("stages", {})),
                             "stages": context.pop("stages", {}),
-                            # NOTE: Set the error keys.
                             "error": JobException(
-                                "Process Event stopped before execution"
+                                "Job strategy was canceled from trigger event "
+                                "that had stopped before execution."
+                            ),
+                            "error_message": (
+                                "Job strategy was canceled from trigger event "
+                                "that had stopped before execution."
                             ),
-                            "error_message": {
-                                "message": (
-                                    "Process Event stopped before execution"
-                                ),
-                            },
                         },
                     },
                 )

-            # NOTE:
+            # PARAGRAPH:
+            #
             # I do not use below syntax because `params` dict be the
             # reference memory pointer and it was changed when I action
             # anything like update or re-construct this.
@@ -471,16 +492,25 @@ class Job(BaseModel):
                 logger.error(
                     f"({self.run_id}) [JOB]: {err.__class__.__name__}: {err}"
                 )
-                if raise_error:
+                if config.job_raise_error:
                     raise JobException(
                         f"Get stage execution error: {err.__class__.__name__}: "
                         f"{err}"
                     ) from None
-                else:
-                    raise NotImplementedError() from None
+                return Result(
+                    status=1,
+                    context={
+                        strategy_id: {
+                            "matrix": strategy,
+                            "stages": context.pop("stages", {}),
+                            "error": err,
+                            "error_message": f"{err.__class__.__name__}: {err}",
+                        },
+                    },
+                )

-            # NOTE: Remove new stage object that was created from
-            # ``get_running_id`` method.
+            # NOTE: Remove the current stage object that was created from
+            # ``get_running_id`` method for saving memory.
             del stage

         return Result(
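
With the `raise_error` flag removed, the behaviour now follows `config.job_raise_error`: when that toggle is off, a failing stage produces a `Result` with `status=1` and the error stored under the strategy key rather than a raised `JobException`. A rough sketch of reading that shape; `job` is a hypothetical `Job` whose stage fails, the method name and the `gen_id` import path are inferred from this file's imports, and how `job_raise_error` gets disabled is not shown in this diff:

    from ddeutil.workflow.utils import gen_id  # assumed import path

    strategy: dict = {}
    rs = job.execute_strategy(strategy, params={"params": {}})
    if rs.status == 1:
        ctx = rs.context[gen_id(strategy)]   # context is keyed by the strategy hash
        print(ctx["error_message"])          # e.g. "StageException: ..."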
@@ -523,6 +553,8 @@ class Job(BaseModel):
         # NOTE: Create event for cancel executor by trigger stop running event.
         event: Event = Event()

+        print("Job Run Fail-Fast:", self.strategy.fail_fast)
+
         # IMPORTANT: Start running strategy execution by multithreading because
         # it will running by strategy values without waiting previous
         # execution.
@@ -583,30 +615,34 @@ class Job(BaseModel):
             )
         logger.debug(f"({self.run_id}) [JOB]: Strategy is set Fail Fast{nd}")

-        # NOTE: Stop all running tasks with setting the event manager and cancel
+        # NOTE:
+        # Stop all running tasks with setting the event manager and cancel
         # any scheduled tasks.
+        #
         if len(done) != len(futures):
             event.set()
-            for future in futures:
+            for future in not_done:
                 future.cancel()

-            del future
-
+        future: Future
         for future in done:
-            if future.exception():
-                status = 1
+            if err := future.exception():
+                status: int = 1
                 logger.error(
                     f"({self.run_id}) [JOB]: One stage failed with: "
                     f"{future.exception()}, shutting down this future."
                 )
-            elif future.cancelled():
+                context.update(
+                    {
+                        "error": err,
+                        "error_message": f"{err.__class__.__name__}: {err}",
+                    },
+                )
                 continue

             # NOTE: Update the result context to main job context.
             context.update(future.result(timeout=result_timeout).context)

-            del future
-
         return rs_final.catch(status=status, context=context)

     def __catch_all_completed(
@@ -631,7 +667,7 @@ class Job(BaseModel):
         for future in as_completed(futures, timeout=timeout):
             try:
                 context.update(future.result(timeout=result_timeout).context)
-            except TimeoutError:
+            except TimeoutError:  # pragma: no cov
                 status = 1
                 logger.warning(
                     f"({self.run_id}) [JOB]: Task is hanging. Attempting to "
@@ -653,6 +689,10 @@ class Job(BaseModel):
                     f"fail-fast does not set;\n{err.__class__.__name__}:\n\t"
                     f"{err}"
                 )
-            finally:
-                del future
+                context.update(
+                    {
+                        "error": err,
+                        "error_message": f"{err.__class__.__name__}: {err}",
+                    },
+                )
         return rs_final.catch(status=status, context=context)
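
Both error paths now fold a failing future's exception into the job context, and fail-fast cancels only the `not_done` futures instead of every future. That cancel-only-pending behaviour is the standard `concurrent.futures.wait` pattern; a self-contained sketch of it, independent of this package:

    from concurrent.futures import FIRST_EXCEPTION, ThreadPoolExecutor, wait


    def boom() -> None:
        raise RuntimeError("stage failed")


    context: dict = {}
    with ThreadPoolExecutor(max_workers=2) as executor:
        futures = [executor.submit(boom) for _ in range(4)]
        done, not_done = wait(futures, return_when=FIRST_EXCEPTION)

        # Only pending futures can be cancelled; finished ones are inspected instead.
        for future in not_done:
            future.cancel()

        for future in done:
            if err := future.exception():
                context["error"] = err
                context["error_message"] = f"{err.__class__.__name__}: {err}"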
ddeutil/workflow/on.py CHANGED
@@ -14,9 +14,9 @@ from pydantic.functional_serializers import field_serializer
 from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self

+from .__cron import WEEKDAYS, CronJob, CronJobYear, CronRunner
 from .__types import DictData, DictStr, TupleStr
 from .conf import Loader
-from .cron import WEEKDAYS, CronJob, CronJobYear, CronRunner

 __all__: TupleStr = (
     "On",
@@ -109,7 +109,7 @@ class On(BaseModel):
     def from_loader(
         cls,
         name: str,
-        externals: DictData,
+        externals: DictData | None = None,
     ) -> Self:
         """Constructor from the name of config that will use loader object for
         getting the data.
@@ -117,6 +117,7 @@ class On(BaseModel):
         :param name: A name of config that will getting from loader.
         :param externals: A extras external parameter that will keep in extras.
         """
+        externals: DictData = externals or {}
         loader: Loader = Loader(name, externals=externals)

         # NOTE: Validate the config type match with current connection model
@@ -139,7 +140,9 @@ class On(BaseModel):
             )
         )
         if "cronjob" not in loader_data:
-            raise ValueError("Config does not set ``cronjob`` key")
+            raise ValueError(
+                "Config does not set ``cronjob`` or ``interval`` keys"
+            )
         return cls.model_validate(
             obj=dict(
                 cronjob=loader_data.pop("cronjob"),
@@ -175,17 +178,17 @@ class On(BaseModel):

     def generate(self, start: str | datetime) -> CronRunner:
         """Return Cron runner object."""
-        if not isinstance(start, datetime):
+        if isinstance(start, str):
             start: datetime = datetime.fromisoformat(start)
+        elif not isinstance(start, datetime):
+            raise TypeError("start value should be str or datetime type.")
         return self.cronjob.schedule(date=start, tz=self.tz)

     def next(self, start: str | datetime) -> datetime:
         """Return a next datetime from Cron runner object that start with any
         date that given from input.
         """
-        if not isinstance(start, datetime):
-            start: datetime = datetime.fromisoformat(start)
-        return self.cronjob.schedule(date=start, tz=self.tz).next
+        return self.generate(start=start).next


 class YearOn(On):
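
After this change `generate` accepts either an ISO-format string or a `datetime` and raises `TypeError` for anything else, while `next` is a thin wrapper over it. A rough usage sketch; the `model_validate` payload shape is an assumption, only the `generate`/`next` behaviour comes from this hunk:

    from datetime import datetime

    from ddeutil.workflow.on import On

    on = On.model_validate({"cronjob": "*/5 * * * *"})  # assumed field shape

    runner = on.generate("2024-01-01 00:00:00")  # str goes through fromisoformat
    print(on.next(datetime(2024, 1, 1)))         # delegates to generate(...).next

    # on.generate(20240101)  # would raise TypeError: start should be str or datetime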
@@ -12,9 +12,8 @@ from functools import wraps

 from starlette.concurrency import run_in_threadpool

-from .conf import config
+from .conf import config, get_logger
 from .cron import CronJob
-from .log import get_logger

 logger = get_logger("ddeutil.workflow")

ddeutil/workflow/route.py CHANGED
@@ -16,8 +16,7 @@ from pydantic import BaseModel

 from . import Workflow
 from .__types import DictData
-from .conf import Loader, config
-from .log import get_logger
+from .conf import Loader, config, get_logger
 from .scheduler import Schedule
 from .utils import Result

@@ -52,15 +52,14 @@ except ImportError:

 try:
     from schedule import CancelJob
-except ImportError:
+except ImportError:  # pragma: no cov
     CancelJob = None

+from .__cron import CronRunner
 from .__types import DictData, TupleStr
-from .conf import Loader, config
-from .cron import CronRunner
+from .conf import FileLog, Loader, Log, config, get_logger
 from .exceptions import JobException, WorkflowException
 from .job import Job
-from .log import FileLog, Log, get_logger
 from .on import On
 from .utils import (
     Param,
@@ -230,8 +229,8 @@ class Workflow(BaseModel):
             need for need in self.jobs[job].needs if need not in self.jobs
         ]:
             raise WorkflowException(
-                f"This needed jobs: {not_exist} do not exist in this "
-                f"workflow, {self.name!r}"
+                f"The needed jobs: {not_exist} do not found in "
+                f"{self.name!r}."
             )

         # NOTE: update a job id with its job id from workflow template
@@ -354,11 +353,11 @@ class Workflow(BaseModel):
         # NOTE: get next schedule time that generate from now.
         next_time: datetime = gen.next

-        # NOTE: get next utils it does not logger.
+        # NOTE: While-loop to getting next until it does not logger.
         while log.is_pointed(self.name, next_time, queue=queue):
             next_time: datetime = gen.next

-        # NOTE: push this next running time to log queue
+        # NOTE: Heap-push this next running time to log queue list.
         heappush(queue, next_time)

         # VALIDATE: Check the different time between the next schedule time and
@@ -709,7 +708,7 @@ class Workflow(BaseModel):
             raise WorkflowException(f"{err}")
         try:
             future.result(timeout=60)
-        except TimeoutError as err:
+        except TimeoutError as err:  # pragma: no cove
             raise WorkflowException(
                 "Timeout when getting result from future"
             ) from err
ddeutil/workflow/stage.py CHANGED
@@ -13,8 +13,8 @@ handle stage error on this stage model. I think stage model should have a lot of
 usecase and it does not worry when I want to create a new one.

     Execution --> Ok      --> Result with 0
-              --> Error   --> Raise StageException
-                          --> Result with 1 (if env var was set)
+              --> Error   --> Result with 1 (if env var was set)
+                          --> Raise StageException

 On the context I/O that pass to a stage object at execute process. The
 execute method receives a `params={"params": {...}}` value for mapping to
@@ -48,9 +48,8 @@ from pydantic.functional_validators import model_validator
 from typing_extensions import Self

 from .__types import DictData, DictStr, Re, TupleStr
-from .conf import config
+from .conf import config, get_logger
 from .exceptions import StageException
-from .log import get_logger
 from .utils import (
     Registry,
     Result,
@@ -94,12 +93,12 @@ def handler_result(message: str | None = None) -> DecoratorResult:
                       status: 0
                       context:
                           outputs: ...
-            --> Error --> Raise StageException
-                      --> Result (if env var was set)
+            --> Error --> Result (if env var was set)
                           status: 1
                           context:
                               error: ...
                               error_message: ...
+            --> Error --> Raise StageException

     On the last step, it will set the running ID on a return result object
     from current stage ID before release the final result.
@@ -128,6 +127,7 @@ def handler_result(message: str | None = None) -> DecoratorResult:
             logger.error(
                 f"({self.run_id}) [STAGE]: {err.__class__.__name__}: {err}"
             )
+            print("Stage Raise error:", config.stage_raise_error)
             if config.stage_raise_error:
                 # NOTE: If error that raise from stage execution course by
                 #   itself, it will return that error with previous
@@ -190,6 +190,9 @@ class BaseStage(BaseModel, ABC):
         method will validate name and id fields should not contain any template
         parameter (exclude matrix template).

+        :raise ValueError: When the ID and name fields include matrix parameter
+            template with the 'matrix.' string value.
+
         :rtype: Self
         """
         if self.run_id is None:
@@ -199,7 +202,7 @@ class BaseStage(BaseModel, ABC):
         #   template. (allow only matrix)
         if not_in_template(self.id) or not_in_template(self.name):
             raise ValueError(
-                "Stage name and ID should only template with matrix."
+                "Stage name and ID should only template with 'matrix.'"
             )

         return self
@@ -236,10 +239,10 @@ class BaseStage(BaseModel, ABC):
         The result of the `to` variable will be;

             ... (iii) to: {
-                'stages': {
-                    '<stage-id>': {'outputs': {'foo': 'bar'}}
-                }
+                    'stages': {
+                        '<stage-id>': {'outputs': {'foo': 'bar'}}
                     }
+                }

         :param output: A output data that want to extract to an output key.
         :param to: A context data that want to add output result.
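
A quick sketch of the nesting the docstring above describes, with `stage` standing in for any stage object whose `id` is `"<stage-id>"` (only the `set_outputs(output, to)` signature and the resulting shape are taken from this file):

    to: dict = {}
    stage.set_outputs({"foo": "bar"}, to=to)
    # to == {"stages": {"<stage-id>": {"outputs": {"foo": "bar"}}}}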
@@ -273,6 +276,11 @@ class BaseStage(BaseModel, ABC):
         """Return true if condition of this stage do not correct. This process
         use build-in eval function to execute the if-condition.

+        :raise StageException: When it has any error raise from the eval
+            condition statement.
+        :raise StageException: When return type of the eval condition statement
+            does not return with boolean type.
+
         :param params: A parameters that want to pass to condition template.
         :rtype: bool
         """
@@ -460,6 +468,15 @@ class PyStage(BaseStage):
         ),
     )

+    @staticmethod
+    def pick_keys_from_locals(values: DictData) -> Iterator[str]:
+        from inspect import ismodule
+
+        for value in values:
+            if value == "__annotations__" or ismodule(values[value]):
+                continue
+            yield value
+
     def set_outputs(self, output: DictData, to: DictData) -> DictData:
         """Override set an outputs method for the Python execution process that
         extract output from all the locals values.
@@ -469,15 +486,19 @@ class PyStage(BaseStage):
         :rtype: DictData
         """
         # NOTE: The output will fileter unnecessary keys from locals.
-        _locals: DictData = output["locals"]
+        lc: DictData = output.get("locals", {})
         super().set_outputs(
-            {k: _locals[k] for k in _locals if k != "__annotations__"}, to=to
+            (
+                {k: lc[k] for k in self.pick_keys_from_locals(lc)}
+                | {k: output[k] for k in output if k.startswith("error")}
+            ),
+            to=to,
         )

-        # NOTE:
-        # Override value that changing from the globals that pass via exec.
-        _globals: DictData = output["globals"]
-        to.update({k: _globals[k] for k in to if k in _globals})
+        # NOTE: Override value that changing from the globals that pass via the
+        #   exec function.
+        gb: DictData = output.get("globals", {})
+        to.update({k: gb[k] for k in to if k in gb})
         return to

     @handler_result()
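
The new `pick_keys_from_locals` helper drops `__annotations__` and imported module objects from the `exec` locals before they are copied into the stage outputs. A small sketch of the filter on its own:

    import math

    from ddeutil.workflow.stage import PyStage

    lc = {"x": 1, "math": math, "__annotations__": {}}
    print(list(PyStage.pick_keys_from_locals(lc)))  # ['x']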
@@ -495,15 +516,15 @@ class PyStage(BaseStage):
         _globals: DictData = (
             globals() | params | param2template(self.vars, params)
         )
-        _locals: DictData = {}
+        lc: DictData = {}

         # NOTE: Start exec the run statement.
         logger.info(f"({self.run_id}) [STAGE]: Py-Execute: {self.name}")
-        exec(run, _globals, _locals)
+        exec(run, _globals, lc)

         return Result(
             status=0,
-            context={"locals": _locals, "globals": _globals},
+            context={"locals": lc, "globals": _globals},
         )


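Renaming aside, the execution path is unchanged: the `run` script is executed with `exec`, and its locals and globals land in the result context (from where the `set_outputs` override above filters them into outputs). A rough sketch, assuming `PyStage` can be built with just `name` and `run` (that minimal constructor is an assumption; the result shape comes from this hunk):

    from ddeutil.workflow.stage import PyStage

    stage = PyStage(name="Sum", run="x = 1 + 1")  # assumed minimal constructor
    rs = stage.execute(params={})
    print(rs.status)                  # 0 on success
    print(rs.context["locals"]["x"])  # 2
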
@@ -522,6 +543,11 @@ def extract_hook(hook: str) -> Callable[[], TagFunc]:
     """Extract Hook function from string value to hook partial function that
     does run it at runtime.

+    :raise NotImplementedError: When the searching hook's function result does
+        not exist in the registry.
+    :raise NotImplementedError: When the searching hook's tag result does not
+        exists in the registry with its function key.
+
     :param hook: A hook value that able to match with Task regex.
     :rtype: Callable[[], TagFunc]
     """
@@ -581,6 +607,11 @@ class HookStage(BaseStage):
     def execute(self, params: DictData) -> Result:
         """Execute the Hook function that already in the hook registry.

+        :raise ValueError: When the necessary arguments of hook function do not
+            set from the input params argument.
+        :raise TypeError: When the return type of hook function does not be
+            dict type.
+
         :param params: A parameter that want to pass before run any statement.
         :type params: DictData
         :rtype: Result