ddeutil-workflow 0.0.60__py3-none-any.whl → 0.0.61__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
1
- __version__: str = "0.0.60"
1
+ __version__: str = "0.0.61"
ddeutil/workflow/event.py CHANGED
@@ -314,3 +314,15 @@ class CrontabYear(Crontab):
314
314
  if isinstance(value, str)
315
315
  else value
316
316
  )
317
+
318
+
319
+ Event = Annotated[
320
+ Union[
321
+ CronJobYear,
322
+ CronJob,
323
+ ],
324
+ Field(
325
+ union_mode="smart",
326
+         description="An event model.",
327
+ ),
328
+ ] # pragma: no cov
ddeutil/workflow/logs.py CHANGED
@@ -23,7 +23,7 @@ from inspect import Traceback, currentframe, getframeinfo
23
23
  from pathlib import Path
24
24
  from threading import get_ident
25
25
  from types import FrameType
26
- from typing import ClassVar, Literal, Optional, TypeVar, Union
26
+ from typing import ClassVar, Final, Literal, Optional, TypeVar, Union
27
27
 
28
28
  from pydantic import BaseModel, ConfigDict, Field
29
29
  from pydantic.functional_validators import model_validator
@@ -74,7 +74,7 @@ def get_dt_tznow() -> datetime: # pragma: no cov
74
74
  return get_dt_now(tz=config.tz)
75
75
 
76
76
 
77
- PREFIX_LOGS: dict[str, dict] = {
77
+ PREFIX_LOGS: Final[dict[str, dict]] = {
78
78
  "CALLER": {
79
79
  "emoji": "📍",
80
80
  "desc": "logs from any usage from custom caller function.",
@@ -85,7 +85,7 @@ PREFIX_LOGS: dict[str, dict] = {
85
85
  "RELEASE": {"emoji": "📅", "desc": "logs from release workflow method."},
86
86
  "POKING": {"emoji": "⏰", "desc": "logs from poke workflow method."},
87
87
  } # pragma: no cov
88
- PREFIX_DEFAULT: str = "CALLER"
88
+ PREFIX_DEFAULT: Final[str] = "CALLER"
89
89
  PREFIX_LOGS_REGEX: re.Pattern[str] = re.compile(
90
90
  rf"(^\[(?P<name>{'|'.join(PREFIX_LOGS)})]:\s?)?(?P<message>.*)",
91
91
  re.MULTILINE | re.DOTALL | re.ASCII | re.VERBOSE,
@@ -103,6 +103,9 @@ class PrefixMsg(BaseModel):
103
103
  def prepare(self, extras: Optional[DictData] = None) -> str:
104
104
  """Prepare message with force add prefix before writing trace log.
105
105
 
106
+ :param extras: (DictData) An extra parameter that want to get the
107
+ `log_add_emoji` flag.
108
+
106
109
  :rtype: str
107
110
  """
108
111
  name: str = self.name or PREFIX_DEFAULT
@@ -332,9 +335,7 @@ class BaseTrace(BaseModel, ABC): # pragma: no cov
332
335
 
333
336
  :param message: (str) A message that want to log.
334
337
  """
335
- msg: str = prepare_newline(
336
- self.make_message(extract_msg_prefix(message).prepare(self.extras))
337
- )
338
+ msg: str = self.make_message(message)
338
339
 
339
340
  if mode != "debug" or (
340
341
  mode == "debug" and dynamic("debug", extras=self.extras)
@@ -391,9 +392,7 @@ class BaseTrace(BaseModel, ABC): # pragma: no cov
391
392
 
392
393
  :param message: (str) A message that want to log.
393
394
  """
394
- msg: str = prepare_newline(
395
- self.make_message(extract_msg_prefix(message).prepare(self.extras))
396
- )
395
+ msg: str = self.make_message(message)
397
396
 
398
397
  if mode != "debug" or (
399
398
  mode == "debug" and dynamic("debug", extras=self.extras)
@@ -514,13 +513,15 @@ class FileTrace(BaseTrace): # pragma: no cov
514
513
  return f"{cut_parent_run_id} -> {cut_run_id}"
515
514
 
516
515
  def make_message(self, message: str) -> str:
517
- """Prepare and Make a message before write and log processes.
516
+ """Prepare and Make a message before write and log steps.
518
517
 
519
518
  :param message: (str) A message that want to prepare and make before.
520
519
 
521
520
  :rtype: str
522
521
  """
523
- return f"({self.cut_id}) {message}"
522
+ return prepare_newline(
523
+ f"({self.cut_id}) {extract_msg_prefix(message).prepare(self.extras)}"
524
+ )
524
525
 
525
526
  def writer(self, message: str, level: str, is_err: bool = False) -> None:
526
527
  """Write a trace message after making to target file and write metadata
@@ -20,6 +20,7 @@ from typing import Annotated, Any, Literal, Optional, TypeVar, Union
20
20
  from ddeutil.core import str2dict, str2list
21
21
  from pydantic import BaseModel, Field
22
22
 
23
+ from .__types import StrOrInt
23
24
  from .exceptions import ParamValueException
24
25
  from .utils import get_d_now, get_dt_now
25
26
 
@@ -159,10 +160,11 @@ class StrParam(DefaultParam):
159
160
 
160
161
  type: Literal["str"] = "str"
161
162
 
162
- def receive(self, value: Optional[str] = None) -> Optional[str]:
163
+ def receive(self, value: Optional[Any] = None) -> Optional[str]:
163
164
  """Receive value that match with str.
164
165
 
165
- :param value: A value that want to validate with string parameter type.
166
+ :param value: (Any) A value that want to validate with string parameter
167
+ type.
166
168
  :rtype: Optional[str]
167
169
  """
168
170
  if value is None:
@@ -175,7 +177,7 @@ class IntParam(DefaultParam):
175
177
 
176
178
  type: Literal["int"] = "int"
177
179
 
178
- def receive(self, value: Optional[int] = None) -> Optional[int]:
180
+ def receive(self, value: Optional[StrOrInt] = None) -> Optional[int]:
179
181
  """Receive value that match with int.
180
182
 
181
183
  :param value: A value that want to validate with integer parameter type.
@@ -200,13 +202,24 @@ class FloatParam(DefaultParam): # pragma: no cov
200
202
  precision: int = 6
201
203
 
202
204
  def rounding(self, value: float) -> float:
203
- """Rounding float value with the specific precision field."""
205
+ """Rounding float value with the specific precision field.
206
+
207
+ :param value: A float value that want to round with the precision value.
208
+
209
+ :rtype: float
210
+ """
204
211
  round_str: str = f"{{0:.{self.precision}f}}"
205
212
  return float(round_str.format(round(value, self.precision)))
206
213
 
207
- def receive(self, value: Optional[Union[float, int, str]] = None) -> float:
214
+ def receive(
215
+ self, value: Optional[Union[float, int, str]] = None
216
+ ) -> Optional[float]:
217
+ """Receive value that match with float.
208
218
 
209
- if value in None:
219
+ :param value: A value that want to validate with float parameter type.
220
+ :rtype: float | None
221
+ """
222
+ if value is None:
210
223
  return self.default
211
224
 
212
225
  if isinstance(value, float):
@@ -217,11 +230,7 @@ class FloatParam(DefaultParam): # pragma: no cov
217
230
  raise TypeError(
218
231
  "Received value type does not math with str, float, or int."
219
232
  )
220
-
221
- try:
222
- return self.rounding(float(value))
223
- except Exception:
224
- raise
233
+ return self.rounding(float(value))
225
234
 
226
235
 
227
236
  class DecimalParam(DefaultParam): # pragma: no cov
@@ -231,12 +240,28 @@ class DecimalParam(DefaultParam): # pragma: no cov
231
240
  precision: int = 6
232
241
 
233
242
  def rounding(self, value: Decimal) -> Decimal:
234
- """Rounding float value with the specific precision field."""
243
+ """Rounding float value with the specific precision field.
244
+
245
+ :param value: (Decimal) A Decimal value that want to round with the
246
+ precision value.
247
+
248
+ :rtype: Decimal
249
+ """
235
250
  return value.quantize(Decimal(10) ** -self.precision)
236
251
 
237
- def receive(self, value: float | Decimal | None = None) -> Decimal:
252
+ def receive(
253
+ self, value: Optional[Union[float, int, str, Decimal]] = None
254
+ ) -> Decimal:
255
+ """Receive value that match with decimal.
238
256
 
239
- if isinstance(value, float):
257
+ :param value: (float | Decimal) A value that want to validate with
258
+ decimal parameter type.
259
+ :rtype: Decimal | None
260
+ """
261
+ if value is None:
262
+ return self.default
263
+
264
+ if isinstance(value, (float, int)):
240
265
  return self.rounding(Decimal(value))
241
266
  elif isinstance(value, Decimal):
242
267
  return self.rounding(value)
@@ -261,11 +286,12 @@ class ChoiceParam(BaseParam):
261
286
  description="A list of choice parameters that able be str or int.",
262
287
  )
263
288
 
264
- def receive(self, value: Union[str, int] | None = None) -> Union[str, int]:
289
+ def receive(self, value: Optional[StrOrInt] = None) -> StrOrInt:
265
290
  """Receive value that match with options.
266
291
 
267
- :param value: A value that want to select from the options field.
268
- :rtype: str
292
+ :param value: (str | int) A value that want to select from the options
293
+ field.
294
+ :rtype: str | int
269
295
  """
270
296
  # NOTE:
271
297
  # Return the first value in options if it does not pass any input
@@ -279,7 +305,7 @@ class ChoiceParam(BaseParam):
279
305
  return value
280
306
 
281
307
 
282
- class MapParam(DefaultParam): # pragma: no cov
308
+ class MapParam(DefaultParam):
283
309
  """Map parameter."""
284
310
 
285
311
  type: Literal["map"] = "map"
@@ -295,6 +321,7 @@ class MapParam(DefaultParam): # pragma: no cov
295
321
  """Receive value that match with map type.
296
322
 
297
323
  :param value: A value that want to validate with map parameter type.
324
+
298
325
  :rtype: dict[Any, Any]
299
326
  """
300
327
  if value is None:
@@ -316,7 +343,7 @@ class MapParam(DefaultParam): # pragma: no cov
316
343
  return value
317
344
 
318
345
 
319
- class ArrayParam(DefaultParam): # pragma: no cov
346
+ class ArrayParam(DefaultParam):
320
347
  """Array parameter."""
321
348
 
322
349
  type: Literal["array"] = "array"
@@ -326,7 +353,7 @@ class ArrayParam(DefaultParam): # pragma: no cov
326
353
  )
327
354
 
328
355
  def receive(
329
- self, value: Optional[Union[list[T], tuple[T, ...], str]] = None
356
+ self, value: Optional[Union[list[T], tuple[T, ...], set[T], str]] = None
330
357
  ) -> list[T]:
331
358
  """Receive value that match with array type.
332
359
 
@@ -365,5 +392,11 @@ Param = Annotated[
365
392
  IntParam,
366
393
  StrParam,
367
394
  ],
368
- Field(discriminator="type"),
395
+ Field(
396
+ discriminator="type",
397
+ description=(
398
+ "A parameter models that use for validate and receive on the "
399
+ "workflow execution."
400
+ ),
401
+ ),
369
402
  ]
@@ -44,6 +44,8 @@ FILTERS: dict[str, Callable] = { # pragma: no cov
44
44
  "upper": lambda x: x.upper(),
45
45
  "lower": lambda x: x.lower(),
46
46
  "rstr": [str, repr],
47
+ "keys": lambda x: list(x.keys()),
48
+ "values": lambda x: list(x.values()),
47
49
  }
48
50
 
49
51
 
@@ -35,6 +35,7 @@ import json
35
35
  import subprocess
36
36
  import sys
37
37
  import time
38
+ import traceback
38
39
  import uuid
39
40
  from abc import ABC, abstractmethod
40
41
  from collections.abc import AsyncIterator, Iterator
@@ -65,12 +66,14 @@ from .result import CANCEL, FAILED, SUCCESS, WAIT, Result, Status
65
66
  from .reusables import TagFunc, extract_call, not_in_template, param2template
66
67
  from .utils import (
67
68
  delay,
69
+ dump_all,
68
70
  filter_func,
69
71
  gen_id,
70
72
  make_exec,
71
73
  )
72
74
 
73
75
  T = TypeVar("T")
76
+ DictOrModel = Union[DictData, BaseModel]
74
77
 
75
78
 
76
79
  class BaseStage(BaseModel, ABC):
@@ -221,7 +224,10 @@ class BaseStage(BaseModel, ABC):
221
224
  return self.execute(params, result=result, event=event)
222
225
  except Exception as e:
223
226
  e_name: str = e.__class__.__name__
224
- result.trace.error(f"[STAGE]: Error Handler:||{e_name}:||{e}")
227
+ result.trace.error(
228
+ f"[STAGE]: Error Handler:||{e_name}:||{e}||"
229
+ f"{traceback.format_exc()}"
230
+ )
225
231
  if dynamic("stage_raise_error", f=raise_error, extras=self.extras):
226
232
  if isinstance(e, StageException):
227
233
  raise
@@ -1170,13 +1176,12 @@ class CallStage(BaseAsyncStage):
1170
1176
  args.pop("result")
1171
1177
 
1172
1178
  args = self.parse_model_args(call_func, args, result)
1173
-
1174
1179
  if inspect.iscoroutinefunction(call_func):
1175
- rs: DictData = await call_func(
1180
+ rs: DictOrModel = await call_func(
1176
1181
  **param2template(args, params, extras=self.extras)
1177
1182
  )
1178
1183
  else:
1179
- rs: DictData = call_func(
1184
+ rs: DictOrModel = call_func(
1180
1185
  **param2template(args, params, extras=self.extras)
1181
1186
  )
1182
1187
 
@@ -1190,7 +1195,7 @@ class CallStage(BaseAsyncStage):
1190
1195
  f"serialize, you must set return be `dict` or Pydantic "
1191
1196
  f"model."
1192
1197
  )
1193
- return result.catch(status=SUCCESS, context=rs)
1198
+ return result.catch(status=SUCCESS, context=dump_all(rs, by_alias=True))
1194
1199
 
1195
1200
  @staticmethod
1196
1201
  def parse_model_args(
@@ -2528,7 +2533,11 @@ class VirtualPyStage(PyStage): # pragma: no cov
2528
2533
  deps: list[str],
2529
2534
  run_id: StrOrNone = None,
2530
2535
  ) -> Iterator[str]:
2531
- """Create the .py file with an input Python string statement.
2536
+ """Create the `.py` file and write an input Python statement and its
2537
+ Python dependency on the header of this file.
2538
+
2539
+         The Python dependency declaration format follows the `uv`
2540
+         recommendation.
2532
2541
 
2533
2542
  :param py: A Python string statement.
2534
2543
  :param values: A variable that want to set before running this
@@ -2544,7 +2553,7 @@ class VirtualPyStage(PyStage): # pragma: no cov
2544
2553
  f"{var} = {value!r}" for var, value in values.items()
2545
2554
  )
2546
2555
 
2547
- # NOTE: uv supports PEP 723 — inline TOML metadata.
2556
+ # NOTE: `uv` supports PEP 723 — inline TOML metadata.
2548
2557
  f.write(
2549
2558
  dedent(
2550
2559
  f"""
@@ -2603,6 +2612,16 @@ class VirtualPyStage(PyStage): # pragma: no cov
2603
2612
  run_id=result.run_id,
2604
2613
  ) as py:
2605
2614
  result.trace.debug(f"[STAGE]: ... Create `{py}` file.")
2615
+ try:
2616
+ import uv
2617
+
2618
+ _ = uv
2619
+ except ImportError:
2620
+ raise ImportError(
2621
+                     "The VirtualPyStage needs you to install `uv` before "
2622
+ "execution."
2623
+ ) from None
2624
+
2606
2625
  rs: CompletedProcess = subprocess.run(
2607
2626
  ["uv", "run", py, "--no-cache"],
2608
2627
  # ["uv", "run", "--python", "3.9", py],
ddeutil/workflow/utils.py CHANGED
@@ -15,10 +15,11 @@ from inspect import isfunction
15
15
  from itertools import chain, islice, product
16
16
  from pathlib import Path
17
17
  from random import randrange
18
- from typing import Any, Final, Optional, TypeVar, Union
18
+ from typing import Any, Final, Optional, TypeVar, Union, overload
19
19
  from zoneinfo import ZoneInfo
20
20
 
21
21
  from ddeutil.core import hash_str
22
+ from pydantic import BaseModel
22
23
 
23
24
  from .__types import DictData, Matrix
24
25
 
@@ -289,3 +290,24 @@ def cut_id(run_id: str, *, num: int = 6) -> str:
289
290
  dt, simple = run_id.split("T", maxsplit=1)
290
291
  return dt[:12] + simple[-num:]
291
292
  return run_id[:12] + run_id[-num:]
293
+
294
+
295
+ @overload
296
+ def dump_all(value: BaseModel, by_alias: bool = False) -> DictData: ...
297
+
298
+
299
+ @overload
300
+ def dump_all(value: T, by_alias: bool = False) -> T: ...
301
+
302
+
303
+ def dump_all(
304
+ value: Union[T, BaseModel], by_alias: bool = False
305
+ ) -> Union[T, DictData]:
306
+ """Dump all BaseModel object to dict."""
307
+ if isinstance(value, dict):
308
+ return {k: dump_all(value[k], by_alias=by_alias) for k in value}
309
+ elif isinstance(value, (list, tuple, set)):
310
+ return type(value)([dump_all(i, by_alias=by_alias) for i in value])
311
+ elif isinstance(value, BaseModel):
312
+ return value.model_dump(by_alias=by_alias)
313
+ return value
@@ -39,7 +39,7 @@ from pydantic.functional_validators import field_validator, model_validator
39
39
  from typing_extensions import Self
40
40
 
41
41
  from .__cron import CronRunner
42
- from .__types import DictData, TupleStr
42
+ from .__types import DictData
43
43
  from .conf import FileLoad, Loader, dynamic
44
44
  from .event import Crontab
45
45
  from .exceptions import WorkflowException
@@ -57,14 +57,6 @@ from .utils import (
57
57
  wait_until_next_minute,
58
58
  )
59
59
 
60
- __all__: TupleStr = (
61
- "Release",
62
- "ReleaseQueue",
63
- "ReleaseType",
64
- "Workflow",
65
- "WorkflowTask",
66
- )
67
-
68
60
 
69
61
  class ReleaseType(str, Enum):
70
62
  """Release Type Enum support the type field on the Release dataclass."""
@@ -711,198 +703,6 @@ class Workflow(BaseModel):
711
703
  },
712
704
  )
713
705
 
714
- def queue(
715
- self,
716
- offset: float,
717
- end_date: datetime,
718
- queue: ReleaseQueue,
719
- audit: type[Audit],
720
- *,
721
- force_run: bool = False,
722
- ) -> ReleaseQueue:
723
- """Generate Release from all on values from the on field and store them
724
- to the ReleaseQueue object.
725
-
726
- :param offset: An offset in second unit for time travel.
727
- :param end_date: An end datetime object.
728
- :param queue: A workflow queue object.
729
- :param audit: An audit class that want to make audit object.
730
- :param force_run: A flag that allow to release workflow if the audit
731
- with that release was pointed.
732
-
733
- :rtype: ReleaseQueue
734
- """
735
- for on in self.on:
736
-
737
- queue.gen(
738
- end_date,
739
- audit,
740
- on.next(get_dt_now(offset=offset).replace(microsecond=0)),
741
- self.name,
742
- force_run=force_run,
743
- )
744
-
745
- return queue
746
-
747
- def poke(
748
- self,
749
- params: Optional[DictData] = None,
750
- start_date: Optional[datetime] = None,
751
- *,
752
- run_id: Optional[str] = None,
753
- periods: int = 1,
754
- audit: Optional[Audit] = None,
755
- force_run: bool = False,
756
- timeout: int = 1800,
757
- max_poking_pool_worker: int = 2,
758
- ) -> Result:
759
- """Poke workflow with a start datetime value that will pass to its
760
- `on` field on the threading executor pool for execute the `release`
761
- method (It run all schedules that was set on the `on` values).
762
-
763
- This method will observe its `on` field that nearing to run with the
764
- `self.release()` method.
765
-
766
- The limitation of this method is not allow run a date that gather
767
- than the current date.
768
-
769
- :param params: (DictData) A parameter data.
770
- :param start_date: (datetime) A start datetime object.
771
- :param run_id: (str) A workflow running ID for this poke.
772
- :param periods: (int) A periods in minutes value that use to run this
773
- poking. (Default is 1)
774
- :param audit: (Audit) An audit object that want to use on this poking
775
- process.
776
- :param force_run: (bool) A flag that allow to release workflow if the
777
- audit with that release was pointed. (Default is False)
778
- :param timeout: (int) A second value for timeout while waiting all
779
- futures run completely.
780
- :param max_poking_pool_worker: (int) The maximum poking pool worker.
781
- (Default is 2 workers)
782
-
783
- :raise WorkflowException: If the periods parameter less or equal than 0.
784
-
785
- :rtype: Result
786
- :return: A list of all results that return from `self.release` method.
787
- """
788
- audit: type[Audit] = audit or get_audit(extras=self.extras)
789
- result: Result = Result(
790
- run_id=(run_id or gen_id(self.name, unique=True))
791
- )
792
-
793
- # VALIDATE: Check the periods value should gather than 0.
794
- if periods <= 0:
795
- raise WorkflowException(
796
- "The period of poking should be `int` and grater or equal "
797
- "than 1."
798
- )
799
-
800
- if len(self.on) == 0:
801
- result.trace.warning(
802
- f"[POKING]: {self.name!r} not have any schedule!!!"
803
- )
804
- return result.catch(status=SUCCESS, context={"outputs": []})
805
-
806
- # NOTE: Create the current date that change microsecond to 0
807
- current_date: datetime = datetime.now().replace(microsecond=0)
808
-
809
- if start_date is None:
810
- # NOTE: Force change start date if it gathers than the current date,
811
- # or it does not pass to this method.
812
- start_date: datetime = current_date
813
- offset: float = 0
814
- elif start_date <= current_date:
815
- start_date = start_date.replace(microsecond=0)
816
- offset: float = (current_date - start_date).total_seconds()
817
- else:
818
- raise WorkflowException(
819
- f"The start datetime should less than or equal the current "
820
- f"datetime, {current_date:%Y-%m-%d %H:%M:%S}."
821
- )
822
-
823
- # NOTE: The end date is using to stop generate queue with an input
824
- # periods value. It will change to MM:59.
825
- # For example:
826
- # (input) start_date = 12:04:12, offset = 2
827
- # (output) end_date = 12:06:59
828
- end_date: datetime = start_date.replace(second=0) + timedelta(
829
- minutes=periods + 1, seconds=-1
830
- )
831
-
832
- result.trace.info(
833
- f"[POKING]: Execute Poking: {self.name!r} ("
834
- f"{start_date:%Y-%m-%d %H:%M:%S} ==> {end_date:%Y-%m-%d %H:%M:%S})"
835
- )
836
-
837
- params: DictData = {} if params is None else params
838
- context: list[Result] = []
839
- q: ReleaseQueue = ReleaseQueue()
840
-
841
- # NOTE: Create reusable partial function and add Release to the release
842
- # queue object.
843
- partial_queue = partial(
844
- self.queue, offset, end_date, audit=audit, force_run=force_run
845
- )
846
- partial_queue(q)
847
- if not q.is_queued:
848
- result.trace.warning(
849
- f"[POKING]: Skip {self.name!r}, not have any queue!!!"
850
- )
851
- return result.catch(status=SUCCESS, context={"outputs": []})
852
-
853
- with ThreadPoolExecutor(
854
- max_workers=dynamic(
855
- "max_poking_pool_worker",
856
- f=max_poking_pool_worker,
857
- extras=self.extras,
858
- ),
859
- thread_name_prefix="wf_poking_",
860
- ) as executor:
861
-
862
- futures: list[Future] = []
863
-
864
- while q.is_queued:
865
-
866
- # NOTE: Pop the latest Release object from the release queue.
867
- release: Release = heappop(q.queue)
868
-
869
- if reach_next_minute(release.date, offset=offset):
870
- result.trace.debug(
871
- f"[POKING]: Skip Release: "
872
- f"{release.date:%Y-%m-%d %H:%M:%S}"
873
- )
874
- heappush(q.queue, release)
875
- wait_until_next_minute(get_dt_now(offset=offset))
876
-
877
- # WARNING: I already call queue poking again because issue
878
- # about the every minute crontab.
879
- partial_queue(q)
880
- continue
881
-
882
- heappush(q.running, release)
883
- futures.append(
884
- executor.submit(
885
- self.release,
886
- release=release,
887
- params=params,
888
- audit=audit,
889
- queue=q,
890
- parent_run_id=result.run_id,
891
- )
892
- )
893
-
894
- partial_queue(q)
895
-
896
- # WARNING: This poking method does not allow to use fail-fast
897
- # logic to catching parallel execution result.
898
- for future in as_completed(futures, timeout=timeout):
899
- context.append(future.result())
900
-
901
- return result.catch(
902
- status=SUCCESS,
903
- context={"outputs": context},
904
- )
905
-
906
706
  def execute_job(
907
707
  self,
908
708
  job: Job,
@@ -1140,6 +940,202 @@ class Workflow(BaseModel):
1140
940
  )
1141
941
 
1142
942
 
943
+ class WorkflowPoke(Workflow):
944
+ """Workflow Poke model that was implemented the poke method."""
945
+
946
+ def queue(
947
+ self,
948
+ offset: float,
949
+ end_date: datetime,
950
+ queue: ReleaseQueue,
951
+ audit: type[Audit],
952
+ *,
953
+ force_run: bool = False,
954
+ ) -> ReleaseQueue:
955
+ """Generate Release from all on values from the on field and store them
956
+ to the ReleaseQueue object.
957
+
958
+ :param offset: An offset in second unit for time travel.
959
+ :param end_date: An end datetime object.
960
+ :param queue: A workflow queue object.
961
+ :param audit: An audit class that want to make audit object.
962
+ :param force_run: A flag that allow to release workflow if the audit
963
+ with that release was pointed.
964
+
965
+ :rtype: ReleaseQueue
966
+ """
967
+ for on in self.on:
968
+
969
+ queue.gen(
970
+ end_date,
971
+ audit,
972
+ on.next(get_dt_now(offset=offset).replace(microsecond=0)),
973
+ self.name,
974
+ force_run=force_run,
975
+ )
976
+
977
+ return queue
978
+
979
+ def poke(
980
+ self,
981
+ params: Optional[DictData] = None,
982
+ start_date: Optional[datetime] = None,
983
+ *,
984
+ run_id: Optional[str] = None,
985
+ periods: int = 1,
986
+ audit: Optional[Audit] = None,
987
+ force_run: bool = False,
988
+ timeout: int = 1800,
989
+ max_poking_pool_worker: int = 2,
990
+ ) -> Result:
991
+ """Poke workflow with a start datetime value that will pass to its
992
+ `on` field on the threading executor pool for execute the `release`
993
+ method (It run all schedules that was set on the `on` values).
994
+
995
+ This method will observe its `on` field that nearing to run with the
996
+ `self.release()` method.
997
+
998
+ The limitation of this method is not allow run a date that gather
999
+ than the current date.
1000
+
1001
+ :param params: (DictData) A parameter data.
1002
+ :param start_date: (datetime) A start datetime object.
1003
+ :param run_id: (str) A workflow running ID for this poke.
1004
+ :param periods: (int) A periods in minutes value that use to run this
1005
+ poking. (Default is 1)
1006
+ :param audit: (Audit) An audit object that want to use on this poking
1007
+ process.
1008
+ :param force_run: (bool) A flag that allow to release workflow if the
1009
+ audit with that release was pointed. (Default is False)
1010
+ :param timeout: (int) A second value for timeout while waiting all
1011
+ futures run completely.
1012
+ :param max_poking_pool_worker: (int) The maximum poking pool worker.
1013
+ (Default is 2 workers)
1014
+
1015
+ :raise WorkflowException: If the periods parameter less or equal than 0.
1016
+
1017
+ :rtype: Result
1018
+ :return: A list of all results that return from `self.release` method.
1019
+ """
1020
+ audit: type[Audit] = audit or get_audit(extras=self.extras)
1021
+ result: Result = Result(
1022
+ run_id=(run_id or gen_id(self.name, unique=True))
1023
+ )
1024
+
1025
+ # VALIDATE: Check the periods value should gather than 0.
1026
+ if periods <= 0:
1027
+ raise WorkflowException(
1028
+ "The period of poking should be `int` and grater or equal "
1029
+ "than 1."
1030
+ )
1031
+
1032
+ if len(self.on) == 0:
1033
+ result.trace.warning(
1034
+ f"[POKING]: {self.name!r} not have any schedule!!!"
1035
+ )
1036
+ return result.catch(status=SUCCESS, context={"outputs": []})
1037
+
1038
+ # NOTE: Create the current date that change microsecond to 0
1039
+ current_date: datetime = datetime.now().replace(microsecond=0)
1040
+
1041
+ if start_date is None:
1042
+ # NOTE: Force change start date if it gathers than the current date,
1043
+ # or it does not pass to this method.
1044
+ start_date: datetime = current_date
1045
+ offset: float = 0
1046
+ elif start_date <= current_date:
1047
+ start_date = start_date.replace(microsecond=0)
1048
+ offset: float = (current_date - start_date).total_seconds()
1049
+ else:
1050
+ raise WorkflowException(
1051
+ f"The start datetime should less than or equal the current "
1052
+ f"datetime, {current_date:%Y-%m-%d %H:%M:%S}."
1053
+ )
1054
+
1055
+ # NOTE: The end date is using to stop generate queue with an input
1056
+ # periods value. It will change to MM:59.
1057
+ # For example:
1058
+ # (input) start_date = 12:04:12, offset = 2
1059
+ # (output) end_date = 12:06:59
1060
+ end_date: datetime = start_date.replace(second=0) + timedelta(
1061
+ minutes=periods + 1, seconds=-1
1062
+ )
1063
+
1064
+ result.trace.info(
1065
+ f"[POKING]: Execute Poking: {self.name!r} "
1066
+ f"({start_date:%Y-%m-%d %H:%M:%S} ==> {end_date:%Y-%m-%d %H:%M:%S})"
1067
+ )
1068
+
1069
+ params: DictData = {} if params is None else params
1070
+ context: list[Result] = []
1071
+ q: ReleaseQueue = ReleaseQueue()
1072
+
1073
+ # NOTE: Create reusable partial function and add Release to the release
1074
+ # queue object.
1075
+ partial_queue = partial(
1076
+ self.queue, offset, end_date, audit=audit, force_run=force_run
1077
+ )
1078
+ partial_queue(q)
1079
+ if not q.is_queued:
1080
+ result.trace.warning(
1081
+ f"[POKING]: Skip {self.name!r}, not have any queue!!!"
1082
+ )
1083
+ return result.catch(status=SUCCESS, context={"outputs": []})
1084
+
1085
+ with ThreadPoolExecutor(
1086
+ max_workers=dynamic(
1087
+ "max_poking_pool_worker",
1088
+ f=max_poking_pool_worker,
1089
+ extras=self.extras,
1090
+ ),
1091
+ thread_name_prefix="wf_poking_",
1092
+ ) as executor:
1093
+
1094
+ futures: list[Future] = []
1095
+
1096
+ while q.is_queued:
1097
+
1098
+ # NOTE: Pop the latest Release object from the release queue.
1099
+ release: Release = heappop(q.queue)
1100
+
1101
+ if reach_next_minute(release.date, offset=offset):
1102
+ result.trace.debug(
1103
+ f"[POKING]: Skip Release: "
1104
+ f"{release.date:%Y-%m-%d %H:%M:%S}"
1105
+ )
1106
+ heappush(q.queue, release)
1107
+ wait_until_next_minute(get_dt_now(offset=offset))
1108
+
1109
+ # WARNING: I already call queue poking again because issue
1110
+ # about the every minute crontab.
1111
+ partial_queue(q)
1112
+ continue
1113
+
1114
+ heappush(q.running, release)
1115
+ futures.append(
1116
+ executor.submit(
1117
+ self.release,
1118
+ release=release,
1119
+ params=params,
1120
+ audit=audit,
1121
+ queue=q,
1122
+ parent_run_id=result.run_id,
1123
+ )
1124
+ )
1125
+
1126
+ partial_queue(q)
1127
+
1128
+ # WARNING: This poking method does not allow to use fail-fast
1129
+ # logic to catching parallel execution result.
1130
+ for future in as_completed(futures, timeout=timeout):
1131
+ context.append(future.result())
1132
+
1133
+ return result.catch(
1134
+ status=SUCCESS,
1135
+ context={"outputs": context},
1136
+ )
1137
+
1138
+
1143
1139
  @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
1144
1140
  class WorkflowTask:
1145
1141
  """Workflow task Pydantic dataclass object that use to keep mapping data and
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ddeutil-workflow
3
- Version: 0.0.60
3
+ Version: 0.0.61
4
4
  Summary: Lightweight workflow orchestration
5
5
  Author-email: ddeutils <korawich.anu@gmail.com>
6
6
  License: MIT
@@ -1,20 +1,20 @@
1
- ddeutil/workflow/__about__.py,sha256=sQSmxiDbXlnTI1qDQGcyxr1EGvwITzIX0PKi2dOg4LU,28
1
+ ddeutil/workflow/__about__.py,sha256=MeIsfJkRHwfi0J3-dkYVf-U5ewlNB1-grkzIr3wMoa0,28
2
2
  ddeutil/workflow/__cron.py,sha256=5DHQKejG-76L_oREW78RcwMzeyKddJxSMmBzYyMAeeY,28536
3
3
  ddeutil/workflow/__init__.py,sha256=NXEhjzKFdIGa-jtIq9HXChLCjSXNPd8VJ8ltggxbBO8,1371
4
4
  ddeutil/workflow/__main__.py,sha256=x-sYedl4T8p6054aySk-EQX6vhytvPR0HvaBNYxMzp0,364
5
5
  ddeutil/workflow/__types.py,sha256=uNfoRbVmNK5O37UUMVnqcmoghD9oMS1q9fXC0APnjSI,4584
6
6
  ddeutil/workflow/conf.py,sha256=NLvjZ8bpDsn4e0MG3m1vgMdAwtmii5hP1D0STKQyZeo,14907
7
- ddeutil/workflow/event.py,sha256=iAvd7TfAJaMndDhxbi1xNLzl4wNlgLqe1nIseaIm5-Y,10533
7
+ ddeutil/workflow/event.py,sha256=ATQhCgx4F3I2SPQesXxLgOREEGtwkX6uc6jjViQ5pQg,10716
8
8
  ddeutil/workflow/exceptions.py,sha256=TKHBIlfquz3yEb8_kg6UXpxVLKxstt3QA9a1XYsLPJk,2455
9
9
  ddeutil/workflow/job.py,sha256=Php1b3n6c-jddel8PTSa61kAW22QBTetzoLVR4XXM4E,35240
10
- ddeutil/workflow/logs.py,sha256=81wl83dbYDcMctGmWiptmFaoZoXFO2TS0E4sxOILOQk,31321
11
- ddeutil/workflow/params.py,sha256=tBjKe1_e0TlUrSrlMahDuAdNNBlGBAKMmMMQ9eV-YSs,11616
10
+ ddeutil/workflow/logs.py,sha256=iVtyl8i69y7t07tAuWkihc54WlkHCcBy_Ur0WtzJ_lM,31367
11
+ ddeutil/workflow/params.py,sha256=1u8gXs1ZyMq-2eD9H8L7Yjfu5t7b_OzjA0fJvhxdYWY,12505
12
12
  ddeutil/workflow/result.py,sha256=4M9VCcveI8Yz6ZrnI-67SZlry-Z8G7e0hziy1k-pklk,5906
13
- ddeutil/workflow/reusables.py,sha256=mw_Fi763B5am0EmntcjLBF7MDEhKqud2BYHcYyno5Ec,17663
13
+ ddeutil/workflow/reusables.py,sha256=JIXuAicRXhGuocQy71C7pjK4BTl2wo9lNK2p-zhEA6M,17743
14
14
  ddeutil/workflow/scheduler.py,sha256=OsEyj2zscQ-3bDMk2z7UtKlCWLlgoGjaRFt17o1B1ew,27263
15
- ddeutil/workflow/stages.py,sha256=N_DkEUGiwpglovtXx-Wg3zX_03eGBT650zRsZV7knKk,92640
16
- ddeutil/workflow/utils.py,sha256=ADJTt3kiF44qntsRnOUdCFihlB2WWbRE-Tojp5EOYbk,8898
17
- ddeutil/workflow/workflow.py,sha256=BFnaB_7mrYZ3KV07AV16xR9khsoSt9i3QLyEtrLNAqs,44877
15
+ ddeutil/workflow/stages.py,sha256=xH_f7IRohFCnUrtyD-QW86BlJ72p64JmpRTRrLPoF6A,93241
16
+ ddeutil/workflow/utils.py,sha256=rcaDwXaEs4SCdcBKWx4ZCEtpnNfPI8du7Er6b_rg8t4,9569
17
+ ddeutil/workflow/workflow.py,sha256=ty4PUhci7YCPlsNCH8qsaxbzHmBliCVmXczpwlAy_mk,44852
18
18
  ddeutil/workflow/api/__init__.py,sha256=kY30dL8HPY8tY_GBmm7y_3OdoXzB1-EA2a96PLU0AQw,5278
19
19
  ddeutil/workflow/api/logs.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
20
20
  ddeutil/workflow/api/utils.py,sha256=uTtUFVLpiYYahXvCVx8sueRQ03K2Xw1id_gW3IMmX1U,5295
@@ -23,9 +23,9 @@ ddeutil/workflow/api/routes/job.py,sha256=8X5VLDJH6PumyNIY6JGRNBsf2gWN0eG9DzxRPS
23
23
  ddeutil/workflow/api/routes/logs.py,sha256=U6vOni3wd-ZTOwd3yVdSOpgyRmNdcgfngU5KlLM3Cww,5383
24
24
  ddeutil/workflow/api/routes/schedules.py,sha256=14RnaJKEGMSJtncI1H_QQVZNBe_jDS40PPRO6qFc3i0,4805
25
25
  ddeutil/workflow/api/routes/workflows.py,sha256=GJu5PiXEylswrXylEImpncySjeU9chrvrtjhiMCw2RQ,4529
26
- ddeutil_workflow-0.0.60.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
27
- ddeutil_workflow-0.0.60.dist-info/METADATA,sha256=VixljHKK-7rmiv5UC_65sp2DAgb3kHjms1H7vvF0DyY,19427
28
- ddeutil_workflow-0.0.60.dist-info/WHEEL,sha256=0CuiUZ_p9E4cD6NyLD6UG80LBXYyiSYZOKDm5lp32xk,91
29
- ddeutil_workflow-0.0.60.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
30
- ddeutil_workflow-0.0.60.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
31
- ddeutil_workflow-0.0.60.dist-info/RECORD,,
26
+ ddeutil_workflow-0.0.61.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
27
+ ddeutil_workflow-0.0.61.dist-info/METADATA,sha256=LO7R_mpY7OkPg4J6epm2MwWn4qaI2HIMhTqw7T_LEhU,19427
28
+ ddeutil_workflow-0.0.61.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
29
+ ddeutil_workflow-0.0.61.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
30
+ ddeutil_workflow-0.0.61.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
31
+ ddeutil_workflow-0.0.61.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (80.3.1)
2
+ Generator: setuptools (80.4.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5