ddeutil-workflow 0.0.46 → 0.0.48 (py3-none-any.whl)

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
@@ -52,7 +52,7 @@ except ImportError: # pragma: no cov
 
  from .__cron import CronRunner
  from .__types import DictData, TupleStr
- from .conf import Loader, SimLoad, config, get_logger
+ from .conf import Loader, SimLoad, config
  from .cron import On
  from .exceptions import ScheduleException, WorkflowException
  from .logs import Audit, get_audit
@@ -62,7 +62,6 @@ from .workflow import Release, ReleaseQueue, Workflow, WorkflowTask
 
  P = ParamSpec("P")
 
- logger = get_logger("ddeutil.workflow")
  logging.getLogger("schedule").setLevel(logging.INFO)
 
 
@@ -231,6 +230,11 @@ class Schedule(BaseModel):
      enhance the workflow object by adding the alias and values fields.
      """
 
+     extras: DictData = Field(
+         default_factory=dict,
+         description="An extra override config values.",
+     )
+
      desc: Optional[str] = Field(
          default=None,
          description=(
@@ -281,6 +285,9 @@ class Schedule(BaseModel):
          # NOTE: Add name to loader data
          loader_data["name"] = name.replace(" ", "_")
 
+         if extras:
+             loader_data["extras"] = extras
+
          return cls.model_validate(obj=loader_data)
 
      @classmethod
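The new `extras` field on `Schedule`, plus the `loader_data["extras"] = extras` pass-through above, lets a single schedule instance carry config overrides instead of leaning on module-level globals. A minimal sketch of that intent, assuming `Schedule` is exported from `ddeutil.workflow.scheduler` and that `name` is the only field without a default; the schedule name and override key below are illustrative:

```python
from ddeutil.workflow.scheduler import Schedule  # assumed import path

# Validate a Schedule with an override payload; `extras` is the field added
# in this release and defaults to an empty dict via default_factory.
schedule = Schedule.model_validate(
    {
        "name": "schedule-wf",                  # hypothetical schedule name
        "extras": {"stage_raise_error": True},  # illustrative override key
    }
)
print(schedule.extras)                         # {'stage_raise_error': True}
print(Schedule(name="empty-schedule").extras)  # {} when nothing is passed
```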
@@ -288,7 +295,7 @@ class Schedule(BaseModel):
          cls,
          name: str,
          path: Path,
-         externals: DictData | None = None,
+         extras: DictData | None = None,
      ) -> Self:
          """Create Schedule instance from the SimLoad object that receive an
          input schedule name and conf path. The loader object will use this
@@ -297,7 +304,7 @@ class Schedule(BaseModel):
 
          :param name: (str) A schedule name that want to pass to Loader object.
          :param path: (Path) A config path that want to search.
-         :param externals: An external parameters that want to pass to Loader
+         :param extras: An external parameters that want to pass to Loader
              object.
 
          :raise ValueError: If the type does not match with current object.
@@ -305,7 +312,7 @@ class Schedule(BaseModel):
          :rtype: Self
          """
          loader: SimLoad = SimLoad(
-             name, conf_path=path, externals=(externals or {})
+             name, conf_path=path, externals=(extras or {})
          )
 
          # NOTE: Validate the config type match with current connection model
@@ -317,6 +324,9 @@ class Schedule(BaseModel):
          # NOTE: Add name to loader data
          loader_data["name"] = name.replace(" ", "_")
 
+         if extras:
+             loader_data["extras"] = extras
+
          return cls.model_validate(obj=loader_data)
 
      def tasks(
@@ -715,7 +725,7 @@ def schedule_control(
 
      :param schedules: A list of workflow names that want to schedule running.
      :param stop: A datetime value that use to stop running schedule.
-     :param extras: An extra parameters that pass to Loader.
+     :param extras: An extra parameters that want to override core config.
      :param audit: An audit class that use on the workflow task release for
          writing its release audit context.
      :param parent_run_id: A parent workflow running ID for this release.
@@ -761,7 +771,7 @@ def schedule_control(
 
  def schedule_runner(
      stop: datetime | None = None,
-     externals: DictData | None = None,
+     extras: DictData | None = None,
      excluded: list[str] | None = None,
  ) -> Result:  # pragma: no cov
      """Schedule runner function it the multiprocess controller function for
@@ -770,7 +780,7 @@ def schedule_runner(
      path by `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` value.
 
      :param stop: A stop datetime object that force stop running scheduler.
-     :param externals:
+     :param extras: An extra parameter that want to override core config.
      :param excluded: A list of schedule name that want to exclude from finding.
 
      This function will get all workflows that include on value that was
@@ -801,7 +811,7 @@ def schedule_runner(
                  schedule_control,
                  schedules=[load[0] for load in loader],
                  stop=stop,
-                 externals=(externals or {}),
+                 extras=(extras or {}),
                  parent_run_id=result.parent_run_id,
              )
              for loader in batch(
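With `externals` renamed to `extras`, `schedule_runner` now forwards the same override mapping it receives straight into each `schedule_control` worker, as the hunk above shows. A hedged usage sketch, assuming `schedule_runner` is exported from `ddeutil.workflow.scheduler` and that a naive `stop` datetime is acceptable here:

```python
from datetime import datetime, timedelta

from ddeutil.workflow.scheduler import schedule_runner  # assumed import path

# Run the scheduler loop for roughly five minutes; `extras` is the renamed
# override hook, and the key below is illustrative only.
rs = schedule_runner(
    stop=datetime.now() + timedelta(minutes=5),
    extras={"stage_raise_error": True},
    excluded=None,
)
print(rs.context)  # Result context collected from the schedule workers
```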
@@ -43,7 +43,7 @@ from pathlib import Path
  from subprocess import CompletedProcess
  from textwrap import dedent
  from threading import Event
- from typing import Annotated, Optional, Union
+ from typing import Annotated, Any, Optional, Union, get_type_hints
 
  from pydantic import BaseModel, Field
  from pydantic.functional_validators import model_validator
@@ -205,6 +205,7 @@ class BaseStage(BaseModel, ABC):
              run_id=run_id,
              parent_run_id=parent_run_id,
              id_logic=self.iden,
+             extras=self.extras,
          )
 
          try:
@@ -388,14 +389,15 @@ class BaseAsyncStage(BaseStage):
              run_id=run_id,
              parent_run_id=parent_run_id,
              id_logic=self.iden,
+             extras=self.extras,
          )
 
          try:
              rs: Result = await self.axecute(params, result=result, event=event)
-             if to is not None:
+             if to is not None:  # pragma: no cov
                  return self.set_outputs(rs.context, to=to)
              return rs
-         except Exception as e:
+         except Exception as e:  # pragma: no cov
              await result.trace.aerror(f"[STAGE]: {e.__class__.__name__}: {e}")
 
          if dynamic("stage_raise_error", f=raise_error, extras=self.extras):
@@ -814,9 +816,8 @@ class CallStage(BaseStage):
              run_id=gen_id(self.name + (self.id or ""), unique=True)
          )
 
-         print("Extras in CallStage", self.extras)
-
-         t_func: TagFunc = extract_call(
+         has_keyword: bool = False
+         call_func: TagFunc = extract_call(
              param2template(self.uses, params, extras=self.extras),
              registries=self.extras.get("regis_call"),
          )()
@@ -826,55 +827,96 @@ class CallStage(BaseStage):
          args: DictData = {"result": result} | param2template(
              self.args, params, extras=self.extras
          )
-         ips = inspect.signature(t_func)
-         necessary_params: list[str] = [
-             k
-             for k in ips.parameters
+         ips = inspect.signature(call_func)
+         necessary_params: list[str] = []
+         for k in ips.parameters:
              if (
-                 (v := ips.parameters[k]).default == Parameter.empty
-                 and (
-                     v.kind != Parameter.VAR_KEYWORD
-                     or v.kind != Parameter.VAR_POSITIONAL
-                 )
-             )
-         ]
+                 v := ips.parameters[k]
+             ).default == Parameter.empty and v.kind not in (
+                 Parameter.VAR_KEYWORD,
+                 Parameter.VAR_POSITIONAL,
+             ):
+                 necessary_params.append(k)
+             elif v.kind == Parameter.VAR_KEYWORD:
+                 has_keyword = True
+
          if any(
              (k.removeprefix("_") not in args and k not in args)
              for k in necessary_params
          ):
              raise ValueError(
                  f"Necessary params, ({', '.join(necessary_params)}, ), "
-                 f"does not set to args"
+                 f"does not set to args, {list(args.keys())}."
              )
 
-         # NOTE: add '_' prefix if it wants to use.
-         for k in ips.parameters:
-             if k.removeprefix("_") in args:
-                 args[k] = args.pop(k.removeprefix("_"))
-
-         if "result" not in ips.parameters:
+         if "result" not in ips.parameters and not has_keyword:
              args.pop("result")
 
-         result.trace.info(f"[STAGE]: Call-Execute: {t_func.name}@{t_func.tag}")
-         if inspect.iscoroutinefunction(t_func): # pragma: no cov
+         result.trace.info(
+             f"[STAGE]: Call-Execute: {call_func.name}@{call_func.tag}"
+         )
+
+         args = self.parse_model_args(call_func, args, result)
+
+         if inspect.iscoroutinefunction(call_func):
              loop = asyncio.get_event_loop()
              rs: DictData = loop.run_until_complete(
-                 t_func(**param2template(args, params, extras=self.extras))
+                 call_func(**param2template(args, params, extras=self.extras))
              )
          else:
-             rs: DictData = t_func(
+             rs: DictData = call_func(
                  **param2template(args, params, extras=self.extras)
              )
 
          # VALIDATE:
          # Check the result type from call function, it should be dict.
-         if not isinstance(rs, dict):
+         if isinstance(rs, BaseModel):
+             rs: DictData = rs.model_dump()
+         elif not isinstance(rs, dict):
              raise TypeError(
-                 f"Return type: '{t_func.name}@{t_func.tag}' does not serialize "
-                 f"to result model, you change return type to `dict`."
+                 f"Return type: '{call_func.name}@{call_func.tag}' does not "
+                 f"serialize to result model, you change return type to `dict`."
              )
          return result.catch(status=SUCCESS, context=rs)
 
+     @staticmethod
+     def parse_model_args(
+         func: TagFunc,
+         args: DictData,
+         result: Result,
+     ) -> DictData:
+         """Parse Pydantic model from any dict data before parsing to target
+         caller function.
+         """
+         try:
+             type_hints: dict[str, Any] = get_type_hints(func)
+         except TypeError as e:
+             result.trace.warning(
+                 f"[STAGE]: Get type hint raise TypeError: {e}, so, it skip "
+                 f"parsing model args process."
+             )
+             return args
+
+         for arg in type_hints:
+
+             if arg == "return":
+                 continue
+
+             if arg.removeprefix("_") in args:
+                 args[arg] = args.pop(arg.removeprefix("_"))
+
+             t: Any = type_hints[arg]
+
+             # NOTE: Check Result argument was passed to this caller function.
+             #
+             # if is_dataclass(t) and t.__name__ == "Result" and arg not in args:
+             #     args[arg] = result
+
+             if issubclass(t, BaseModel) and arg in args:
+                 args[arg] = t.model_validate(obj=args[arg])
+
+         return args
+
 
  class TriggerStage(BaseStage):
      """Trigger Workflow execution stage that execute another workflow. This
@@ -3,7 +3,7 @@
  # Licensed under the MIT License. See LICENSE in the project root for
  # license information.
  # ------------------------------------------------------------------------------
- # [x] Use dynamic config`
+ # [x] Use dynamic config
  """A Workflow module that is the core model of this package."""
  from __future__ import annotations
 
@@ -32,7 +32,7 @@ from typing_extensions import Self
 
  from .__cron import CronJob, CronRunner
  from .__types import DictData, TupleStr
- from .conf import Loader, SimLoad, dynamic, get_logger
+ from .conf import Loader, SimLoad, dynamic
  from .cron import On
  from .exceptions import JobException, WorkflowException
  from .job import Job
@@ -47,8 +47,6 @@ from .utils import (
      wait_to_next_minute,
  )
 
- logger = get_logger("ddeutil.workflow")
-
  __all__: TupleStr = (
      "Release",
      "ReleaseQueue",
@@ -75,10 +73,10 @@ class Release:
      that use with the `workflow.release` method.
      """
 
-     date: datetime = field()
-     offset: float = field()
-     end_date: datetime = field()
-     runner: CronRunner = field()
+     date: datetime
+     offset: float
+     end_date: datetime
+     runner: CronRunner
      type: ReleaseType = field(default=ReleaseType.DEFAULT)
 
      def __repr__(self) -> str:
@@ -94,13 +92,13 @@ class Release:
 
      @classmethod
      def from_dt(
-         cls, dt: datetime | str, *, externals: Optional[DictData] = None
+         cls, dt: datetime | str, *, extras: Optional[DictData] = None
      ) -> Self:
          """Construct Release via datetime object only.
 
          :param dt: (datetime | str) A datetime object or string that want to
              construct to the Release object.
-         :param externals: An external parameters that want to pass to override
+         :param extras: An extra parameters that want to pass to override
              config.
 
          :raise TypeError: If the type of the dt argument does not valid with
@@ -120,8 +118,10 @@ class Release:
              date=dt,
              offset=0,
              end_date=dt + timedelta(days=1),
-             runner=CronJob("* * * * *").schedule(
-                 dt.replace(tzinfo=dynamic("tz", extras=externals))
+             runner=(
+                 CronJob("* * * * *").schedule(
+                     dt.replace(tzinfo=dynamic("tz", extras=extras))
+                 )
              ),
          )
 
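`from_dt` now threads the renamed `extras` mapping into `dynamic("tz", ...)`, so a per-call timezone override reaches the wrapped `CronRunner`. A hedged sketch, assuming `Release` is importable from `ddeutil.workflow.workflow` and that the `tz` override takes the same `ZoneInfo` object as the core config:

```python
from datetime import datetime
from zoneinfo import ZoneInfo

from ddeutil.workflow.workflow import Release  # assumed import path

# Build a Release from a plain datetime; "tz" is the dynamic config key read
# above, and the ZoneInfo value mirrors the documented TIMEZONE default.
release = Release.from_dt(
    datetime(2024, 1, 1, 1),
    extras={"tz": ZoneInfo("Asia/Bangkok")},  # assumed override value type
)
print(release.date, release.end_date)  # end_date is date plus one day
```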
@@ -164,11 +164,16 @@ class ReleaseQueue:
 
      @classmethod
      def from_list(
-         cls, queue: list[datetime] | list[Release] | None = None
+         cls,
+         queue: list[datetime] | list[Release] | None = None,
+         extras: Optional[DictData] = None,
      ) -> Self:
          """Construct ReleaseQueue object from an input queue value that passing
          with list of datetime or list of Release.
 
+         :param queue:
+         :param extras: An extra parameter that want to override core config.
+
          :raise TypeError: If the type of input queue does not valid.
 
          :rtype: ReleaseQueue
@@ -179,7 +184,11 @@ class ReleaseQueue:
          if isinstance(queue, list):
 
              if all(isinstance(q, datetime) for q in queue):
-                 return cls(queue=[Release.from_dt(q) for q in queue])
+                 return cls(
+                     queue=[
+                         Release.from_dt(q, extras=(extras or {})) for q in queue
+                     ]
+                 )
 
              if all(isinstance(q, Release) for q in queue):
                  return cls(queue=queue)
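`from_list` now forwards the same `extras` mapping into every `Release.from_dt` call it makes while wrapping plain datetimes, as shown above. A hedged sketch, assuming `ReleaseQueue` is importable from `ddeutil.workflow.workflow`:

```python
from datetime import datetime

from ddeutil.workflow.workflow import ReleaseQueue  # assumed import path

# Each datetime is wrapped into a Release with the shared override mapping.
rq = ReleaseQueue.from_list(
    [datetime(2024, 1, 1, 1), datetime(2024, 1, 1, 2)],
    extras={},  # any core-config override for the wrapped Releases goes here
)
print(len(rq.queue))  # 2
```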
@@ -216,7 +225,7 @@ class ReleaseQueue:
          :rtype: bool
          """
          if isinstance(value, datetime):
-             value = Release.from_dt(value)
+             value = Release.from_dt(value, extras=self.extras)
 
          return (
              (value in self.queue)
              or (value in self.complete)
@@ -445,7 +454,7 @@ class Workflow(BaseModel):
          # "only one value in the on field."
          # )
 
-         extras: DictData = info.data.get("extras", {})
+         extras: Optional[DictData] = info.data.get("extras")
          if len(set_ons) > (
              conf := dynamic("max_on_per_workflow", extras=extras)
          ):
@@ -596,13 +605,14 @@ class Workflow(BaseModel):
 
          :rtype: Result
          """
-         audit: type[Audit] = audit or get_audit()
+         audit: type[Audit] = audit or get_audit(extras=self.extras)
          name: str = override_log_name or self.name
          result: Result = Result.construct_with_rs_or_id(
              result,
              run_id=run_id,
              parent_run_id=parent_run_id,
              id_logic=name,
+             extras=self.extras,
          )
 
          if queue is not None and not isinstance(queue, ReleaseQueue):
@@ -612,7 +622,7 @@ class Workflow(BaseModel):
 
          # VALIDATE: Change release value to Release object.
          if isinstance(release, datetime):
-             release: Release = Release.from_dt(release)
+             release: Release = Release.from_dt(release, extras=self.extras)
 
          result.trace.debug(
              f"[RELEASE]: Start release - {name!r} : "
@@ -659,6 +669,7 @@ class Workflow(BaseModel):
                  parent_run_id=result.parent_run_id,
                  run_id=result.run_id,
                  execution_time=result.alive_time(),
+                 extras=self.extras,
              ).save(excluded=None)
          )
 
@@ -1042,6 +1053,7 @@ class Workflow(BaseModel):
              run_id=run_id,
              parent_run_id=parent_run_id,
              id_logic=self.name,
+             extras=self.extras,
          )
 
          result.trace.info(f"[WORKFLOW]: Start Execute: {self.name!r} ...")
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.46
+ Version: 0.0.48
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -9,7 +9,7 @@ Project-URL: Source Code, https://github.com/ddeutils/ddeutil-workflow/
  Keywords: orchestration,workflow
  Classifier: Topic :: Utilities
  Classifier: Natural Language :: English
- Classifier: Development Status :: 5 - Production/Stable
+ Classifier: Development Status :: 4 - Beta
  Classifier: Intended Audience :: Developers
  Classifier: Operating System :: OS Independent
  Classifier: Programming Language :: Python
@@ -260,31 +260,31 @@ it will use default value and do not raise any error to you.
  > The config value that you will set on the environment should combine with
  > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).
 
- | Name | Component | Default | Override | Description |
- |:-----|:---------:|:--------|:--------:|:------------|
- | **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
- | **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
- | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
- | **CONF_PATH** | Core | `conf` | Yes | The config path that keep all template `.yaml` files. |
- | **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
- | **STAGE_DEFAULT_ID** | Core | `true` | Yes | A flag that enable default stage ID that use for catch an execution output. |
- | **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
- | **JOB_DEFAULT_ID** | Core | `false` | Yes | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
- | **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
- | **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
- | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
- | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | No | A flog that enable generating ID with `md5` algorithm. |
- | **DEBUG_MODE** | Log | `true` | No | A flag that enable logging with debug level mode. |
- | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | No | |
- | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | No | |
- | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | No | |
- | **TRACE_PATH** | Log | `./logs` | No | The log path of the workflow saving log. |
- | **TRACE_ENABLE_WRITE** | Log | `false` | No | |
- | **AUDIT_PATH** | Log | `./audits` | No | |
- | **AUDIT_ENABLE_WRITE** | Log | `true` | No | A flag that enable logging object saving log to its destination. |
- | **MAX_PROCESS** | App | `2` | No | The maximum process worker number that run in scheduler app module. |
- | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | No | A schedule per process that run parallel. |
- | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | No | A time delta value that use to stop scheduler app in json string format. |
+ | Name | Component | Default | Description |
+ |:-----|:---------:|:--------|:------------|
+ | **ROOT_PATH** | Core | `.` | Root path or the project path for this workflow engine. |
+ | **REGISTRY_CALLER** | Core | `.` | List of importable string for the call stage. |
+ | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
+ | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
+ | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
+ | **STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. |
+ | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
+ | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+ | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
+ | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
+ | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
+ | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
+ | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
+ | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
+ | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
+ | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
+ | **TRACE_PATH** | Log | `./logs` | The log path of the workflow saving log. |
+ | **TRACE_ENABLE_WRITE** | Log | `false` | |
+ | **AUDIT_PATH** | Log | `./audits` | |
+ | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
+ | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
+ | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
+ | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
 
  **API Application**:
 
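The README keeps the `WORKFLOW_{component}_{name}` naming rule; this release only drops the table's Override column. A hedged sketch of setting a few of those variables before the package reads its config, with names derived from the table (component plus name, upper case) and values that are illustrative only:

```python
import os

# Environment names follow WORKFLOW_{component}_{name}; set them before the
# first import so the config module picks them up.
os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"
os.environ["WORKFLOW_CORE_STAGE_RAISE_ERROR"] = "true"
os.environ["WORKFLOW_LOG_DEBUG_MODE"] = "false"

from ddeutil.workflow.conf import config  # import path shown in the diff

print(config.conf_path)  # attribute name assumed; CONF_PATH defaults to `conf`
```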
@@ -0,0 +1,31 @@
+ ddeutil/workflow/__about__.py,sha256=OFynARvYDKZ4fFNVea1bykjJJKDpblDyUNtdv9rywxE,28
+ ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
+ ddeutil/workflow/__init__.py,sha256=t7AaJ3gY7E8i2WeL3_8dYz-F5mzskUxsSAx7-Ny4Fhw,1927
+ ddeutil/workflow/__main__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4316
+ ddeutil/workflow/conf.py,sha256=sXN7epudr90I1gUAjwJqvWTQV39mhO6cJhuuOuoYCw0,12153
+ ddeutil/workflow/cron.py,sha256=80SijzMdDOBxTWRsiF-Fmuz7Ym7leY0XT2lzRAPGdXc,8781
+ ddeutil/workflow/exceptions.py,sha256=uLNxzav3HRcr4vaZnvbUIF_eTR6UXXZNaxroMWFOUL4,1418
+ ddeutil/workflow/job.py,sha256=nvcSH1vxQrq8tGDrOs-8wHifaOLrfZdTAUk1vD-QtRA,30762
+ ddeutil/workflow/logs.py,sha256=o_EziK1MgP-7fJIl6bwE58BZHt8FBTrsmrEBhu1XGTo,24670
+ ddeutil/workflow/params.py,sha256=xCtFEh0-G-G-f8y_SXxyf31bU6Ox5p5Z-WbBFXrjy8M,9960
+ ddeutil/workflow/result.py,sha256=6yqWXFE__xMr8VY8xchBhBd3lyU-XX1nHOpx_2V5VGU,5390
+ ddeutil/workflow/reusables.py,sha256=7uamdx0nnBnDHcc0xXqwucItFYHUXI4_O-SHdFHIZCo,17528
+ ddeutil/workflow/scheduler.py,sha256=jyTLML8ppwdCrcuVw9ZMcZ1JwJ1SW6wDrJg5soHDFAw,27681
+ ddeutil/workflow/stages.py,sha256=mnP07SLvGRfggOV1i9bZ7_j5K_ksHlorCYb2crU1pus,49170
+ ddeutil/workflow/utils.py,sha256=sblje9qOtejCHVt8EVrbC0KY98vKqvxccaR5HIkRiTA,7363
+ ddeutil/workflow/workflow.py,sha256=V8uJw16gtjTd8T5aCpvSUr9z_oGQrV-ycybvUjA8NHI,50073
+ ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
+ ddeutil/workflow/api/api.py,sha256=CWtPLgOv2Jus9E7nzG5mG2Z32ZEkUK3JWQ2htZyMRpA,5244
+ ddeutil/workflow/api/log.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
+ ddeutil/workflow/api/repeat.py,sha256=uTtUFVLpiYYahXvCVx8sueRQ03K2Xw1id_gW3IMmX1U,5295
+ ddeutil/workflow/api/routes/__init__.py,sha256=qoGtOMyVgQ5nTUc8J8wH27A8isaxl3IFCX8qoyibeCY,484
+ ddeutil/workflow/api/routes/job.py,sha256=oPwBVP0Mxwxv-bGPlfmxQQ9PcVl0ev9HoPzndpYDCCQ,1954
+ ddeutil/workflow/api/routes/logs.py,sha256=TeRDrEelbKS2Hu_EovgLh0bOdmSv9mfnrIZsrE7uPD4,5353
+ ddeutil/workflow/api/routes/schedules.py,sha256=EgUjyRGhsm6UNaMj5luh6TcY6l571sCHcla-BL1iOfY,4829
+ ddeutil/workflow/api/routes/workflows.py,sha256=JcDOrn1deK8ztFRcMTNATQejG6KMA7JxZLVc4QeBsP4,4527
+ ddeutil_workflow-0.0.48.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ ddeutil_workflow-0.0.48.dist-info/METADATA,sha256=wv_dQPbCSS_1TuKmZ2jl1cWTRu0sM2eGtV3pBHHdWmQ,18841
+ ddeutil_workflow-0.0.48.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ ddeutil_workflow-0.0.48.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ ddeutil_workflow-0.0.48.dist-info/RECORD,,
@@ -1,31 +0,0 @@
- ddeutil/workflow/__about__.py,sha256=yDFXJ7qonVJ1xTa_rhZ4F9WS1AblaopGDHPdNrytYtM,28
- ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
- ddeutil/workflow/__init__.py,sha256=m7ZTCuUOarcTKJuXOyuaXd5WTIO7NTkqCeCrNX3d5i8,1943
- ddeutil/workflow/__main__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4316
- ddeutil/workflow/conf.py,sha256=ZlaaLeZuBzqcnS-gfHQV58dJVMwQaRTjWxWZGCzX42s,12068
- ddeutil/workflow/cron.py,sha256=80SijzMdDOBxTWRsiF-Fmuz7Ym7leY0XT2lzRAPGdXc,8781
- ddeutil/workflow/exceptions.py,sha256=uLNxzav3HRcr4vaZnvbUIF_eTR6UXXZNaxroMWFOUL4,1418
- ddeutil/workflow/job.py,sha256=uDT_lxAmtWDk6OYm6E4_rz_ngMdS5S03YF4D3WZMP8k,30676
- ddeutil/workflow/logs.py,sha256=Ki1t6HkThwimzAe1OSxPPc7OQ4r-kXAc1kB63x2DsOg,21160
- ddeutil/workflow/params.py,sha256=xCtFEh0-G-G-f8y_SXxyf31bU6Ox5p5Z-WbBFXrjy8M,9960
- ddeutil/workflow/result.py,sha256=9tbCmP0Sjy7h9GKWyD5e1bjAzNOWZcnvBFuC6to_f-8,4929
- ddeutil/workflow/reusables.py,sha256=ZE8WfD0WyQUKRV5aujJpGG6g6ODJz-wtgwHbQiCrN-E,17536
- ddeutil/workflow/scheduler.py,sha256=_MDsEHbBVOeF-381U8DfIMDyca_nG3XNXmgX4229_EU,27437
- ddeutil/workflow/stages.py,sha256=G9TtXx2_HzcvgOetcncW0-GzMapqho6VmxENFyoYmt0,47829
- ddeutil/workflow/utils.py,sha256=sblje9qOtejCHVt8EVrbC0KY98vKqvxccaR5HIkRiTA,7363
- ddeutil/workflow/workflow.py,sha256=kEbPr2Wi9n5fDaCi5R26f4SHw7083_TdcIkZw-w7cEA,49716
- ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
- ddeutil/workflow/api/api.py,sha256=b-bMg0aRsEqt8Qb2hNUtamEt2Fq2CgNotF2oXSAdDu8,5226
- ddeutil/workflow/api/log.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
- ddeutil/workflow/api/repeat.py,sha256=cycd1-91j-4v6uY1SkrZHd9l95e-YgVC4UCSNNFuGJ8,5277
- ddeutil/workflow/api/routes/__init__.py,sha256=qoGtOMyVgQ5nTUc8J8wH27A8isaxl3IFCX8qoyibeCY,484
- ddeutil/workflow/api/routes/job.py,sha256=YVta083i8vU8-o4WdKFwDpfdC9vN1dZ6goZSmNlQXHA,1954
- ddeutil/workflow/api/routes/logs.py,sha256=TeRDrEelbKS2Hu_EovgLh0bOdmSv9mfnrIZsrE7uPD4,5353
- ddeutil/workflow/api/routes/schedules.py,sha256=rUWBm5RgLS1PNBHSWwWXJ0l-c5mYWfl9os0BA9_OTEw,4810
- ddeutil/workflow/api/routes/workflows.py,sha256=ctgQGxXfpIV6bHFDM9IQ1_qaQHT6n5-HjJ1-D4GKWpc,4527
- ddeutil_workflow-0.0.46.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
- ddeutil_workflow-0.0.46.dist-info/METADATA,sha256=_HZpnC_wIzRdBFhLXDVsJpfPBevg5YXKDYll6KGg4pE,19129
- ddeutil_workflow-0.0.46.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- ddeutil_workflow-0.0.46.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
- ddeutil_workflow-0.0.46.dist-info/RECORD,,