ddeutil-workflow 0.0.48__py3-none-any.whl → 0.0.49__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -91,7 +91,7 @@ def make_filter_registry(
  :rtype: dict[str, FilterRegistry]
  """
  rs: dict[str, FilterRegistry] = {}
- for module in dynamic("regis_filter", f=registers):
+ for module in dynamic("registry_filter", f=registers):
  # NOTE: try to sequential import task functions
  try:
  importer = import_module(module)
@@ -343,7 +343,7 @@ def param2template(
  :returns: An any getter value from the params input.
  """
  registers: Optional[list[str]] = (
- extras.get("regis_filter") if extras else None
+ extras.get("registry_filter") if extras else None
  )
  filters: dict[str, FilterRegistry] = filters or make_filter_registry(
  registers=registers
@@ -449,7 +449,7 @@ def make_registry(
  """
  rs: dict[str, Registry] = {}
  regis_calls: list[str] = dynamic(
- "regis_call", f=registries
+ "registry_caller", f=registries
  ) # pragma: no cov
  regis_calls.extend(["ddeutil.vendors"])
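The three hunks above rename the override keys that point at user-defined template filters and caller (task) modules. A minimal sketch of the new spelling, assuming the keys map one-to-one onto the names passed to `dynamic()`; the module paths are hypothetical:

```python
# Sketch only: key names follow this diff; the module paths are made up.
extras = {
    "registry_filter": ["my_project.filters"],  # was "regis_filter" in 0.0.48
    "registry_caller": ["my_project.tasks"],    # was "regis_call" in 0.0.48
}
```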
 
@@ -3,7 +3,7 @@
  # Licensed under the MIT License. See LICENSE in the project root for
  # license information.
  # ------------------------------------------------------------------------------
- # [x] Use fix config
+ # [x] Use dynamic config
  """The main schedule running is `schedule_runner` function that trigger the
  multiprocess of `schedule_control` function for listing schedules on the
  config by `Loader.finds(Schedule)`.
@@ -36,7 +36,7 @@ from textwrap import dedent
  from threading import Thread
  from typing import Callable, Optional, TypedDict, Union

- from pydantic import BaseModel, Field
+ from pydantic import BaseModel, Field, ValidationInfo
  from pydantic.functional_validators import field_validator, model_validator
  from typing_extensions import Self

@@ -52,7 +52,7 @@ except ImportError: # pragma: no cov

  from .__cron import CronRunner
  from .__types import DictData, TupleStr
- from .conf import Loader, SimLoad, config
+ from .conf import Loader, SimLoad, dynamic
  from .cron import On
  from .exceptions import ScheduleException, WorkflowException
  from .logs import Audit, get_audit
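These two hunks mark the scheduler module's move from the static `config` object to per-call `dynamic()` lookups. A rough sketch of the pattern, assuming an extras key overrides the config value of the same name (the override value is illustrative):

```python
# Sketch only: the call shape follows this diff; the one-to-one mapping
# between extras keys and config names is an assumption.
from zoneinfo import ZoneInfo

from ddeutil.workflow.conf import dynamic

tz = dynamic("tz", extras=None)                     # falls back to core config
tz = dynamic("tz", extras={"tz": ZoneInfo("UTC")})  # per-call override
```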
@@ -89,6 +89,11 @@ class ScheduleWorkflow(BaseModel):
  not want to change on the workflow model.
  """

+ extras: DictData = Field(
+ default_factory=dict,
+ description="An extra parameters that want to override config values.",
+ )
+
  alias: Optional[str] = Field(
  default=None,
  description="An alias name of workflow that use for schedule model.",
@@ -121,14 +126,17 @@ class ScheduleWorkflow(BaseModel):
  if not values.get("alias"):
  values["alias"] = values.get("name")

- cls.__bypass_on(values)
+ cls.__bypass_on(values, extras=values.get("extras"))
  return values

  @classmethod
- def __bypass_on(cls, data: DictData) -> DictData:
+ def __bypass_on(
+ cls, data: DictData, *, extras: Optional[DictData] = None
+ ) -> DictData:
  """Bypass and prepare the on data to loaded config data.

  :param data: A data that want to validate for model initialization.
+ :param extras: An extra parameter that want to override core config.

  :rtype: DictData
  """
@@ -143,14 +151,14 @@ class ScheduleWorkflow(BaseModel):
  # NOTE: Pass on value to Loader and keep on model object to on
  # field.
  data["on"] = [
- Loader(n, externals={}).data if isinstance(n, str) else n
+ Loader(n, externals=extras).data if isinstance(n, str) else n
  for n in on
  ]

  return data

  @field_validator("on", mode="after")
- def __on_no_dup__(cls, value: list[On]) -> list[On]:
+ def __on_no_dup__(cls, value: list[On], info: ValidationInfo) -> list[On]:
  """Validate the on fields should not contain duplicate values and if it
  contains every minute value, it should have only one on value.

@@ -162,10 +170,12 @@ class ScheduleWorkflow(BaseModel):
  "The on fields should not contain duplicate on value."
  )

- if len(set_ons) > config.max_on_per_workflow:
+ extras: Optional[DictData] = info.data.get("extras")
+ if len(set_ons) > (
+ conf := dynamic("max_cron_per_workflow", extras=extras)
+ ):
  raise ValueError(
- f"The number of the on should not more than "
- f"{config.max_on_per_workflow} crontab."
+ f"The number of the on should not more than {conf} crontabs."
  )

  return value
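The validator now reads `extras` from `info.data` (the already-validated sibling fields) and resolves the limit through `dynamic("max_cron_per_workflow", ...)` instead of the fixed `config.max_on_per_workflow`. A hedged sketch of what that enables, assuming the extras key matches the config name used in the validator and that the import path is `ddeutil.workflow.scheduler`:

```python
# Sketch only: the workflow name is hypothetical; the override key is assumed
# to match the dynamic() config name shown in the hunk above.
from ddeutil.workflow.scheduler import ScheduleWorkflow  # import path assumed

sw = ScheduleWorkflow(
    name="wf-example",
    extras={"max_cron_per_workflow": 10},  # raise the crontab limit per model
)
```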
@@ -174,8 +184,6 @@ class ScheduleWorkflow(BaseModel):
  self,
  start_date: datetime,
  queue: dict[str, ReleaseQueue],
- *,
- extras: DictData | None = None,
  ) -> list[WorkflowTask]:
  """Return the list of WorkflowTask object from the specific input
  datetime that mapping with the on field.
@@ -185,17 +193,15 @@ class ScheduleWorkflow(BaseModel):

  :param start_date: A start date that get from the workflow schedule.
  :param queue: A mapping of name and list of datetime for queue.
- :param extras: An extra parameters that pass to the Loader object.

  :rtype: list[WorkflowTask]
  :return: Return the list of WorkflowTask object from the specific
  input datetime that mapping with the on field.
  """
  workflow_tasks: list[WorkflowTask] = []
- extras: DictData = extras or {}

  # NOTE: Loading workflow model from the name of workflow.
- wf: Workflow = Workflow.from_conf(self.name, extras=extras)
+ wf: Workflow = Workflow.from_conf(self.name, extras=self.extras)
  wf_queue: ReleaseQueue = queue[self.alias]

  # IMPORTANT: Create the default 'on' value if it does not pass the `on`
@@ -217,6 +223,7 @@ class ScheduleWorkflow(BaseModel):
  workflow=wf,
  runner=runner,
  values=self.values,
+ extras=self.extras,
  ),
  )

@@ -232,7 +239,7 @@ class Schedule(BaseModel):

  extras: DictData = Field(
  default_factory=dict,
- description="An extra override config values.",
+ description="An extra parameters that want to override config values.",
  )

  desc: Optional[str] = Field(
@@ -333,8 +340,6 @@ class Schedule(BaseModel):
  self,
  start_date: datetime,
  queue: dict[str, ReleaseQueue],
- *,
- extras: DictData | None = None,
  ) -> list[WorkflowTask]:
  """Return the list of WorkflowTask object from the specific input
  datetime that mapping with the on field from workflow schedule model.
@@ -342,8 +347,6 @@ class Schedule(BaseModel):
  :param start_date: A start date that get from the workflow schedule.
  :param queue: (dict[str, ReleaseQueue]) A mapping of name and list of
  datetime for queue.
- :param extras: (DictData) An extra parameters that pass to the Loader
- object.

  :rtype: list[WorkflowTask]
  :return: Return the list of WorkflowTask object from the specific
@@ -352,13 +355,13 @@ class Schedule(BaseModel):
  workflow_tasks: list[WorkflowTask] = []

  for workflow in self.workflows:
+ if self.extras:
+ workflow.extras = self.extras

  if workflow.alias not in queue:
  queue[workflow.alias] = ReleaseQueue()

- workflow_tasks.extend(
- workflow.tasks(start_date, queue=queue, extras=extras)
- )
+ workflow_tasks.extend(workflow.tasks(start_date, queue=queue))

  return workflow_tasks
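With the standalone `extras` argument gone from `tasks()`, overrides now ride on the model itself: `Schedule.tasks()` copies `self.extras` onto every `ScheduleWorkflow`, which forwards it to `Workflow.from_conf` and the produced `WorkflowTask`. A rough usage sketch, assuming a schedule named `schedule-example` exists in the config path and that the import path is `ddeutil.workflow.scheduler`:

```python
# Sketch only: the schedule name is hypothetical; call shapes follow this diff.
from datetime import datetime

from ddeutil.workflow.scheduler import Schedule  # import path assumed

schedule = Schedule.from_conf(
    "schedule-example",
    extras={"registry_caller": ["my_project.tasks"]},  # flows down to workflows
)
tasks = schedule.tasks(datetime.now(), queue={})
```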
 
@@ -366,24 +369,26 @@ class Schedule(BaseModel):
  self,
  *,
  stop: datetime | None = None,
- extras: DictData | None = None,
  audit: type[Audit] | None = None,
  parent_run_id: str | None = None,
  ) -> Result: # pragma: no cov
  """Pending this schedule tasks with the schedule package.

  :param stop: A datetime value that use to stop running schedule.
- :param extras: An extra parameters that pass to Loader.
  :param audit: An audit class that use on the workflow task release for
  writing its release audit context.
  :param parent_run_id: A parent workflow running ID for this release.
  """
- audit: type[Audit] = audit or get_audit()
+ audit: type[Audit] = audit or get_audit(extras=self.extras)
  result: Result = Result().set_parent_run_id(parent_run_id)

  # NOTE: Create the start and stop datetime.
- start_date: datetime = datetime.now(tz=config.tz)
- stop_date: datetime = stop or (start_date + config.stop_boundary_delta)
+ start_date: datetime = datetime.now(
+ tz=dynamic("tz", extras=self.extras)
+ )
+ stop_date: datetime = stop or (
+ start_date + dynamic("stop_boundary_delta", extras=self.extras)
+ )

  # IMPORTANT: Create main mapping of queue and thread object.
  queue: dict[str, ReleaseQueue] = {}
@@ -394,7 +399,7 @@ class Schedule(BaseModel):
  ) + timedelta(minutes=1)

  scheduler_pending(
- tasks=self.tasks(start_date_waiting, queue=queue, extras=extras),
+ tasks=self.tasks(start_date_waiting, queue=queue),
  stop=stop_date,
  queue=queue,
  threads=threads,
@@ -469,6 +474,7 @@ def schedule_task(
  audit: type[Audit],
  *,
  parent_run_id: str | None = None,
+ extras: Optional[DictData] = None,
  ) -> ResultOrCancel:
  """Schedule task function that generate thread of workflow task release
  method in background. This function do the same logic as the workflow poke
@@ -483,12 +489,13 @@ def schedule_task(
  :param threads: A mapping of alias name and Thread object.
  :param audit: An audit class that want to make audit object.
  :param parent_run_id: A parent workflow running ID for this release.
+ :param extras: An extra parameter that want to override the core config.

  :rtype: ResultOrCancel
  """
  result: Result = Result().set_parent_run_id(parent_run_id)
- current_date: datetime = datetime.now(tz=config.tz)
- if current_date > stop.replace(tzinfo=config.tz):
+ current_date: datetime = datetime.now(tz=dynamic("tz", extras=extras))
+ if current_date > stop.replace(tzinfo=dynamic("tz", extras=extras)):
  return CancelJob

  # IMPORTANT:
@@ -569,7 +576,7 @@ def schedule_task(

  threads[thread_name] = {
  "thread": thread,
- "start_date": datetime.now(tz=config.tz),
+ "start_date": datetime.now(tz=dynamic("tz", extras=extras)),
  "release_date": release.date,
  }

@@ -714,8 +721,8 @@ def scheduler_pending(
  def schedule_control(
  schedules: list[str],
  stop: datetime | None = None,
- extras: DictData | None = None,
  *,
+ extras: DictData | None = None,
  audit: type[Audit] | None = None,
  parent_run_id: str | None = None,
  ) -> Result: # pragma: no cov
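`extras` moves behind the `*` marker, so callers must now pass it by keyword. A minimal call sketch with hypothetical schedule names, assuming the extras key overrides the config option of the same name and that the import path is `ddeutil.workflow.scheduler`:

```python
# Sketch only: schedule names are hypothetical; per this diff, passing extras
# positionally would now raise a TypeError.
from datetime import timedelta

from ddeutil.workflow.scheduler import schedule_control  # import path assumed

result = schedule_control(
    ["schedule-one", "schedule-two"],
    extras={"stop_boundary_delta": timedelta(minutes=10)},  # assumed key
)
```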
@@ -732,12 +739,14 @@ def schedule_control(

  :rtype: Result
  """
- audit: type[Audit] = audit or get_audit()
+ audit: type[Audit] = audit or get_audit(extras=extras)
  result: Result = Result().set_parent_run_id(parent_run_id)

  # NOTE: Create the start and stop datetime.
- start_date: datetime = datetime.now(tz=config.tz)
- stop_date: datetime = stop or (start_date + config.stop_boundary_delta)
+ start_date: datetime = datetime.now(tz=dynamic("tz", extras=extras))
+ stop_date: datetime = stop or (
+ start_date + dynamic("stop_boundary_delta", extras=extras)
+ )

  # IMPORTANT: Create main mapping of queue and thread object.
  queue: dict[str, ReleaseQueue] = {}
@@ -750,10 +759,10 @@ def schedule_control(
  tasks: list[WorkflowTask] = []
  for name in schedules:
  tasks.extend(
- Schedule.from_conf(name, extras=extras).tasks(
- start_date_waiting,
- queue=queue,
- extras=extras,
+ (
+ Schedule.from_conf(name, extras=extras).tasks(
+ start_date_waiting, queue=queue
+ )
  ),
  )

@@ -771,6 +780,7 @@ def schedule_control(

  def schedule_runner(
  stop: datetime | None = None,
+ *,
  extras: DictData | None = None,
  excluded: list[str] | None = None,
  ) -> Result: # pragma: no cov
@@ -803,7 +813,7 @@ def schedule_runner(
  context: DictData = {"schedules": [], "threads": []}

  with ProcessPoolExecutor(
- max_workers=config.max_schedule_process,
+ max_workers=dynamic("max_schedule_process", extras=extras),
  ) as executor:

  futures: list[Future] = [
@@ -811,12 +821,12 @@ def schedule_runner(
  schedule_control,
  schedules=[load[0] for load in loader],
  stop=stop,
- extras=(extras or {}),
+ extras=extras,
  parent_run_id=result.parent_run_id,
  )
  for loader in batch(
  Loader.finds(Schedule, excluded=excluded),
- n=config.max_schedule_per_process,
+ n=dynamic("max_schedule_per_process", extras=extras),
  )
  ]
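`schedule_runner` now also takes `extras` keyword-only and sizes both its process pool and per-process batch through `dynamic()`. A hedged sketch, assuming those config names can be overridden through the same extras mapping and that the import path is `ddeutil.workflow.scheduler`:

```python
# Sketch only: whether these keys are honored as overrides is an assumption
# based on the dynamic() calls shown above.
from ddeutil.workflow.scheduler import schedule_runner  # import path assumed

result = schedule_runner(
    stop=None,
    extras={"max_schedule_process": 2, "max_schedule_per_process": 50},
)
```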
 
@@ -831,4 +841,4 @@ def schedule_runner(
  context["schedule"].extend(rs.context.get("schedules", []))
  context["threads"].extend(rs.context.get("threads", []))

- return result.catch(status=0, context=context)
+ return result.catch(status=SUCCESS, context=context)