ddeutil-workflow 0.0.18__py3-none-any.whl → 0.0.20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1084 @@
+ # ------------------------------------------------------------------------------
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+ # Licensed under the MIT License. See LICENSE in the project root for
+ # license information.
+ # ------------------------------------------------------------------------------
+ """
+ The main schedule runner is the ``workflow_runner`` function, which triggers
+ multiprocessing of the ``workflow_control`` function for the schedules listed
+ in the config by ``Loader.finds(Schedule)``.
+
+ The ``workflow_control`` function is the scheduler that releases 2 schedule
+ functions: ``workflow_task`` and ``workflow_monitor``.
+
+     ``workflow_control`` --- Every minute at :02 --> ``workflow_task``
+                          --- Every 5 minutes --> ``workflow_monitor``
+
+ The ``workflow_task`` function runs the ``task.release`` method in a threading
+ object for the multithreading strategy. This ``release`` method runs only one
+ crontab value with the on field.
+ """
21
+ from __future__ import annotations
22
+
23
+ import copy
24
+ import time
25
+ from concurrent.futures import (
26
+ Future,
27
+ ThreadPoolExecutor,
28
+ as_completed,
29
+ )
30
+ from dataclasses import field
31
+ from datetime import datetime, timedelta
32
+ from functools import total_ordering
33
+ from heapq import heappop, heappush
34
+ from queue import Queue
35
+ from textwrap import dedent
36
+ from typing import Optional
37
+
38
+ from pydantic import BaseModel, ConfigDict, Field
39
+ from pydantic.dataclasses import dataclass
40
+ from pydantic.functional_validators import field_validator, model_validator
41
+ from typing_extensions import Self
42
+
43
+ from .__cron import CronJob, CronRunner
44
+ from .__types import DictData, TupleStr
45
+ from .conf import FileLog, Loader, Log, config, get_logger
46
+ from .exceptions import JobException, WorkflowException
47
+ from .job import Job
48
+ from .on import On
49
+ from .utils import (
50
+ Param,
51
+ Result,
52
+ delay,
53
+ gen_id,
54
+ get_diff_sec,
55
+ get_dt_now,
56
+ has_template,
57
+ param2template,
58
+ )
59
+
60
+ logger = get_logger("ddeutil.workflow")
61
+
62
+ __all__: TupleStr = (
63
+ "Workflow",
64
+ "WorkflowRelease",
65
+ "WorkflowQueue",
66
+ "WorkflowTaskData",
67
+ )
68
+
69
+
+ @total_ordering
+ @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
+ class WorkflowRelease:
+     """Workflow release data dataclass object."""
+
+     date: datetime
+     offset: float
+     end_date: datetime
+     runner: CronRunner
+     type: str
+
+     def __repr__(self) -> str:
+         return repr(f"{self.date:%Y-%m-%d %H:%M:%S}")
+
+     def __str__(self) -> str:
+         return f"{self.date:%Y-%m-%d %H:%M:%S}"
+
+     @classmethod
+     def from_dt(cls, dt: datetime) -> Self:
+         """Construct a manual WorkflowRelease from a datetime object with an
+         every-minute cron runner and a one-day end date.
+
+         :param dt: A datetime object for this release.
+         :rtype: Self
+         """
+         return cls(
+             date=dt,
+             offset=0,
+             end_date=dt + timedelta(days=1),
+             runner=CronJob("* * * * *").schedule(dt.replace(tzinfo=config.tz)),
+             type="manual",
+         )
+
+     def __eq__(self, other: WorkflowRelease | datetime) -> bool:
+         if isinstance(other, self.__class__):
+             return self.date == other.date
+         elif isinstance(other, datetime):
+             return self.date == other
+         return NotImplemented
+
+     def __lt__(self, other: WorkflowRelease | datetime) -> bool:
+         if isinstance(other, self.__class__):
+             return self.date < other.date
+         elif isinstance(other, datetime):
+             return self.date < other
+         return NotImplemented
+
+
+ @dataclass
+ class WorkflowQueue:
+     """Workflow queue object that tracks the queued, running, and completed
+     releases of a workflow with heap-ordered lists.
+
+     queue: list[WorkflowRelease] = field(default_factory=list)
+     running: list[WorkflowRelease] = field(default_factory=list)
+     complete: list[WorkflowRelease] = field(default_factory=list)
+
+     @property
+     def is_queued(self) -> bool:
+         """Return True if it has data in the queue."""
+         return len(self.queue) > 0
+
+     def check_queue(self, data: WorkflowRelease) -> bool:
+         """Check whether a WorkflowRelease value already exists in any of
+         the tracking queues.
+
+         :param data: A WorkflowRelease object that wants to check.
+         :rtype: bool
+         """
+         return (
+             (data in self.queue)
+             or (data in self.running)
+             or (data in self.complete)
+         )
+
+     def push_queue(self, data: WorkflowRelease) -> Self:
+         """Push a WorkflowRelease object to the waiting queue."""
+         heappush(self.queue, data)
+         return self
+
+     def push_running(self, data: WorkflowRelease) -> Self:
+         """Push a WorkflowRelease object to the running queue."""
+         heappush(self.running, data)
+         return self
+
+     def remove_running(self, data: WorkflowRelease) -> Self:
+         """Remove a WorkflowRelease object from the running queue if it
+         exists."""
+         if data in self.running:
+             self.running.remove(data)
+         return self
+
+
+ class Workflow(BaseModel):
+     """Workflow Pydantic model. This is the main feature of this project
+     because it is used as the workflow data for running anywhere that you
+     want, or for scheduling tasks in the background. It uses lightweight
+     coding from the Pydantic model and enhances the execute method on it.
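+
+     An illustrative usage sketch (the ``wf-example`` config name and the
+     ``asat-dt`` parameter are hypothetical; it assumes such a workflow
+     exists in the conf path):
+
+         workflow = Workflow.from_loader("wf-example")
+         rs = workflow.execute(params={"asat-dt": "2024-01-01"})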
+     """
+
+     name: str = Field(description="A workflow name.")
+     desc: Optional[str] = Field(
+         default=None,
+         description=(
+             "A workflow description that can be a string of markdown content."
+         ),
+     )
+     params: dict[str, Param] = Field(
+         default_factory=dict,
+         description="The parameters that are needed for this workflow.",
+     )
+     on: list[On] = Field(
+         default_factory=list,
+         description="A list of On instances for this workflow schedule.",
+     )
+     jobs: dict[str, Job] = Field(
+         default_factory=dict,
+         description="A mapping of job ID to job model that is already loaded.",
+     )
+
+     @classmethod
+     def from_loader(
+         cls,
+         name: str,
+         externals: DictData | None = None,
+     ) -> Self:
+         """Create a Workflow instance from the Loader object that only
+         receives an input workflow name. The loader object will use this
+         workflow name to search for the configuration data of this workflow
+         model in the conf path.
+
+         :param name: A workflow name that wants to pass to the Loader object.
+         :param externals: An external parameter that wants to pass to the
+             Loader object.
+         :rtype: Self
+         """
+         loader: Loader = Loader(name, externals=(externals or {}))
+
+         # NOTE: Validate that the config type matches with the current model.
+         if loader.type != cls:
+             raise ValueError(f"Type {loader.type} does not match with {cls}")
+
+         loader_data: DictData = copy.deepcopy(loader.data)
+
+         # NOTE: Add name to loader data
+         loader_data["name"] = name.replace(" ", "_")
+
+         # NOTE: Prepare `on` data
+         cls.__bypass_on(loader_data, externals=externals)
+         return cls.model_validate(obj=loader_data)
+
+     @classmethod
+     def __bypass_on(
+         cls,
+         data: DictData,
+         externals: DictData | None = None,
+     ) -> DictData:
+         """Bypass the on data to the loaded config data.
+
+         :param data: A loaded config data that wants to prepare its on key.
+         :param externals: An external parameter that wants to pass to the
+             Loader object.
+         :rtype: DictData
+         """
+         if on := data.pop("on", []):
+             if isinstance(on, str):
+                 on = [on]
+             if any(not isinstance(i, (dict, str)) for i in on):
+                 raise TypeError("The ``on`` key should be list of str or dict")
+
+             # NOTE: Pass the on value to the Loader, and keep the on model
+             #   object in the on field.
+             data["on"] = [
+                 (
+                     Loader(n, externals=(externals or {})).data
+                     if isinstance(n, str)
+                     else n
+                 )
+                 for n in on
+             ]
+         return data
+
+     @model_validator(mode="before")
+     def __prepare_model_before__(cls, values: DictData) -> DictData:
+         """Prepare the params key of the input values."""
+         # NOTE: Prepare the params type if it is passed with only the type
+         #   value.
+         if params := values.pop("params", {}):
+             values["params"] = {
+                 p: (
+                     {"type": params[p]}
+                     if isinstance(params[p], str)
+                     else params[p]
+                 )
+                 for p in params
+             }
+         return values
+
+     @field_validator("desc", mode="after")
+     def __dedent_desc__(cls, value: str) -> str:
+         """Prepare a description string that was created on a template.
+
+         :param value: A description string value that wants to be dedented.
+         :rtype: str
+         """
+         return dedent(value)
+
+     @field_validator("on", mode="after")
+     def __on_no_dup__(cls, value: list[On]) -> list[On]:
+         """Validate that the on fields do not contain duplicate values, and
+         if they contain an every-minute value, it should be the only on
+         value."""
+         set_ons: set[str] = {str(on.cronjob) for on in value}
+         if len(set_ons) != len(value):
+             raise ValueError(
+                 "The on fields should not contain duplicate on value."
+             )
+
+         # WARNING:
+         # if '* * * * *' in set_ons and len(set_ons) > 1:
+         #     raise ValueError(
+         #         "If it has every minute cronjob on value, it should have "
+         #         "only one value in the on field."
+         #     )
+         return value
+
+     @model_validator(mode="after")
+     def __validate_jobs_need__(self) -> Self:
+         """Validate that every need job in any job exists.
+
+         :rtype: Self
+         """
+         for job in self.jobs:
+             if not_exist := [
+                 need for need in self.jobs[job].needs if need not in self.jobs
+             ]:
+                 raise WorkflowException(
+                     f"The needed jobs: {not_exist} were not found in "
+                     f"{self.name!r}."
+                 )
+
+             # NOTE: Update each job id with its job key from the workflow
+             #   template.
+             self.jobs[job].id = job
+
+         # VALIDATE: Validate that the workflow name is not dynamic with a
+         #   params template.
+         if has_template(self.name):
+             raise ValueError(
+                 f"Workflow name should not have any template, please check, "
+                 f"{self.name!r}."
+             )
+
+         return self
+
+     def job(self, name: str) -> Job:
+         """Return this workflow's job that matches the input name with the
+         Job model.
+
+         :param name: A job name that wants to get from a mapping of job
+             models.
+         :type name: str
+
+         :rtype: Job
+         :return: A job model that exists on this workflow by the input name.
+         """
+         if name not in self.jobs:
+             raise ValueError(
+                 f"A Job {name!r} does not exist in this workflow, "
+                 f"{self.name!r}"
+             )
+         return self.jobs[name]
+
+     def parameterize(self, params: DictData) -> DictData:
+         """Prepare the passing parameters before using them in the execution
+         process. This method will validate the keys of the incoming params
+         against this object's necessary params field, and then create a jobs
+         key in the result mapping that will keep any execution result from
+         its jobs.
+
+             ... {
+             ...     "params": <an-incoming-params>,
+             ...     "jobs": {}
+             ... }
+
+         :param params: A parameter mapping that receives from the workflow
+             execution.
+         :type params: DictData
+
+         :raise WorkflowException: If a parameter that wants to validate does
+             not include a necessary parameter that has the required flag.
+
+         :rtype: DictData
+         :return: The parameter values that are validated with their
+             parameter fields, with the jobs key added to this parameter.
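+
+         For example, with a hypothetical workflow that declares
+         ``params: {"asat-dt": <datetime-param>}``, and assuming the datetime
+         param type parses string input:
+
+             workflow.parameterize({"asat-dt": "2024-01-01"})
+             # -> {"params": {"asat-dt": datetime(2024, 1, 1)}, "jobs": {}}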
342
+ """
343
+ # VALIDATE: Incoming params should have keys that set on this workflow.
344
+ if check_key := tuple(
345
+ f"{k!r}"
346
+ for k in self.params
347
+ if (k not in params and self.params[k].required)
348
+ ):
349
+ raise WorkflowException(
350
+ f"Required Param on this workflow setting does not set: "
351
+ f"{', '.join(check_key)}."
352
+ )
353
+
354
+ # NOTE: Mapping type of param before adding it to the ``params`` key.
355
+ return {
356
+ "params": (
357
+ params
358
+ | {
359
+ k: self.params[k].receive(params[k])
360
+ for k in params
361
+ if k in self.params
362
+ }
363
+ ),
364
+ "jobs": {},
365
+ }
366
+
+     def release(
+         self,
+         release: datetime | WorkflowRelease,
+         params: DictData,
+         run_id: str | None = None,
+         *,
+         log: type[Log] | None = None,
+         queue: WorkflowQueue | list[datetime] | None = None,
+     ) -> Result:
+         """Release the workflow execution, overriding its parameters with
+         the release template values that include the logical date (release
+         date), execution date, and running ID.
+
+         This method allows the workflow to use a log object to save the
+         execution result to a log destination, like a file log in the local
+         `/logs` directory.
+
+         I will add a 0.15-second sleep on every step that interacts with
+         the queue object.
+
+         :param release: A release datetime or WorkflowRelease object.
+         :param params: A workflow parameter that passes to the execute
+             method.
+         :param run_id: A workflow running ID for this release.
+         :param log: A log class that wants to save the execution result.
+         :param queue: A WorkflowQueue object, or a list of release datetimes
+             that were already queued.
+
+         :rtype: Result
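+
+         An illustrative call (assuming ``workflow`` was loaded from config
+         as above; the ``asat-dt`` parameter name is hypothetical):
+
+             rs = workflow.release(
+                 release=datetime(2024, 1, 1, 1),
+                 params={"asat-dt": "2024-01-01"},
+             )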
+         """
+         log: type[Log] = log or FileLog
+         run_id: str = run_id or gen_id(self.name, unique=True)
+
+         # VALIDATE: Change the queue value to a WorkflowQueue object.
+         if queue is None:
+             queue: WorkflowQueue = WorkflowQueue()
+         elif isinstance(queue, list):
+             queue: WorkflowQueue = WorkflowQueue(queue=queue)
+
+         # VALIDATE: Change the release value to a WorkflowRelease object.
+         if isinstance(release, datetime):
+             release: WorkflowRelease = WorkflowRelease.from_dt(release)
+
+         logger.debug(
+             f"({run_id}) [RELEASE]: {self.name!r} : "
+             f"Closely to run >> {release.date:%Y-%m-%d %H:%M:%S}"
+         )
+
+         # NOTE: The release parameters that are used to override if the
+         #   params have templating.
+         release_params: DictData = {
+             "release": {
+                 "logical_date": release.date,
+                 "execute_date": datetime.now(tz=config.tz),
+                 "run_id": run_id,
+                 "timezone": config.tz,
+             }
+         }
+
+         # WARNING: Re-create the workflow object that uses a new running
+         #   workflow ID.
+         rs: Result = self.execute(
+             params=param2template(params, release_params),
+             run_id=run_id,
+         )
+         logger.debug(
+             f"({run_id}) [RELEASE]: {self.name!r} : "
+             f"End release {release.date:%Y-%m-%d %H:%M:%S}"
+         )
+
+         rs.set_parent_run_id(run_id)
+         rs_log: Log = log.model_validate(
+             {
+                 "name": self.name,
+                 "release": release.date,
+                 "type": release.type,
+                 "context": rs.context,
+                 "parent_run_id": rs.parent_run_id,
+                 "run_id": rs.run_id,
+             }
+         )
+
+         # NOTE: Save the execution result to the destination of the input
+         #   log object.
+         rs_log.save(excluded=None)
+
+         # NOTE: Remove this release from the running queue and mark it
+         #   complete.
+         queue.remove_running(release)
+         heappush(queue.complete, release)
+
+         return Result(
+             status=0,
+             context={
+                 "params": params,
+                 "release": {
+                     "status": "success",
+                     "logical_date": release.date,
+                 },
+             },
+             run_id=run_id,
+         )
+
+     def queue_poking(
+         self,
+         offset: float,
+         end_date: datetime,
+         queue: WorkflowQueue,
+         log: type[Log],
+     ) -> WorkflowQueue:
+         """Generate a queue of datetimes from the cron runner that was
+         initialized from the on field, with an offset value.
+
+         :param offset: An offset in seconds that adjusts the current
+             datetime when generating the next runner date.
+         :param end_date: An end datetime that stops generating queue items.
+         :param queue: A WorkflowQueue object that keeps the release queue.
+         :param log: A log class that checks whether a release was already
+             pointed.
+         :rtype: WorkflowQueue
+         """
+         for on in self.on:
+
+             runner: CronRunner = on.next(
+                 get_dt_now(tz=config.tz, offset=offset).replace(microsecond=0)
+             )
+
+             if runner.date > end_date:
+                 continue
+
+             workflow_release = WorkflowRelease(
+                 date=runner.date,
+                 offset=offset,
+                 end_date=end_date,
+                 runner=runner,
+                 type="poking",
+             )
+
+             while queue.check_queue(data=workflow_release) or (
+                 log.is_pointed(name=self.name, release=workflow_release.date)
+             ):
+                 workflow_release = WorkflowRelease(
+                     date=runner.next,
+                     offset=offset,
+                     end_date=end_date,
+                     runner=runner,
+                     type="poking",
+                 )
+
+             if runner.date > end_date:
+                 continue
+
+             queue.push_queue(workflow_release)
+         return queue
+
+     def poke(
+         self,
+         start_date: datetime | None = None,
+         params: DictData | None = None,
+         run_id: str | None = None,
+         periods: int = 1,
+         *,
+         log: type[Log] | None = None,
+     ) -> list[Result]:
+         """Poke this workflow with a threading executor pool for executing
+         all of the schedules that were set on the ``on`` field. This method
+         will observe the schedules that are nearing their run time and
+         release them with the ``self.release()`` method.
+
+         :param start_date: A start datetime object.
+         :param params: The parameters that want to pass to the release
+             method.
+         :param run_id: A workflow running ID for this poke.
+         :param periods: A number of minutes to keep poking.
+         :param log: A log class that wants to use on this poking process.
+
+         :rtype: list[Result]
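+
+         An illustrative call that polls this workflow's schedules for the
+         next 5 minutes (the ``asat-dt`` parameter name is hypothetical):
+
+             results = workflow.poke(periods=5, params={"asat-dt": "2024-01-01"})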
+         """
+         # NOTE: If this workflow does not set an on schedule, it will return
+         #   an empty result.
+         if len(self.on) == 0:
+             logger.info(
+                 f"({run_id}) [POKING]: {self.name!r} does not have any "
+                 f"schedule to run."
+             )
+             return []
+
+         if periods <= 0:
+             raise WorkflowException(
+                 "The period of poking should be an int that is greater than "
+                 "or equal to 1."
+             )
+
+         # NOTE: Create the start_date and offset variables.
+         current_date: datetime = datetime.now(tz=config.tz)
+
+         if start_date and start_date <= current_date:
+             start_date = start_date.replace(tzinfo=config.tz)
+             offset: float = (current_date - start_date).total_seconds()
+         else:
+             start_date: datetime = current_date
+             offset: float = 0
+
+         end_date: datetime = start_date + timedelta(minutes=periods)
+
+         log: type[Log] = log or FileLog
+         run_id: str = run_id or gen_id(self.name, unique=True)
+         logger.info(
+             f"({run_id}) [POKING]: Start Poking: {self.name!r} from "
+             f"{start_date:%Y-%m-%d %H:%M:%S} to {end_date:%Y-%m-%d %H:%M:%S}"
+         )
+
+         params: DictData = params or {}
+         workflow_queue: WorkflowQueue = WorkflowQueue()
+         results: list[Result] = []
+         futures: list[Future] = []
+
+         self.queue_poking(
+             offset, end_date=end_date, queue=workflow_queue, log=log
+         )
+
+         if len(workflow_queue.queue) == 0:
+             logger.info(
+                 f"({run_id}) [POKING]: {self.name!r} does not have any "
+                 f"queue to run."
+             )
+             return []
+
+         with ThreadPoolExecutor(
+             max_workers=config.max_poking_pool_worker,
+             thread_name_prefix="workflow_poking_",
+         ) as executor:
+
+             while workflow_queue.is_queued:
+
+                 wf_release: WorkflowRelease = heappop(workflow_queue.queue)
+                 if (
+                     wf_release.date - get_dt_now(tz=config.tz, offset=offset)
+                 ).total_seconds() > 60:
+                     logger.debug(
+                         f"({run_id}) [POKING]: Waiting because the latest "
+                         f"release has a diff time of more than 60 seconds."
+                     )
+                     heappush(workflow_queue.queue, wf_release)
+                     delay(60)
+                     self.queue_poking(
+                         offset, end_date, queue=workflow_queue, log=log
+                     )
+                     continue
+
+                 # NOTE: Push the workflow release to the running queue.
+                 workflow_queue.push_running(wf_release)
+
+                 futures.append(
+                     executor.submit(
+                         self.release,
+                         release=wf_release,
+                         params=params,
+                         log=log,
+                         queue=workflow_queue,
+                     )
+                 )
+
+                 self.queue_poking(
+                     offset, end_date, queue=workflow_queue, log=log
+                 )
+
+             # WARNING: This poking method does not allow the use of
+             #   fail-fast logic for catching the parallel execution results.
+             for future in as_completed(futures):
+                 rs: Result = future.result(timeout=60)
+                 results.append(rs.set_parent_run_id(run_id))
+
+             while len(workflow_queue.running) > 0:  # pragma: no cov
+                 logger.warning(
+                     f"({run_id}) [POKING]: Waiting for the running queue "
+                     f"to empty before the poking process finishes."
+                 )
+                 delay(10)
+
+         return results
+
+     def execute_job(
+         self,
+         job_id: str,
+         params: DictData,
+         run_id: str | None = None,
+         *,
+         raise_error: bool = True,
+     ) -> Result:
+         """Workflow Job execution that passes dynamic parameters from the
+         workflow execution to the target job.
+
+         This execution is the minimum level of execution of this workflow
+         model. It differs from ``self.execute`` because this method runs
+         only one job and returns with the context of this job data.
+
+         :param job_id: A job ID that wants to execute.
+         :param params: The params that were parameterized from the workflow
+             execution.
+         :param run_id: A workflow running ID for this job execution.
+         :param raise_error: A flag that raises an error instead of catching
+             it to the result if it gets an exception from the job execution.
+
+         :rtype: Result
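+
+         An illustrative call (the ``first-job`` ID and ``asat-dt`` parameter
+         are hypothetical):
+
+             ctx = workflow.parameterize({"asat-dt": "2024-01-01"})
+             rs = workflow.execute_job("first-job", params=ctx)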
+         """
+         run_id: str = run_id or gen_id(self.name, unique=True)
+
+         # VALIDATE: Check that the job ID exists in this workflow.
+         if job_id not in self.jobs:
+             raise WorkflowException(
+                 f"The job ID: {job_id} does not exist in {self.name!r} "
+                 f"workflow."
+             )
+
+         logger.info(f"({run_id}) [WORKFLOW]: Start execute: {job_id!r}")
+
+         # IMPORTANT:
+         #   Change any job running IDs to this workflow running ID.
+         #
+         try:
+             job: Job = self.jobs[job_id]
+             job.set_outputs(
+                 job.execute(params=params, run_id=run_id).context,
+                 to=params,
+             )
+         except JobException as err:
+             logger.error(
+                 f"({run_id}) [WORKFLOW]: {err.__class__.__name__}: {err}"
+             )
+             if raise_error:
+                 raise WorkflowException(
+                     f"Get job execution error {job_id}: JobException: {err}"
+                 ) from None
+             else:
+                 # NOTE: The non-raising error path is not implemented yet.
+                 raise NotImplementedError() from None
+
+         return Result(status=0, context=params).set_run_id(run_id)
+
+     def execute(
+         self,
+         params: DictData,
+         run_id: str | None = None,
+         *,
+         timeout: int = 60,
+     ) -> Result:
+         """Execute the workflow, passing dynamic parameters to all of the
+         jobs that are included in this workflow model's ``jobs`` field.
+
+         The result of the execution process for each job and stage on this
+         workflow will be kept in a dict that can be caught with all jobs and
+         stages by dot annotation.
+
+         For example, when I want to use the output from a previous stage, I
+         can access it with this syntax:
+
+             ... ${job-name}.stages.${stage-id}.outputs.${key}
+
+         :param params: The input parameters to use on workflow execution
+             that will be parameterized before being used.
+         :type params: DictData
+         :param run_id: A workflow running ID for this job execution.
+         :type run_id: str | None
+         :param timeout: A workflow execution timeout in seconds that limits
+             the execution time and the waiting time for job dependencies.
+             Default is 60 seconds.
+         :type timeout: int
+
+         :rtype: Result
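+
+         An illustrative end-to-end call (the job and stage IDs shown are
+         hypothetical):
+
+             rs = workflow.execute(params={"asat-dt": "2024-01-01"})
+             outputs = (
+                 rs.context["jobs"]["first-job"]["stages"]["stage-id"]["outputs"]
+             )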
+         """
+         run_id: str = run_id or gen_id(self.name, unique=True)
+         logger.info(f"({run_id}) [WORKFLOW]: Start Execute: {self.name!r} ...")
+
+         # NOTE: I use this condition because this method allows passing
+         #   empty params, and I do not want to create a new dict object.
+         ts: float = time.monotonic()
+         rs: Result = Result(run_id=run_id)
+
+         # NOTE: It should not do anything if it does not have any jobs.
+         if not self.jobs:
+             logger.warning(
+                 f"({run_id}) [WORKFLOW]: This workflow: {self.name!r} "
+                 f"does not have any jobs"
+             )
+             return rs.catch(status=0, context=params)
+
+         # NOTE: Create a job queue that keeps the jobs that want to run
+         #   after their dependency conditions.
+         jq: Queue = Queue()
+         for job_id in self.jobs:
+             jq.put(job_id)
+
+         # NOTE: Create the data context that will pass to any job executions
+         #   on this workflow.
+         #
+         #   {
+         #       'params': <input-params>,
+         #       'jobs': {},
+         #   }
+         #
+         context: DictData = self.parameterize(params)
+         status: int = 0
+         try:
+             if config.max_job_parallel == 1:
+                 self.__exec_non_threading(
+                     run_id=run_id,
+                     context=context,
+                     ts=ts,
+                     job_queue=jq,
+                     timeout=timeout,
+                 )
+             else:
+                 self.__exec_threading(
+                     run_id=run_id,
+                     context=context,
+                     ts=ts,
+                     job_queue=jq,
+                     worker=config.max_job_parallel,
+                     timeout=timeout,
+                 )
+         except WorkflowException as err:
+             context.update(
+                 {
+                     "error": err,
+                     "error_message": f"{err.__class__.__name__}: {err}",
+                 },
+             )
+             status = 1
+         return rs.catch(status=status, context=context)
+
+     def __exec_threading(
+         self,
+         run_id: str,
+         context: DictData,
+         ts: float,
+         job_queue: Queue,
+         *,
+         worker: int = 2,
+         timeout: int = 600,
+     ) -> DictData:
+         """Workflow execution by a threading strategy.
+
+         If a job needs a dependency, it will check the dependency job ID
+         from the context data before allowing it to run.
+
+         :param context: A context workflow data that wants to pass
+             downstream.
+         :param ts: A start timestamp that is used to check whether the
+             execution time has reached the timeout.
+         :param job_queue: A job queue object.
+         :param timeout: A value in seconds that bounds the running time.
+         :param worker: A number for the threading executor pool size.
+         :rtype: DictData
+         """
+         not_time_out_flag: bool = True
+         logger.debug(
+             f"({run_id}): [WORKFLOW]: Run {self.name} with threading job "
+             f"executor"
+         )
+
+         # IMPORTANT: The job execution can run in parallel and wait on its
+         #   needed jobs.
+         with ThreadPoolExecutor(max_workers=worker) as executor:
+             futures: list[Future] = []
+
+             while not job_queue.empty() and (
+                 not_time_out_flag := ((time.monotonic() - ts) < timeout)
+             ):
+                 job_id: str = job_queue.get()
+                 job: Job = self.jobs[job_id]
+
+                 if any(need not in context["jobs"] for need in job.needs):
+                     job_queue.task_done()
+                     job_queue.put(job_id)
+                     time.sleep(0.25)
+                     continue
+
+                 # NOTE: Start the workflow job execution with a deep copy of
+                 #   the context data before release.
+                 #
+                 #   {
+                 #       'params': <input-params>,
+                 #       'jobs': {},
+                 #   }
+                 futures.append(
+                     executor.submit(
+                         self.execute_job,
+                         job_id,
+                         params=context,
+                     ),
+                 )
+
+                 # NOTE: Mark this job queue done.
+                 job_queue.task_done()
+
+             # NOTE: Wait for all items to finish processing.
+             job_queue.join()
+
+             for future in as_completed(futures, timeout=1800):
+                 if err := future.exception():
+                     logger.error(f"({run_id}) [WORKFLOW]: {err}")
+                     raise WorkflowException(f"{err}")
+                 try:
+                     future.result(timeout=60)
+                 except TimeoutError as err:  # pragma: no cov
+                     raise WorkflowException(
+                         "Timeout when getting result from future"
+                     ) from err
+
+         if not_time_out_flag:
+             return context
+
+         # NOTE: Raise a timeout error.
+         logger.warning(  # pragma: no cov
+             f"({run_id}) [WORKFLOW]: Execution of workflow, {self.name!r}, "
+             f"was timeout"
+         )
+         raise WorkflowException(  # pragma: no cov
+             f"Execution of workflow: {self.name} was timeout"
+         )
+
+     def __exec_non_threading(
+         self,
+         run_id: str,
+         context: DictData,
+         ts: float,
+         job_queue: Queue,
+         *,
+         timeout: int = 600,
+     ) -> DictData:
+         """Workflow execution with a non-threading strategy that uses
+         sequential job running and waits for the previous job to run
+         successfully.
+
+         If a job needs a dependency, it will check the dependency job ID
+         from the context data before allowing it to run.
+
+         :param context: A context workflow data that wants to pass
+             downstream.
+         :param ts: A start timestamp that is used to check whether the
+             execution time has reached the timeout.
+         :param timeout: A value in seconds that bounds the running time.
+         :rtype: DictData
+         """
+         not_time_out_flag: bool = True
+         logger.debug(
+             f"({run_id}) [WORKFLOW]: Run {self.name} with non-threading job "
+             f"executor"
+         )
+
+         while not job_queue.empty() and (
+             not_time_out_flag := ((time.monotonic() - ts) < timeout)
+         ):
+             job_id: str = job_queue.get()
+             job: Job = self.jobs[job_id]
+
+             # NOTE: Wait for the dependency jobs to run successfully before
+             #   release.
+             if any(need not in context["jobs"] for need in job.needs):
+                 job_queue.task_done()
+                 job_queue.put(job_id)
+                 time.sleep(0.05)
+                 continue
+
+             # NOTE: Start the workflow job execution with a deep copy of the
+             #   context data before release. This job execution process will
+             #   run until done before checking whether the whole execution
+             #   has timed out.
+             #
+             #   {
+             #       'params': <input-params>,
+             #       'jobs': {},
+             #   }
+             self.execute_job(job_id=job_id, params=context, run_id=run_id)
+
+             # NOTE: Mark this job queue done.
+             job_queue.task_done()
+
+         # NOTE: Wait for all items to finish processing.
+         job_queue.join()
+
+         if not_time_out_flag:
+             return context
+
+         # NOTE: Raise a timeout error.
+         logger.warning(  # pragma: no cov
+             f"({run_id}) [WORKFLOW]: Execution of workflow was timeout"
+         )
+         raise WorkflowException(  # pragma: no cov
+             f"Execution of workflow: {self.name} was timeout"
+         )
+
+
+ @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
+ class WorkflowTaskData:
+     """Workflow task dataclass that is used to keep the mapping data and
+     objects for passing to a multithreading task.
+
+     This dataclass will be a 1-1 mapping with the workflow and cron runner
+     objects.
+     """
+
+     alias: str
+     workflow: Workflow
+     runner: CronRunner
+     params: DictData
+
+     def release(
+         self,
+         queue: dict[str, list[datetime]],
+         log: type[Log] | None = None,
+         run_id: str | None = None,
+         *,
+         waiting_sec: int = 60,
+         sleep_interval: int = 15,
+     ) -> None:  # pragma: no cov
+         """Workflow task release that uses the same logic as the
+         `workflow.release` method.
+
+         :param queue: A mapping of alias names to lists of queued release
+             datetimes.
+         :param log: A log class for saving the result logging from the
+             workflow execution process.
+         :param run_id: A workflow running ID for this release.
+         :param waiting_sec: A period in seconds within which the workflow
+             is allowed to execute.
+         :param sleep_interval: A value in seconds to wait between checks
+             until it is time to execute.
+         """
+         log: type[Log] = log or FileLog
+         run_id: str = run_id or gen_id(self.workflow.name, unique=True)
+         runner: CronRunner = self.runner
+
+         # NOTE: Get the next schedule time that is generated from now.
+         next_time: datetime = runner.date
+
+         # NOTE: Get the next time until it is not already running or logged.
+         while log.is_pointed(self.workflow.name, next_time) or (
+             next_time in queue[self.alias]
+         ):
+             next_time: datetime = runner.next
+
+         logger.debug(
+             f"({run_id}) [CORE]: {self.workflow.name!r} : {runner.cron} : "
+             f"{next_time:%Y-%m-%d %H:%M:%S}"
+         )
+         heappush(queue[self.alias], next_time)
+         start_sec: float = time.monotonic()
+
+         if get_diff_sec(next_time, tz=runner.tz) > waiting_sec:
+             logger.debug(
+                 f"({run_id}) [WORKFLOW]: {self.workflow.name!r} : "
+                 f"{runner.cron} "
+                 f": Does not closely >> {next_time:%Y-%m-%d %H:%M:%S}"
+             )
+
+             # NOTE: Remove this next running datetime that is not in the
+             #   period from the queue.
+             queue[self.alias].remove(next_time)
+
+             time.sleep(0.2)
+             return
+
+         logger.debug(
+             f"({run_id}) [CORE]: {self.workflow.name!r} : {runner.cron} : "
+             f"Closely to run >> {next_time:%Y-%m-%d %H:%M:%S}"
+         )
+
+         # NOTE: Release when the time is nearly at the schedule time.
+         while (duration := get_diff_sec(next_time, tz=config.tz)) > (
+             sleep_interval + 5
+         ):
+             logger.debug(
+                 f"({run_id}) [CORE]: {self.workflow.name!r} : {runner.cron} "
+                 f": Sleep until: {duration}"
+             )
+             time.sleep(sleep_interval)
+
+         time.sleep(0.5)
+
+         # NOTE: The release parameters that are used to override if the
+         #   params have templating.
+         release_params: DictData = {
+             "release": {
+                 "logical_date": next_time,
+             },
+         }
+
+         # WARNING: Re-create the workflow object that uses a new running
+         #   workflow ID.
+         rs: Result = self.workflow.execute(
+             params=param2template(self.params, release_params),
+         )
+         logger.debug(
+             f"({run_id}) [CORE]: {self.workflow.name!r} : {runner.cron} : "
+             f"End release - {next_time:%Y-%m-%d %H:%M:%S}"
+         )
+
+         # NOTE: Set the parent ID on this result.
+         rs.set_parent_run_id(run_id)
+
+         # NOTE: Save the result to the log object's destination.
+         rs_log: Log = log.model_validate(
+             {
+                 "name": self.workflow.name,
+                 "type": "schedule",
+                 "release": next_time,
+                 "context": rs.context,
+                 "parent_run_id": rs.parent_run_id,
+                 "run_id": rs.run_id,
+             }
+         )
+         rs_log.save(excluded=None)
+
+         # NOTE: Remove the current release date from the running queue.
+         queue[self.alias].remove(next_time)
+         total_sec: float = time.monotonic() - start_sec
+
+         # IMPORTANT:
+         #   Add the next running datetime to the workflow task queue.
+         future_running_time: datetime = runner.next
+
+         while (
+             future_running_time in queue[self.alias]
+             or (future_running_time - next_time).total_seconds() < total_sec
+         ):  # pragma: no cov
+             future_running_time: datetime = runner.next
+
+         # NOTE: Queue the next release date.
+         logger.debug(f"[CORE]: {'-' * 100}")
+
+     def __eq__(self, other) -> bool:
+         """Return True if the other WorkflowTaskData object has the same
+         workflow name and cron runner as this object."""
+         if isinstance(other, WorkflowTaskData):
+             return (
+                 self.workflow.name == other.workflow.name
+                 and self.runner.cron == other.runner.cron
+             )
+         return NotImplemented