ddeutil-workflow 0.0.32__py3-none-any.whl → 0.0.34__py3-none-any.whl
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +20 -12
- ddeutil/workflow/api/api.py +2 -2
- ddeutil/workflow/api/route.py +4 -3
- ddeutil/workflow/audit.py +252 -0
- ddeutil/workflow/{hook.py → call.py} +27 -27
- ddeutil/workflow/conf.py +163 -271
- ddeutil/workflow/job.py +113 -144
- ddeutil/workflow/result.py +199 -46
- ddeutil/workflow/scheduler.py +167 -151
- ddeutil/workflow/{stage.py → stages.py} +174 -89
- ddeutil/workflow/utils.py +20 -2
- ddeutil/workflow/workflow.py +172 -148
- {ddeutil_workflow-0.0.32.dist-info → ddeutil_workflow-0.0.34.dist-info}/METADATA +43 -38
- ddeutil_workflow-0.0.34.dist-info/RECORD +26 -0
- ddeutil_workflow-0.0.32.dist-info/RECORD +0 -25
- {ddeutil_workflow-0.0.32.dist-info → ddeutil_workflow-0.0.34.dist-info}/LICENSE +0 -0
- {ddeutil_workflow-0.0.32.dist-info → ddeutil_workflow-0.0.34.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.32.dist-info → ddeutil_workflow-0.0.34.dist-info}/top_level.txt +0 -0
ddeutil/workflow/scheduler.py
CHANGED
@@ -5,16 +5,16 @@
 # ------------------------------------------------------------------------------
 """
 The main schedule running is ``schedule_runner`` function that trigger the
-multiprocess of ``
+multiprocess of ``schedule_control`` function for listing schedules on the
 config by ``Loader.finds(Schedule)``.

-The ``
+The ``schedule_control`` is the scheduler function that release 2 schedule
 functions; ``workflow_task``, and ``workflow_monitor``.

-``
-    --- Every 5 minutes --> ``
+``schedule_control`` --- Every minute at :02 --> ``schedule_task``
+    --- Every 5 minutes --> ``monitor``

-The ``
+The ``schedule_task`` will run ``task.release`` method in threading object
 for multithreading strategy. This ``release`` method will run only one crontab
 value with the on field.
 """
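The rewritten docstring above describes the new call chain: `schedule_runner` fans a chunk of schedule configs out to `schedule_control` on a process pool, which registers `schedule_task` (every minute at :02) and `monitor` (every 5 minutes) on a `schedule.Scheduler`. A minimal driving sketch, assuming only the public signatures visible later in this diff; the one-hour stop window is an arbitrary example value:

```python
from datetime import datetime, timedelta

from ddeutil.workflow.scheduler import schedule_runner

# Poll every Schedule config that Loader.finds(Schedule) discovers and stop
# the whole control loop roughly one hour from now.
rs = schedule_runner(stop=datetime.now() + timedelta(hours=1))

# schedule_runner now returns a Result whose context aggregates the child
# schedule_control results (see the last hunk of this diff).
print(rs.context)
```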
@@ -51,10 +51,11 @@ except ImportError:  # pragma: no cov

 from .__cron import CronRunner
 from .__types import DictData, TupleStr
-from .
+from .audit import Audit, get_audit
+from .conf import Loader, config, get_logger
 from .cron import On
 from .exceptions import ScheduleException, WorkflowException
-from .result import Result
+from .result import Result, Status
 from .utils import batch, delay
 from .workflow import Release, ReleaseQueue, Workflow, WorkflowTask

@@ -313,25 +314,19 @@ class Schedule(BaseModel):
         *,
         stop: datetime | None = None,
         externals: DictData | None = None,
-
-
+        audit: type[Audit] | None = None,
+        parent_run_id: str | None = None,
+    ) -> Result:  # pragma: no cov
         """Pending this schedule tasks with the schedule package.

         :param stop: A datetime value that use to stop running schedule.
         :param externals: An external parameters that pass to Loader.
-        :param
-            writing its release
+        :param audit: An audit class that use on the workflow task release for
+            writing its release audit context.
+        :param parent_run_id: A parent workflow running ID for this release.
         """
-
-
-        except ImportError:
-            raise ImportError(
-                "Should install schedule package before use this method."
-            ) from None
-
-        # NOTE: Get default logging.
-        log: type[Log] = log or get_log()
-        scheduler: Scheduler = Scheduler()
+        audit: type[Audit] = audit or get_audit()
+        result: Result = Result().set_parent_run_id(parent_run_id)

         # NOTE: Create the start and stop datetime.
         start_date: datetime = datetime.now(tz=config.tz)
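`Schedule.pending` now accepts an `audit` class and a `parent_run_id` instead of the old `log` argument, and it returns a `Result`. A hedged usage sketch based on the new signature; `"sample-schedule"` is a placeholder config name:

```python
from ddeutil.workflow.scheduler import Schedule

schedule = Schedule.from_loader("sample-schedule")

# audit=None falls back to get_audit(); parent_run_id is carried into the
# returned Result via Result().set_parent_run_id(...).
rs = schedule.pending(audit=None, parent_run_id="manual-run-01")
print(rs.parent_run_id)
```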
@@ -345,66 +340,23 @@ class Schedule(BaseModel):
             second=0, microsecond=0
         ) + timedelta(minutes=1)

-
-
-
-
-
-
-
-
-
-                stop=stop_date,
-                queue=queue,
-                threads=threads,
-                log=log,
-            )
-            .tag("control")
-        )
-
-        # NOTE: Checking zombie task with schedule job will start every 5 minute at
-        # :10 seconds.
-        (
-            scheduler.every(5)
-            .minutes.at(":10")
-            .do(
-                monitor,
-                threads=threads,
-            )
-            .tag("monitor")
-        )
-
-        # NOTE: Start running schedule
-        logger.info(
-            f"[SCHEDULE]: Schedule with stopper: {stop_date:%Y-%m-%d %H:%M:%S}"
+        scheduler_pending(
+            tasks=self.tasks(
+                start_date_waiting, queue=queue, externals=externals
+            ),
+            stop_date=stop_date,
+            queue=queue,
+            threads=threads,
+            result=result,
+            audit=audit,
         )

-
-            scheduler.run_pending()
-            time.sleep(1)
+        return result.catch(status=Status.SUCCESS)

-            # NOTE: Break the scheduler when the control job does not exist.
-            if not scheduler.get_jobs("control"):
-                scheduler.clear("monitor")

-
-
-
-                        "running in background."
-                    )
-                    delay(10)
-                    monitor(threads)
-
-                break
-
-        logger.warning(
-            f"[SCHEDULE]: Queue: {[list(queue[wf].queue) for wf in queue]}"
-        )
-
-
-ResultOrCancelJob = Union[type[CancelJob], Result]
-ReturnCancelJob = Callable[P, ResultOrCancelJob]
-DecoratorCancelJob = Callable[[ReturnCancelJob], ReturnCancelJob]
+ResultOrCancel = Union[type[CancelJob], Result]
+ReturnResultOrCancel = Callable[P, ResultOrCancel]
+DecoratorCancelJob = Callable[[ReturnResultOrCancel], ReturnResultOrCancel]


 def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:
@@ -417,10 +369,12 @@ def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:
     :rtype: DecoratorCancelJob
     """

-    def decorator(
+    def decorator(
+        func: ReturnResultOrCancel,
+    ) -> ReturnResultOrCancel:  # pragma: no cov

         @wraps(func)
-        def wrapper(*args: P.args, **kwargs: P.kwargs) ->
+        def wrapper(*args: P.args, **kwargs: P.kwargs) -> ResultOrCancel:
             try:
                 return func(*args, **kwargs)
             except Exception as err:
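The decorator body is unchanged apart from the renamed type aliases (`ResultOrCancel`, `ReturnResultOrCancel`). A short sketch of the pattern it enables, which is how `schedule_task` wraps `task.release` later in this diff; `failing_release` is a made-up stand-in for a release callable, and the CancelJob outcome follows the usual `schedule`-package recipe for `cancel_on_failure`:

```python
from schedule import CancelJob

from ddeutil.workflow.result import Result
from ddeutil.workflow.scheduler import catch_exceptions


@catch_exceptions(cancel_on_failure=True)
def failing_release() -> Result:
    # Stand-in for a task.release call that blows up at runtime.
    raise RuntimeError("boom")


# The wrapper swallows the exception and hands back CancelJob so the job is
# dropped by the schedule package instead of crashing the control loop.
print(failing_release() is CancelJob)
```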
@@ -437,8 +391,9 @@ def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:
 class ReleaseThread(TypedDict):
     """TypeDict for the release thread."""

-    thread: Thread
+    thread: Optional[Thread]
     start_date: datetime
+    release_date: datetime


 ReleaseThreads = dict[str, ReleaseThread]
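`ReleaseThread.thread` becomes `Optional[Thread]` and each entry now records a `release_date`, so `monitor` can null out a finished slot instead of deleting it while `scheduler_pending` still reports it in its result context. A small sketch of the bookkeeping shape; the thread name and dates are placeholders:

```python
from datetime import datetime
from threading import Thread

from ddeutil.workflow.scheduler import ReleaseThreads

threads: ReleaseThreads = {}

thread = Thread(target=lambda: None, name="wf-example|202401010000", daemon=True)
threads[thread.name] = {
    "thread": thread,                        # set to None by monitor when done
    "start_date": datetime.now(),            # the real code uses config.tz
    "release_date": datetime(2024, 1, 1),    # crontab release this thread runs
}
```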
@@ -450,8 +405,9 @@ def schedule_task(
     stop: datetime,
     queue: dict[str, ReleaseQueue],
     threads: ReleaseThreads,
-
-
+    audit: type[Audit],
+    parent_run_id: str | None = None,
+) -> ResultOrCancel:
     """Schedule task function that generate thread of workflow task release
     method in background. This function do the same logic as the workflow poke
     method, but it runs with map of schedules and the on values.
@@ -463,10 +419,12 @@ def schedule_task(
     :param stop: A stop datetime object that force stop running scheduler.
     :param queue: A mapping of alias name and ReleaseQueue object.
     :param threads: A mapping of alias name and Thread object.
-    :param
+    :param audit: An audit class that want to make audit object.
+    :param parent_run_id: A parent workflow running ID for this release.

-    :rtype:
+    :rtype: ResultOrCancel
     """
+    result: Result = Result().set_parent_run_id(parent_run_id)
     current_date: datetime = datetime.now(tz=config.tz)
     if current_date > stop.replace(tzinfo=config.tz):
         return CancelJob
@@ -489,14 +447,16 @@ def schedule_task(
         q: ReleaseQueue = queue[task.alias]

         # NOTE: Start adding queue and move the runner date in the WorkflowTask.
-        task.queue(stop, q,
+        task.queue(stop, q, audit=audit)

         # NOTE: Get incoming datetime queue.
-
+        result.trace.debug(
+            f"[WORKFLOW]: Queue: {task.alias!r} : {list(q.queue)}"
+        )

         # VALIDATE: Check the queue is empty or not.
         if not q.is_queued:
-
+            result.trace.warning(
                 f"[WORKFLOW]: Queue is empty for : {task.alias!r} : "
                 f"{task.runner.cron}"
             )
@@ -507,7 +467,7 @@ def schedule_task(
             second=0, microsecond=0
         )
         if (first_date := q.first_queue.date) > current_release:
-
+            result.trace.debug(
                 f"[WORKFLOW]: Skip schedule "
                 f"{first_date:%Y-%m-%d %H:%M:%S} for : {task.alias!r}"
             )
@@ -522,7 +482,7 @@ def schedule_task(
         release: Release = heappop(q.queue)
         heappush(q.running, release)

-
+        result.trace.info(
             f"[WORKFLOW]: Start thread: '{task.alias}|"
             f"{release.date:%Y%m%d%H%M}'"
         )
@@ -532,7 +492,7 @@ def schedule_task(
         thread_name: str = f"{task.alias}|{release.date:%Y%m%d%H%M}"
         thread: Thread = Thread(
             target=catch_exceptions(cancel_on_failure=True)(task.release),
-            kwargs={"release": release, "queue": q, "
+            kwargs={"release": release, "queue": q, "audit": audit},
             name=thread_name,
             daemon=True,
         )
@@ -540,13 +500,20 @@ def schedule_task(
         threads[thread_name] = {
             "thread": thread,
             "start_date": datetime.now(tz=config.tz),
+            "release_date": release.date,
         }

         thread.start()

         delay()

-
+    result.trace.debug(
+        f"[SCHEDULE]: End schedule task at {current_date:%Y-%m-%d %H:%M:%S} "
+        f"{'=' * 80}"
+    )
+    return result.catch(
+        status=Status.SUCCESS, context={"task_date": current_date}
+    )


 def monitor(threads: ReleaseThreads) -> None:  # pragma: no cov
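`schedule_task`, like the other reworked functions in this diff, now funnels its outcome through `Result.catch` and the result's trace methods instead of module-level logging. A tiny sketch of that result-passing pattern as it appears across these hunks; the run ID is a placeholder:

```python
from datetime import datetime

from ddeutil.workflow.result import Result, Status

result = Result().set_parent_run_id("some-parent-id")   # placeholder run ID
result.trace.info("[SCHEDULE]: demo message")           # replaces logger.info

rs = result.catch(status=Status.SUCCESS, context={"task_date": datetime.now()})
print(rs.context)
```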
@@ -559,69 +526,44 @@ def monitor(threads: ReleaseThreads) -> None:  # pragma: no cov
     logger.debug("[MONITOR]: Start checking long running schedule task.")

     snapshot_threads: list[str] = list(threads.keys())
-    for
+    for thread_name in snapshot_threads:

-        thread_release: ReleaseThread = threads[
+        thread_release: ReleaseThread = threads[thread_name]

         # NOTE: remove the thread that running success.
-
-
+        thread = thread_release["thread"]
+        if thread and (not thread_release["thread"].is_alive()):
+            thread_release["thread"] = None


-def
-
-
-
-
-
-
-
-
-    the background.
+def scheduler_pending(
+    tasks: list[WorkflowTask],
+    stop_date,
+    queue,
+    threads,
+    result: Result,
+    audit: type[Audit],
+) -> Result:  # pragma: no cov
+    """

-    :param
-    :param
-    :param
-    :param
-
+    :param tasks:
+    :param stop_date:
+    :param queue:
+    :param threads:
+    :param result:
+    :param audit:

-    :rtype:
+    :rtype: Result
     """
-    # NOTE: Lazy import Scheduler object from the schedule package.
     try:
         from schedule import Scheduler
     except ImportError:
         raise ImportError(
-            "Should install schedule package before use this
+            "Should install schedule package before use this method."
         ) from None

-    # NOTE: Get default logging.
-    log: type[Log] = log or get_log()
     scheduler: Scheduler = Scheduler()

-    # NOTE: Create the start and stop datetime.
-    start_date: datetime = datetime.now(tz=config.tz)
-    stop_date: datetime = stop or (start_date + config.stop_boundary_delta)
-
-    # IMPORTANT: Create main mapping of queue and thread object.
-    queue: dict[str, ReleaseQueue] = {}
-    threads: ReleaseThreads = {}
-
-    start_date_waiting: datetime = start_date.replace(
-        second=0, microsecond=0
-    ) + timedelta(minutes=1)
-
-    tasks: list[WorkflowTask] = []
-    for name in schedules:
-        schedule: Schedule = Schedule.from_loader(name, externals=externals)
-        tasks.extend(
-            schedule.tasks(
-                start_date_waiting,
-                queue=queue,
-                externals=externals,
-            ),
-        )
-
     # NOTE: This schedule job will start every minute at :02 seconds.
     (
         scheduler.every(1)
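`scheduler_pending` now owns the `schedule.Scheduler` loop that both `Schedule.pending` and `schedule_control` delegate to. A hedged sketch of calling it directly with a pre-built task list, mirroring the call sites shown in this diff; the schedule name is a placeholder and the empty mappings are filled as tasks queue up:

```python
from datetime import datetime, timedelta

from ddeutil.workflow.audit import get_audit
from ddeutil.workflow.result import Result
from ddeutil.workflow.scheduler import ReleaseThreads, Schedule, scheduler_pending

queue = {}                   # dict[str, ReleaseQueue], filled by Schedule.tasks()
threads: ReleaseThreads = {}
start = datetime.now().replace(second=0, microsecond=0) + timedelta(minutes=1)

schedule = Schedule.from_loader("sample-schedule")
rs = scheduler_pending(
    tasks=schedule.tasks(start, queue=queue, externals={}),
    stop_date=datetime.now() + timedelta(minutes=30),
    queue=queue,
    threads=threads,
    result=Result(),
    audit=get_audit(),
)
print(rs.context.get("threads", []))
```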
@@ -632,7 +574,8 @@ def schedule_control(
             stop=stop_date,
             queue=queue,
             threads=threads,
-
+            audit=audit,
+            parent_run_id=result.parent_run_id,
         )
         .tag("control")
     )
@@ -650,9 +593,8 @@ def schedule_control(
     )

     # NOTE: Start running schedule
-
-        f"[SCHEDULE]: Schedule
-        f"{stop_date:%Y-%m-%d %H:%M:%S}"
+    result.trace.info(
+        f"[SCHEDULE]: Schedule with stopper: {stop_date:%Y-%m-%d %H:%M:%S}"
     )

     while True:
@@ -663,8 +605,8 @@ def schedule_control(
         if not scheduler.get_jobs("control"):
             scheduler.clear("monitor")

-            while len(threads) > 0:
-
+            while len([t for t in threads.values() if t["thread"]]) > 0:
+                result.trace.warning(
                     "[SCHEDULE]: Waiting schedule release thread that still "
                     "running in background."
                 )
@@ -673,17 +615,87 @@ def schedule_control(

             break

-
+    result.trace.warning(
         f"[SCHEDULE]: Queue: {[list(queue[wf].queue) for wf in queue]}"
     )
-    return
+    return result.catch(
+        status=Status.SUCCESS,
+        context={
+            "threads": [
+                {
+                    "name": thread,
+                    "start_date": threads[thread]["start_date"],
+                    "release_date": threads[thread]["release_date"],
+                }
+                for thread in threads
+            ],
+        },
+    )
+
+
+def schedule_control(
+    schedules: list[str],
+    stop: datetime | None = None,
+    externals: DictData | None = None,
+    *,
+    audit: type[Audit] | None = None,
+    parent_run_id: str | None = None,
+) -> Result:  # pragma: no cov
+    """Scheduler control function that run the chuck of schedules every minute
+    and this function release monitoring thread for tracking undead thread in
+    the background.
+
+    :param schedules: A list of workflow names that want to schedule running.
+    :param stop: A datetime value that use to stop running schedule.
+    :param externals: An external parameters that pass to Loader.
+    :param audit: An audit class that use on the workflow task release for
+        writing its release audit context.
+    :param parent_run_id: A parent workflow running ID for this release.
+
+    :rtype: Result
+    """
+    audit: type[Audit] = audit or get_audit()
+    result: Result = Result().set_parent_run_id(parent_run_id)
+
+    # NOTE: Create the start and stop datetime.
+    start_date: datetime = datetime.now(tz=config.tz)
+    stop_date: datetime = stop or (start_date + config.stop_boundary_delta)
+
+    # IMPORTANT: Create main mapping of queue and thread object.
+    queue: dict[str, ReleaseQueue] = {}
+    threads: ReleaseThreads = {}
+
+    start_date_waiting: datetime = start_date.replace(
+        second=0, microsecond=0
+    ) + timedelta(minutes=1)
+
+    tasks: list[WorkflowTask] = []
+    for name in schedules:
+        tasks.extend(
+            Schedule.from_loader(name, externals=externals).tasks(
+                start_date_waiting,
+                queue=queue,
+                externals=externals,
+            ),
+        )
+
+    scheduler_pending(
+        tasks=tasks,
+        stop_date=stop_date,
+        queue=queue,
+        threads=threads,
+        result=result,
+        audit=audit,
+    )
+
+    return result.catch(status=Status.SUCCESS, context={"schedules": schedules})


 def schedule_runner(
     stop: datetime | None = None,
     externals: DictData | None = None,
     excluded: list[str] | None = None,
-) ->
+) -> Result:  # pragma: no cov
     """Schedule runner function it the multiprocess controller function for
     split the setting schedule to the `schedule_control` function on the
     process pool. It chunks schedule configs that exists in config
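`schedule_control` is now a plain module-level function (the old lazy `schedule` import and `Scheduler` loop moved into `scheduler_pending`); it expands every named `Schedule` into `WorkflowTask` objects and delegates the loop. A hedged calling sketch; the schedule names are placeholders:

```python
from datetime import datetime, timedelta

from ddeutil.workflow.scheduler import schedule_control

rs = schedule_control(
    schedules=["schedule-wf-01", "schedule-wf-02"],
    stop=datetime.now() + timedelta(minutes=30),
    externals={},
    parent_run_id=None,   # audit is omitted, so it defaults to get_audit()
)
print(rs.context["schedules"])
```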
@@ -706,9 +718,10 @@ def schedule_runner(
         --> thread of release task 02 02
             ==> process 02 ==> ...

-    :rtype:
+    :rtype: Result
     """
-
+    result: Result = Result()
+    context: DictData = {"schedules": [], "threads": []}

     with ProcessPoolExecutor(
         max_workers=config.max_schedule_process,
@@ -720,6 +733,7 @@ def schedule_runner(
                 schedules=[load[0] for load in loader],
                 stop=stop,
                 externals=(externals or {}),
+                parent_run_id=result.parent_run_id,
             )
             for loader in batch(
                 Loader.finds(Schedule, excluded=excluded),
@@ -734,6 +748,8 @@ def schedule_runner(
                 logger.error(str(err))
                 raise WorkflowException(str(err)) from err

-
+            rs: Result = future.result(timeout=1)
+            context["schedule"].extend(rs.context.get("schedules", []))
+            context["threads"].extend(rs.context.get("threads", []))

-    return
+    return result.catch(status=0, context=context)