ddeutil-workflow 0.0.58__tar.gz → 0.0.59__tar.gz
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/PKG-INFO +6 -3
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/pyproject.toml +5 -2
- ddeutil_workflow-0.0.59/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/__cron.py +3 -3
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/conf.py +6 -4
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/event.py +2 -2
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/exceptions.py +3 -3
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/job.py +35 -34
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/logs.py +78 -51
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/params.py +9 -5
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/result.py +18 -18
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/reusables.py +9 -9
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/scheduler.py +8 -8
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/stages.py +70 -70
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/utils.py +6 -6
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/workflow.py +31 -31
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil_workflow.egg-info/PKG-INFO +6 -3
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil_workflow.egg-info/requires.txt +5 -2
- ddeutil_workflow-0.0.58/src/ddeutil/workflow/__about__.py +0 -1
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/LICENSE +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/README.md +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/__init__.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/__main__.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/api/__init__.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/api/logs.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/api/routes/__init__.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/api/routes/job.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/api/routes/logs.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/api/routes/schedules.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/api/routes/workflows.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/api/utils.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil_workflow.egg-info/SOURCES.txt +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_conf.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_event.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_job.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_job_exec.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_job_exec_strategy.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_logs_audit.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_logs_trace.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_params.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_release.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_release_queue.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_result.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_reusables_call_tag.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_reusables_template.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_reusables_template_filter.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_schedule.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_schedule_pending.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_schedule_tasks.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_schedule_workflow.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_scheduler_control.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_stage.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_stage_handler_exec.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_strategy.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_utils.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_workflow.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_workflow_exec.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_workflow_exec_job.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_workflow_poke.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_workflow_release.py +0 -0
- {ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/tests/test_workflow_task.py +0 -0
{ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.58
+Version: 0.0.59
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -23,17 +23,20 @@ Requires-Python: >=3.9.13
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil[checksum]>=0.4.8
-Requires-Dist: ddeutil-io[toml,yaml]>=0.2.
-Requires-Dist: pydantic==2.11.
+Requires-Dist: ddeutil-io[toml,yaml]>=0.2.13
+Requires-Dist: pydantic==2.11.4
 Requires-Dist: python-dotenv==1.1.0
 Requires-Dist: schedule<2.0.0,==1.2.2
 Provides-Extra: all
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "all"
+Requires-Dist: uvicorn; extra == "all"
 Requires-Dist: httpx; extra == "all"
+Requires-Dist: ujson; extra == "all"
 Requires-Dist: aiofiles; extra == "all"
 Requires-Dist: aiohttp; extra == "all"
 Provides-Extra: api
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "api"
+Requires-Dist: uvicorn; extra == "api"
 Requires-Dist: httpx; extra == "api"
 Requires-Dist: ujson; extra == "api"
 Provides-Extra: async
```
{ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/pyproject.toml

```diff
@@ -26,8 +26,8 @@ classifiers = [
 requires-python = ">=3.9.13"
 dependencies = [
     "ddeutil[checksum]>=0.4.8",
-    "ddeutil-io[yaml,toml]>=0.2.
-    "pydantic==2.11.
+    "ddeutil-io[yaml,toml]>=0.2.13",
+    "pydantic==2.11.4",
     "python-dotenv==1.1.0",
     "schedule==1.2.2,<2.0.0",
 ]
@@ -36,12 +36,15 @@ dynamic = ["version"]
 [project.optional-dependencies]
 all = [
     "fastapi>=0.115.0,<1.0.0",
+    "uvicorn",
     "httpx",
+    "ujson",
     "aiofiles",
     "aiohttp",
 ]
 api = [
     "fastapi>=0.115.0,<1.0.0",
+    "uvicorn",
     "httpx",
     "ujson",
 ]
```
ddeutil_workflow-0.0.59/src/ddeutil/workflow/__about__.py

```diff
@@ -0,0 +1 @@
+__version__: str = "0.0.59"
```
{ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/__cron.py

```diff
@@ -699,9 +699,9 @@ class CronJob:
 
     def schedule(
         self,
-        date: datetime
+        date: Optional[datetime] = None,
         *,
-        tz: str
+        tz: Optional[str] = None,
     ) -> CronRunner:
         """Returns CronRunner instance that be datetime runner with this
         cronjob. It can use `next`, `prev`, or `reset` methods to generate
@@ -766,7 +766,7 @@ class CronRunner:
     def __init__(
        self,
        cron: CronJob | CronJobYear,
-       date: datetime
+       date: Optional[datetime] = None,
        *,
        tz: str | ZoneInfo | None = None,
    ) -> None:
```
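Both signatures now accept `None` and default to it, so a caller no longer has to pass an explicit start datetime or timezone. A minimal sketch of that pattern, assuming (this is not shown in the diff) that the runner falls back to the current time in the resolved timezone when `date` is `None`; `resolve_start` is a hypothetical helper, not library code:

```python
from datetime import datetime
from typing import Optional
from zoneinfo import ZoneInfo


def resolve_start(date: Optional[datetime] = None, tz: Optional[str] = None) -> datetime:
    """Hypothetical helper mirroring the new optional parameters:
    fall back to 'now' in the requested timezone when no date is given."""
    zone = ZoneInfo(tz) if tz else ZoneInfo("UTC")
    return date.astimezone(zone) if date else datetime.now(tz=zone)


# Callers can now omit both arguments entirely.
print(resolve_start())                      # current time in UTC
print(resolve_start(tz="Asia/Bangkok"))     # current time in Asia/Bangkok
```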
{ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/conf.py

```diff
@@ -5,6 +5,7 @@
 # ------------------------------------------------------------------------------
 from __future__ import annotations
 
+import copy
 import json
 import os
 from abc import ABC, abstractmethod
@@ -26,13 +27,13 @@ T = TypeVar("T")
 PREFIX: Final[str] = "WORKFLOW"
 
 
-def env(var: str, default: str
+def env(var: str, default: Optional[str] = None) -> Optional[str]:
     """Get environment variable with uppercase and adding prefix string.
 
     :param var: (str) A env variable name.
-    :param default: (str
+    :param default: (Optional[str]) A default value if an env var does not set.
 
-    :rtype: str
+    :rtype: Optional[str]
     """
     return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)
 
```
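The `env` helper now defaults `default` to `None` and is annotated accordingly. Its body is fully visible in the diff, so the behavior can be exercised directly in a small standalone copy (the `WORKFLOW` prefix comes from the `PREFIX` constant shown above):

```python
import os
from typing import Final, Optional

PREFIX: Final[str] = "WORKFLOW"


def env(var: str, default: Optional[str] = None) -> Optional[str]:
    """Look up WORKFLOW_<VAR>, upper-casing the name and replacing spaces."""
    return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)


os.environ["WORKFLOW_CORE_TIMEZONE"] = "UTC"
assert env("core timezone") == "UTC"        # spaces become underscores
assert env("missing key") is None           # new default instead of a required str
```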
```diff
@@ -298,6 +299,7 @@ class FileLoad(BaseLoad):
                 f"Multi-config paths does not support for type: {type(paths)}"
             )
         else:
+            paths: list[Path] = copy.deepcopy(paths)
             paths.append(path)
 
         all_data: list[tuple[float, DictData]] = []
```
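The new `copy.deepcopy(paths)` line means the caller-supplied list of config paths is no longer mutated by the subsequent `append`. A small standalone sketch of the difference (the `FileLoad` internals are not reproduced here; both helper functions are illustrative only):

```python
import copy
from pathlib import Path


def collect_paths_mutating(paths: list[Path], extra: Path) -> list[Path]:
    paths.append(extra)            # old behavior: the caller's list grows too
    return paths


def collect_paths_copying(paths: list[Path], extra: Path) -> list[Path]:
    paths = copy.deepcopy(paths)   # 0.0.59 behavior: work on a private copy
    paths.append(extra)
    return paths


user_paths = [Path("conf")]
collect_paths_copying(user_paths, Path("extra-conf"))
assert user_paths == [Path("conf")]                        # untouched

collect_paths_mutating(user_paths, Path("extra-conf"))
assert user_paths == [Path("conf"), Path("extra-conf")]    # surprise side effect
```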
```diff
@@ -398,7 +400,7 @@ class FileLoad(BaseLoad):
         return is_ignored(file, read_ignore(path / ignore_filename))
 
     @classmethod
-    def filter_yaml(cls, file: Path, name: str
+    def filter_yaml(cls, file: Path, name: Optional[str] = None) -> DictData:
         """Read a YAML file context from an input file path and specific name.
 
         :param file: (Path) A file path that want to extract YAML context.
```
{ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/event.py

```diff
@@ -10,7 +10,7 @@ from __future__ import annotations
 
 from dataclasses import fields
 from datetime import datetime
-from typing import Annotated, Any, Literal, Union
+from typing import Annotated, Any, Literal, Optional, Union
 from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
 
 from pydantic import BaseModel, ConfigDict, Field, ValidationInfo
@@ -28,7 +28,7 @@ Interval = Literal["daily", "weekly", "monthly"]
 def interval2crontab(
     interval: Interval,
     *,
-    day: str
+    day: Optional[str] = None,
     time: str = "00:00",
 ) -> str:
     """Return the crontab string that was generated from specific values.
```
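The `day` parameter becomes optional with a `None` default. The conversion logic itself is not part of this diff, so the following is a purely hypothetical sketch of what an interval-to-crontab mapping can look like (the `WEEKDAYS` table and the chosen crontab outputs are assumptions, not the library's actual behavior), kept only to illustrate the new signature:

```python
from typing import Literal, Optional

Interval = Literal["daily", "weekly", "monthly"]

# Hypothetical day-name lookup; the real implementation is not shown in the diff.
WEEKDAYS = {"sunday": 0, "monday": 1, "tuesday": 2, "wednesday": 3,
            "thursday": 4, "friday": 5, "saturday": 6}


def interval2crontab_sketch(
    interval: Interval, *, day: Optional[str] = None, time: str = "00:00"
) -> str:
    hour, minute = time.split(":")
    if interval == "daily":
        return f"{int(minute)} {int(hour)} * * *"
    if interval == "weekly":
        return f"{int(minute)} {int(hour)} * * {WEEKDAYS[(day or 'monday').lower()]}"
    return f"{int(minute)} {int(hour)} 1 * *"   # monthly: first day of the month


print(interval2crontab_sketch("weekly", time="01:30"))  # "30 1 * * 1"
```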
{ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/exceptions.py

```diff
@@ -9,7 +9,7 @@ annotate for handle error only.
 """
 from __future__ import annotations
 
-from typing import Literal, TypedDict, overload
+from typing import Literal, Optional, TypedDict, overload
 
 
 class ErrorData(TypedDict):
@@ -39,9 +39,9 @@ class BaseWorkflowException(Exception):
     making an error context to the result context.
     """
 
-    def __init__(self, message: str, *, refs: str
+    def __init__(self, message: str, *, refs: Optional[str] = None):
         super().__init__(message)
-        self.refs: str
+        self.refs: Optional[str] = refs
 
     @overload
     def to_dict(
```
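`refs` is now optional on the base exception, so call sites that have no reference ID can raise without one. A minimal self-contained version of the pattern shown in the diff (the real class carries more behavior, such as `to_dict`):

```python
from typing import Optional


class BaseWorkflowException(Exception):
    """Base exception that keeps an optional reference ID for error context."""

    def __init__(self, message: str, *, refs: Optional[str] = None):
        super().__init__(message)
        self.refs: Optional[str] = refs


try:
    raise BaseWorkflowException("stage failed")        # no refs needed anymore
except BaseWorkflowException as err:
    assert err.refs is None
    assert str(err) == "stage failed"
```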
{ddeutil_workflow-0.0.58 → ddeutil_workflow-0.0.59}/src/ddeutil/workflow/job.py

```diff
@@ -19,6 +19,7 @@ from __future__ import annotations
 
 import copy
 import time
+from collections.abc import Iterator
 from concurrent.futures import (
     FIRST_EXCEPTION,
     CancelledError,
@@ -67,8 +68,8 @@ def make(
 
     :param matrix: (Matrix) A matrix values that want to cross product to
         possible parallelism values.
-    :param include:
-    :param exclude:
+    :param include: A list of additional matrix that want to adds-in.
+    :param exclude: A list of exclude matrix that want to filter-out.
 
     :rtype: list[DictStr]
     """
```
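The `include`/`exclude` docstring lines are now filled in: `include` adds extra matrix combinations and `exclude` filters combinations out of the cross product. A sketch of that idea using stdlib `itertools`; this is my own illustration of the semantics, not the package's actual `make()` implementation:

```python
from itertools import product


def make_sketch(matrix: dict[str, list], include: list[dict], exclude: list[dict]) -> list[dict]:
    """Cross-product the matrix, drop excluded combos, then add the includes."""
    keys = list(matrix)
    combos = [dict(zip(keys, values)) for values in product(*matrix.values())]
    combos = [c for c in combos if c not in exclude]
    return combos + [i for i in include if i not in combos]


print(make_sketch(
    matrix={"python": ["3.9", "3.10"], "os": ["linux", "windows"]},
    include=[{"python": "3.12", "os": "linux"}],
    exclude=[{"python": "3.9", "os": "windows"}],
))
```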
```diff
@@ -191,22 +192,22 @@ class Strategy(BaseModel):
 class Rule(str, Enum):
     """Rule enum object for assign trigger option."""
 
-    ALL_SUCCESS
-    ALL_FAILED
-    ALL_DONE
-    ONE_FAILED
-    ONE_SUCCESS
-    NONE_FAILED
-    NONE_SKIPPED
+    ALL_SUCCESS = "all_success"
+    ALL_FAILED = "all_failed"
+    ALL_DONE = "all_done"
+    ONE_FAILED = "one_failed"
+    ONE_SUCCESS = "one_success"
+    NONE_FAILED = "none_failed"
+    NONE_SKIPPED = "none_skipped"
 
 
 class RunsOn(str, Enum):
     """Runs-On enum object."""
 
-    LOCAL
-    SELF_HOSTED
-    AZ_BATCH
-    DOCKER
+    LOCAL = "local"
+    SELF_HOSTED = "self_hosted"
+    AZ_BATCH = "azure_batch"
+    DOCKER = "docker"
 
 
 class BaseRunsOn(BaseModel):  # pragma: no cov
```
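Giving the `str`-based enums explicit values makes the members usable directly as their wire strings and lets them be looked up by value (note that `AZ_BATCH` maps to `"azure_batch"`, not `"az_batch"`). A quick check of that behavior using the values shown above:

```python
from enum import Enum


class RunsOn(str, Enum):
    """Runs-On enum object."""

    LOCAL = "local"
    SELF_HOSTED = "self_hosted"
    AZ_BATCH = "azure_batch"
    DOCKER = "docker"


assert RunsOn("azure_batch") is RunsOn.AZ_BATCH   # lookup by value now works
assert RunsOn.LOCAL == "local"                    # str mixin compares to plain strings
assert RunsOn.DOCKER.value == "docker"
```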
```diff
@@ -566,7 +567,7 @@ class Job(BaseModel):
         :param output: (DictData) A result data context that want to extract
             and transfer to the `strategies` key in receive context.
         :param to: (DictData) A received context data.
-        :param job_id: (str
+        :param job_id: (Optional[str]) A job ID if the `id` field does not set.
 
         :rtype: DictData
         """
@@ -606,9 +607,9 @@ class Job(BaseModel):
         self,
         params: DictData,
         *,
-        run_id: str
-        parent_run_id: str
-        event: Event
+        run_id: Optional[str] = None,
+        parent_run_id: Optional[str] = None,
+        event: Optional[Event] = None,
     ) -> Result:
         """Job execution with passing dynamic parameters from the workflow
         execution. It will generate matrix values at the first step and run
```
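The execution entry points now default `run_id`, `parent_run_id`, and `event` to `None`, so a caller only supplies a `threading.Event` when it actually wants cooperative cancellation. A sketch of that optional-event pattern, independent of the library's `Job`/`Result` types (`execute_sketch` is a hypothetical stand-in):

```python
import time
from threading import Event
from typing import Optional


def execute_sketch(params: dict, *, event: Optional[Event] = None) -> str:
    """Run a few steps, checking the optional cancellation event between them."""
    for step in ("build-matrix", "run-stages", "collect-outputs"):
        if event is not None and event.is_set():
            return f"cancelled before {step}"
        time.sleep(0.01)   # stand-in for real work
    return "success"


assert execute_sketch({"name": "demo"}) == "success"   # no event passed at all

stop = Event()
stop.set()
assert execute_sketch({"name": "demo"}, event=stop) == "cancelled before build-matrix"
```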
```diff
@@ -676,8 +677,8 @@ def local_execute_strategy(
     strategy: DictData,
     params: DictData,
     *,
-    result: Result
-    event: Event
+    result: Optional[Result] = None,
+    event: Optional[Event] = None,
 ) -> Result:
     """Local strategy execution with passing dynamic parameters from the
     job execution and strategy matrix.
@@ -799,9 +800,9 @@ def local_execute(
     job: Job,
     params: DictData,
     *,
-    run_id: str
-    parent_run_id: str
-    event: Event
+    run_id: Optional[str] = None,
+    parent_run_id: Optional[str] = None,
+    event: Optional[Event] = None,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
     execution or directly. It will generate matrix values at the first
@@ -874,10 +875,10 @@ def local_execute(
         status: Status = SUCCESS
 
         if not fail_fast_flag:
-            done:
+            done: Iterator[Future] = as_completed(futures)
         else:
            done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
-           if len(done) != len(futures):
+           if len(list(done)) != len(futures):
                result.trace.warning(
                    "[JOB]: Handler Fail-Fast: Got exception and set event."
                )
```
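The fail-fast branch relies on `wait(..., return_when=FIRST_EXCEPTION)` and treats an early return (`len(done) != len(futures)`) as the signal that some future raised, while the normal branch simply iterates `as_completed(futures)`. A standalone sketch of that stdlib pattern (the `work` function and its values are illustrative only):

```python
import time
from concurrent.futures import FIRST_EXCEPTION, ThreadPoolExecutor, as_completed, wait


def work(n: int) -> int:
    if n == 2:
        raise ValueError("boom")
    time.sleep(0.05 * n)   # stagger the tasks so the failure lands mid-flight
    return n * n


with ThreadPoolExecutor(max_workers=2) as executor:
    futures = [executor.submit(work, n) for n in range(4)]

    # Fail-fast branch: return as soon as any future raises.
    done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
    if len(done) != len(futures):
        print(f"early exit, {len(not_done)} futures still pending")

    # Then drain everything; as_completed yields futures as they finish.
    for future in as_completed(futures):
        try:
            print("result:", future.result())
        except ValueError as exc:
            print("caught:", exc)
```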
```diff
@@ -895,7 +896,7 @@ def local_execute(
                 else ""
             )
             result.trace.debug(f"[JOB]: ... Job was set Fail-Fast{nd}")
-            done:
+            done: Iterator[Future] = as_completed(futures)
 
         for future in done:
             try:
@@ -918,9 +919,9 @@ def self_hosted_execute(
     job: Job,
     params: DictData,
     *,
-    run_id: str
-    parent_run_id: str
-    event: Event
+    run_id: Optional[str] = None,
+    parent_run_id: Optional[str] = None,
+    event: Optional[Event] = None,
 ) -> Result:  # pragma: no cov
     """Self-Hosted job execution with passing dynamic parameters from the
     workflow execution or itself execution. It will make request to the
@@ -981,9 +982,9 @@ def azure_batch_execute(
     job: Job,
     params: DictData,
     *,
-    run_id: str
-    parent_run_id: str
-    event: Event
+    run_id: Optional[str] = None,
+    parent_run_id: Optional[str] = None,
+    event: Optional[Event] = None,
 ) -> Result:  # pragma: no cov
     """Azure Batch job execution that will run all job's stages on the Azure
     Batch Node and extract the result file to be returning context result.
@@ -1035,9 +1036,9 @@ def docker_execution(
     job: Job,
     params: DictData,
     *,
-    run_id: str
-    parent_run_id: str
-    event: Event
+    run_id: Optional[str] = None,
+    parent_run_id: Optional[str] = None,
+    event: Optional[Event] = None,
 ):  # pragma: no cov
     """Docker job execution.
 
```