fractal-server 2.11.1__py3-none-any.whl → 2.12.0a1__py3-none-any.whl
This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/__init__.py +0 -2
- fractal_server/app/models/linkuserproject.py +0 -9
- fractal_server/app/routes/aux/_job.py +1 -3
- fractal_server/app/runner/executors/slurm/ssh/executor.py +9 -6
- fractal_server/app/runner/executors/slurm/sudo/executor.py +1 -5
- fractal_server/app/runner/filenames.py +0 -2
- fractal_server/app/runner/shutdown.py +3 -27
- fractal_server/app/schemas/_validators.py +0 -19
- fractal_server/config.py +1 -15
- fractal_server/main.py +1 -12
- fractal_server/migrations/versions/1eac13a26c83_drop_v1_tables.py +67 -0
- fractal_server/string_tools.py +0 -21
- fractal_server/tasks/utils.py +0 -28
- {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a1.dist-info}/METADATA +1 -1
- {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a1.dist-info}/RECORD +19 -63
- fractal_server/app/models/v1/__init__.py +0 -13
- fractal_server/app/models/v1/dataset.py +0 -71
- fractal_server/app/models/v1/job.py +0 -101
- fractal_server/app/models/v1/project.py +0 -29
- fractal_server/app/models/v1/state.py +0 -34
- fractal_server/app/models/v1/task.py +0 -85
- fractal_server/app/models/v1/workflow.py +0 -133
- fractal_server/app/routes/admin/v1.py +0 -377
- fractal_server/app/routes/api/v1/__init__.py +0 -26
- fractal_server/app/routes/api/v1/_aux_functions.py +0 -478
- fractal_server/app/routes/api/v1/dataset.py +0 -554
- fractal_server/app/routes/api/v1/job.py +0 -195
- fractal_server/app/routes/api/v1/project.py +0 -475
- fractal_server/app/routes/api/v1/task.py +0 -203
- fractal_server/app/routes/api/v1/task_collection.py +0 -239
- fractal_server/app/routes/api/v1/workflow.py +0 -355
- fractal_server/app/routes/api/v1/workflowtask.py +0 -187
- fractal_server/app/runner/async_wrap_v1.py +0 -27
- fractal_server/app/runner/v1/__init__.py +0 -415
- fractal_server/app/runner/v1/_common.py +0 -620
- fractal_server/app/runner/v1/_local/__init__.py +0 -186
- fractal_server/app/runner/v1/_local/_local_config.py +0 -105
- fractal_server/app/runner/v1/_local/_submit_setup.py +0 -48
- fractal_server/app/runner/v1/_local/executor.py +0 -100
- fractal_server/app/runner/v1/_slurm/__init__.py +0 -312
- fractal_server/app/runner/v1/_slurm/_submit_setup.py +0 -81
- fractal_server/app/runner/v1/_slurm/get_slurm_config.py +0 -163
- fractal_server/app/runner/v1/common.py +0 -117
- fractal_server/app/runner/v1/handle_failed_job.py +0 -141
- fractal_server/app/schemas/v1/__init__.py +0 -37
- fractal_server/app/schemas/v1/applyworkflow.py +0 -161
- fractal_server/app/schemas/v1/dataset.py +0 -165
- fractal_server/app/schemas/v1/dumps.py +0 -64
- fractal_server/app/schemas/v1/manifest.py +0 -126
- fractal_server/app/schemas/v1/project.py +0 -66
- fractal_server/app/schemas/v1/state.py +0 -18
- fractal_server/app/schemas/v1/task.py +0 -167
- fractal_server/app/schemas/v1/task_collection.py +0 -110
- fractal_server/app/schemas/v1/workflow.py +0 -212
- fractal_server/tasks/v1/_TaskCollectPip.py +0 -103
- fractal_server/tasks/v1/__init__.py +0 -0
- fractal_server/tasks/v1/background_operations.py +0 -352
- fractal_server/tasks/v1/endpoint_operations.py +0 -156
- fractal_server/tasks/v1/get_collection_data.py +0 -14
- fractal_server/tasks/v1/utils.py +0 -67
- {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a1.dist-info}/LICENSE +0 -0
- {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a1.dist-info}/WHEEL +0 -0
- {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a1.dist-info}/entry_points.txt +0 -0
--- a/fractal_server/app/models/v1/dataset.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from datetime import datetime
-from typing import Any
-from typing import Optional
-
-from sqlalchemy import Column
-from sqlalchemy.ext.orderinglist import ordering_list
-from sqlalchemy.types import DateTime
-from sqlalchemy.types import JSON
-from sqlmodel import Field
-from sqlmodel import Relationship
-from sqlmodel import SQLModel
-
-from ....utils import get_timestamp
-from ...schemas.v1.dataset import _DatasetBaseV1
-from ...schemas.v1.dataset import _ResourceBaseV1
-
-
-class Resource(_ResourceBaseV1, SQLModel, table=True):
-    id: Optional[int] = Field(default=None, primary_key=True)
-    dataset_id: int = Field(foreign_key="dataset.id")
-
-
-class Dataset(_DatasetBaseV1, SQLModel, table=True):
-    """
-    Represent a dataset
-
-    Attributes:
-        id:
-            Primary key
-        project_id:
-            ID of the project the workflow belongs to.
-        meta:
-            Metadata of the Dataset
-        history:
-            History of the Dataset
-        resource_list:
-            (Mapper attribute)
-
-    """
-
-    id: Optional[int] = Field(default=None, primary_key=True)
-    project_id: int = Field(foreign_key="project.id")
-    project: "Project" = Relationship(  # noqa: F821
-        sa_relationship_kwargs=dict(lazy="selectin"),
-    )
-
-    resource_list: list[Resource] = Relationship(
-        sa_relationship_kwargs={
-            "lazy": "selectin",
-            "order_by": "Resource.id",
-            "collection_class": ordering_list("id"),
-            "cascade": "all, delete-orphan",
-        }
-    )
-
-    meta: dict[str, Any] = Field(sa_column=Column(JSON), default={})
-    history: list[dict[str, Any]] = Field(
-        sa_column=Column(JSON, server_default="[]", nullable=False)
-    )
-
-    timestamp_created: datetime = Field(
-        default_factory=get_timestamp,
-        sa_column=Column(DateTime(timezone=True), nullable=False),
-    )
-
-    class Config:
-        arbitrary_types_allowed = True
-
-    @property
-    def paths(self) -> list[str]:
-        return [r.path for r in self.resource_list]
--- a/fractal_server/app/models/v1/job.py
+++ /dev/null
@@ -1,101 +0,0 @@
-from datetime import datetime
-from typing import Any
-from typing import Optional
-
-from sqlalchemy import Column
-from sqlalchemy.types import DateTime
-from sqlalchemy.types import JSON
-from sqlmodel import Field
-from sqlmodel import SQLModel
-
-from ....utils import get_timestamp
-from ...schemas.v1 import JobStatusTypeV1
-from ...schemas.v1.applyworkflow import _ApplyWorkflowBaseV1
-
-
-class ApplyWorkflow(_ApplyWorkflowBaseV1, SQLModel, table=True):
-    """
-    Represent a workflow run
-
-    This table is responsible for storing the state of a workflow execution in
-    the database.
-
-    Attributes:
-        id:
-            Primary key.
-        project_id:
-            ID of the project the workflow belongs to, or `None` if the project
-            was deleted.
-        input_dataset_id:
-            ID of the input dataset, or `None` if the dataset was deleted.
-        output_dataset_id:
-            ID of the output dataset, or `None` if the dataset was deleted.
-        workflow_id:
-            ID of the workflow being applied, or `None` if the workflow was
-            deleted.
-        status:
-            Job status
-        workflow_dump:
-            Copy of the submitted workflow at submission.
-        input_dataset_dump:
-            Copy of the input_dataset at submission.
-        output_dataset_dump:
-            Copy of the output_dataset at submission.
-        start_timestamp:
-            Timestamp of when the run began.
-        end_timestamp:
-            Timestamp of when the run ended or failed.
-        status:
-            Status of the run.
-        log:
-            Forward of the workflow logs.
-        user_email:
-            Email address of the user who submitted the job.
-        slurm_account:
-            Account to be used when submitting the job to SLURM (see "account"
-            option in [`sbatch`
-            documentation](https://slurm.schedmd.com/sbatch.html#SECTION_OPTIONS)).
-        first_task_index:
-        last_task_index:
-    """
-
-    class Config:
-        arbitrary_types_allowed = True
-
-    id: Optional[int] = Field(default=None, primary_key=True)
-
-    project_id: Optional[int] = Field(foreign_key="project.id")
-    workflow_id: Optional[int] = Field(foreign_key="workflow.id")
-    input_dataset_id: Optional[int] = Field(foreign_key="dataset.id")
-    output_dataset_id: Optional[int] = Field(foreign_key="dataset.id")
-
-    user_email: str = Field(nullable=False)
-    slurm_account: Optional[str]
-
-    input_dataset_dump: dict[str, Any] = Field(
-        sa_column=Column(JSON, nullable=False)
-    )
-    output_dataset_dump: dict[str, Any] = Field(
-        sa_column=Column(JSON, nullable=False)
-    )
-    workflow_dump: dict[str, Any] = Field(
-        sa_column=Column(JSON, nullable=False)
-    )
-    project_dump: dict[str, Any] = Field(
-        sa_column=Column(JSON, nullable=False)
-    )
-
-    working_dir: Optional[str]
-    working_dir_user: Optional[str]
-    first_task_index: int
-    last_task_index: int
-
-    start_timestamp: datetime = Field(
-        default_factory=get_timestamp,
-        sa_column=Column(DateTime(timezone=True), nullable=False),
-    )
-    end_timestamp: Optional[datetime] = Field(
-        default=None, sa_column=Column(DateTime(timezone=True))
-    )
-    status: str = JobStatusTypeV1.SUBMITTED
-    log: Optional[str] = None
--- a/fractal_server/app/models/v1/project.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from datetime import datetime
-from typing import Optional
-
-from sqlalchemy import Column
-from sqlalchemy.types import DateTime
-from sqlmodel import Field
-from sqlmodel import Relationship
-from sqlmodel import SQLModel
-
-from . import LinkUserProject
-from ....utils import get_timestamp
-from ...schemas.v1.project import _ProjectBaseV1
-from fractal_server.app.models import UserOAuth
-
-
-class Project(_ProjectBaseV1, SQLModel, table=True):
-
-    id: Optional[int] = Field(default=None, primary_key=True)
-    timestamp_created: datetime = Field(
-        default_factory=get_timestamp,
-        sa_column=Column(DateTime(timezone=True), nullable=False),
-    )
-
-    user_list: list[UserOAuth] = Relationship(
-        link_model=LinkUserProject,
-        sa_relationship_kwargs={
-            "lazy": "selectin",
-        },
-    )
--- a/fractal_server/app/models/v1/state.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from datetime import datetime
-from typing import Any
-from typing import Optional
-
-from sqlalchemy import Column
-from sqlalchemy.types import DateTime
-from sqlalchemy.types import JSON
-from sqlmodel import Field
-from sqlmodel import SQLModel
-
-from ....utils import get_timestamp
-
-
-class State(SQLModel, table=True):
-    """
-    Store arbitrary data in the database
-
-    This table is just a state interchange that allows the system to store
-    arbitrary data for later retrieval. This is particuarly important for long
-    background tasks, in which it is not possible to return a meaningful
-    response to the client within a single request lifespan.
-
-    Attributes:
-        id: Primary key
-        data: Content of the `State`
-        timestamp: Timestap of the `State`
-    """
-
-    id: Optional[int] = Field(default=None, primary_key=True)
-    data: dict[str, Any] = Field(sa_column=Column(JSON), default={})
-    timestamp: datetime = Field(
-        default_factory=get_timestamp,
-        sa_column=Column(DateTime(timezone=True)),
-    )
--- a/fractal_server/app/models/v1/task.py
+++ /dev/null
@@ -1,85 +0,0 @@
-import json
-import logging
-from typing import Any
-from typing import Optional
-
-from pydantic import HttpUrl
-from sqlalchemy import Column
-from sqlalchemy.types import JSON
-from sqlmodel import Field
-from sqlmodel import SQLModel
-
-from ...schemas.v1.task import _TaskBaseV1
-
-
-class Task(_TaskBaseV1, SQLModel, table=True):
-    """
-    Task model
-
-    Attributes:
-        id: Primary key
-        command: Executable command
-        input_type: Expected type of input `Dataset`
-        output_type: Expected type of output `Dataset`
-        meta:
-            Additional metadata related to execution (e.g. computational
-            resources)
-        source: inherited from `_TaskBase`
-        name: inherited from `_TaskBase`
-        args_schema: JSON schema of task arguments
-        args_schema_version:
-            label pointing at how the JSON schema of task arguments was
-            generated
-    """
-
-    id: Optional[int] = Field(default=None, primary_key=True)
-    name: str
-    command: str
-    source: str = Field(unique=True)
-    input_type: str
-    output_type: str
-    meta: Optional[dict[str, Any]] = Field(sa_column=Column(JSON), default={})
-    owner: Optional[str] = None
-    version: Optional[str] = None
-    args_schema: Optional[dict[str, Any]] = Field(
-        sa_column=Column(JSON), default=None
-    )
-    args_schema_version: Optional[str]
-    docs_info: Optional[str] = None
-    docs_link: Optional[HttpUrl] = None
-
-    @property
-    def parallelization_level(self) -> Optional[str]:
-        try:
-            return self.meta["parallelization_level"]
-        except KeyError:
-            return None
-
-    @property
-    def is_parallel(self) -> bool:
-        return bool(self.parallelization_level)
-
-    @property
-    def default_args_from_args_schema(self) -> dict[str, Any]:
-        """
-        Extract default arguments from args_schema
-        """
-        # Return {} if there is no args_schema
-        if self.args_schema is None:
-            return {}
-        # Try to construct default_args
-        try:
-            default_args = {}
-            properties = self.args_schema["properties"]
-            for prop_name, prop_schema in properties.items():
-                default_value = prop_schema.get("default", None)
-                if default_value is not None:
-                    default_args[prop_name] = default_value
-            return default_args
-        except KeyError as e:
-            logging.warning(
-                "Cannot set default_args from args_schema="
-                f"{json.dumps(self.args_schema)}\n"
-                f"Original KeyError: {str(e)}"
-            )
-            return {}
--- a/fractal_server/app/models/v1/workflow.py
+++ /dev/null
@@ -1,133 +0,0 @@
-from datetime import datetime
-from typing import Any
-from typing import Optional
-from typing import Union
-
-from pydantic import validator
-from sqlalchemy import Column
-from sqlalchemy.ext.orderinglist import ordering_list
-from sqlalchemy.types import DateTime
-from sqlalchemy.types import JSON
-from sqlmodel import Field
-from sqlmodel import Relationship
-from sqlmodel import SQLModel
-
-from ....utils import get_timestamp
-from ...schemas.v1.workflow import _WorkflowBaseV1
-from ...schemas.v1.workflow import _WorkflowTaskBaseV1
-from .task import Task
-
-
-class WorkflowTask(_WorkflowTaskBaseV1, SQLModel, table=True):
-    """
-    A Task as part of a Workflow
-
-    This is a crossing table between Task and Workflow. In addition to the
-    foreign keys, it allows for parameter overriding and keeps the order
-    within the list of tasks of the workflow.
-
-
-    Attributes:
-        id:
-            Primary key
-        workflow_id:
-            ID of the `Workflow` the `WorkflowTask` belongs to
-        task_id:
-            ID of the task corresponding to the `WorkflowTask`
-        order:
-            Positional order of the `WorkflowTask` in `Workflow.task_list`
-        meta:
-            Additional parameters useful for execution
-        args:
-            Task arguments
-        task:
-            `Task` object associated with the current `WorkflowTask`
-
-    """
-
-    class Config:
-        arbitrary_types_allowed = True
-        fields = {"parent": {"exclude": True}}
-
-    id: Optional[int] = Field(default=None, primary_key=True)
-
-    workflow_id: int = Field(foreign_key="workflow.id")
-    task_id: int = Field(foreign_key="task.id")
-    order: Optional[int]
-    meta: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))
-    args: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))
-    task: Task = Relationship(sa_relationship_kwargs=dict(lazy="selectin"))
-
-    @validator("args")
-    def validate_args(cls, value: dict = None):
-        """
-        Prevent fractal task reserved parameter names from entering args
-
-        Forbidden argument names are `input_paths`, `output_path`, `metadata`,
-        `component`.
-        """
-        if value is None:
-            return
-        forbidden_args_keys = {
-            "input_paths",
-            "output_path",
-            "metadata",
-            "component",
-        }
-        args_keys = set(value.keys())
-        intersect_keys = forbidden_args_keys.intersection(args_keys)
-        if intersect_keys:
-            raise ValueError(
-                "`args` contains the following forbidden keys: "
-                f"{intersect_keys}"
-            )
-        return value
-
-    @property
-    def is_parallel(self) -> bool:
-        return self.task.is_parallel
-
-    @property
-    def parallelization_level(self) -> Union[str, None]:
-        return self.task.parallelization_level
-
-
-class Workflow(_WorkflowBaseV1, SQLModel, table=True):
-    """
-    Workflow
-
-    Attributes:
-        id:
-            Primary key
-        project_id:
-            ID of the project the workflow belongs to.
-        task_list:
-            List of associations to tasks.
-    """
-
-    id: Optional[int] = Field(default=None, primary_key=True)
-    project_id: int = Field(foreign_key="project.id")
-    project: "Project" = Relationship(  # noqa: F821
-        sa_relationship_kwargs=dict(lazy="selectin"),
-    )
-
-    task_list: list[WorkflowTask] = Relationship(
-        sa_relationship_kwargs=dict(
-            lazy="selectin",
-            order_by="WorkflowTask.order",
-            collection_class=ordering_list("order"),
-            cascade="all, delete-orphan",
-        ),
-    )
-    timestamp_created: datetime = Field(
-        default_factory=get_timestamp,
-        sa_column=Column(DateTime(timezone=True), nullable=False),
-    )
-
-    @property
-    def input_type(self):
-        return self.task_list[0].task.input_type
-
-    @property
-    def output_type(self):
-        return self.task_list[-1].task.output_type