fractal-server 2.11.1__py3-none-any.whl → 2.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +0 -2
  3. fractal_server/app/models/linkuserproject.py +0 -9
  4. fractal_server/app/routes/aux/_job.py +1 -3
  5. fractal_server/app/runner/executors/slurm/ssh/executor.py +9 -6
  6. fractal_server/app/runner/executors/slurm/sudo/executor.py +1 -5
  7. fractal_server/app/runner/filenames.py +0 -2
  8. fractal_server/app/runner/shutdown.py +3 -27
  9. fractal_server/app/schemas/_validators.py +0 -19
  10. fractal_server/config.py +1 -15
  11. fractal_server/main.py +1 -12
  12. fractal_server/migrations/versions/1eac13a26c83_drop_v1_tables.py +67 -0 (see the sketch after this list)
  13. fractal_server/string_tools.py +0 -21
  14. fractal_server/tasks/utils.py +0 -28
  15. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0.dist-info}/METADATA +1 -1
  16. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0.dist-info}/RECORD +19 -63
  17. fractal_server/app/models/v1/__init__.py +0 -13
  18. fractal_server/app/models/v1/dataset.py +0 -71
  19. fractal_server/app/models/v1/job.py +0 -101
  20. fractal_server/app/models/v1/project.py +0 -29
  21. fractal_server/app/models/v1/state.py +0 -34
  22. fractal_server/app/models/v1/task.py +0 -85
  23. fractal_server/app/models/v1/workflow.py +0 -133
  24. fractal_server/app/routes/admin/v1.py +0 -377
  25. fractal_server/app/routes/api/v1/__init__.py +0 -26
  26. fractal_server/app/routes/api/v1/_aux_functions.py +0 -478
  27. fractal_server/app/routes/api/v1/dataset.py +0 -554
  28. fractal_server/app/routes/api/v1/job.py +0 -195
  29. fractal_server/app/routes/api/v1/project.py +0 -475
  30. fractal_server/app/routes/api/v1/task.py +0 -203
  31. fractal_server/app/routes/api/v1/task_collection.py +0 -239
  32. fractal_server/app/routes/api/v1/workflow.py +0 -355
  33. fractal_server/app/routes/api/v1/workflowtask.py +0 -187
  34. fractal_server/app/runner/async_wrap_v1.py +0 -27
  35. fractal_server/app/runner/v1/__init__.py +0 -415
  36. fractal_server/app/runner/v1/_common.py +0 -620
  37. fractal_server/app/runner/v1/_local/__init__.py +0 -186
  38. fractal_server/app/runner/v1/_local/_local_config.py +0 -105
  39. fractal_server/app/runner/v1/_local/_submit_setup.py +0 -48
  40. fractal_server/app/runner/v1/_local/executor.py +0 -100
  41. fractal_server/app/runner/v1/_slurm/__init__.py +0 -312
  42. fractal_server/app/runner/v1/_slurm/_submit_setup.py +0 -81
  43. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +0 -163
  44. fractal_server/app/runner/v1/common.py +0 -117
  45. fractal_server/app/runner/v1/handle_failed_job.py +0 -141
  46. fractal_server/app/schemas/v1/__init__.py +0 -37
  47. fractal_server/app/schemas/v1/applyworkflow.py +0 -161
  48. fractal_server/app/schemas/v1/dataset.py +0 -165
  49. fractal_server/app/schemas/v1/dumps.py +0 -64
  50. fractal_server/app/schemas/v1/manifest.py +0 -126
  51. fractal_server/app/schemas/v1/project.py +0 -66
  52. fractal_server/app/schemas/v1/state.py +0 -18
  53. fractal_server/app/schemas/v1/task.py +0 -167
  54. fractal_server/app/schemas/v1/task_collection.py +0 -110
  55. fractal_server/app/schemas/v1/workflow.py +0 -212
  56. fractal_server/tasks/v1/_TaskCollectPip.py +0 -103
  57. fractal_server/tasks/v1/__init__.py +0 -0
  58. fractal_server/tasks/v1/background_operations.py +0 -352
  59. fractal_server/tasks/v1/endpoint_operations.py +0 -156
  60. fractal_server/tasks/v1/get_collection_data.py +0 -14
  61. fractal_server/tasks/v1/utils.py +0 -67
  62. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0.dist-info}/LICENSE +0 -0
  63. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0.dist-info}/WHEEL +0 -0
  64. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0.dist-info}/entry_points.txt +0 -0
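
Note: the only module added in this release is the Alembic migration 1eac13a26c83_drop_v1_tables.py (item 12 above); its body is not included in this section. As rough orientation only, a drop-tables migration typically has the shape sketched below; the table names and the down_revision value are placeholders, not taken from the actual file.

"""Hypothetical sketch of a drop-V1-tables Alembic migration (NOT the real file)."""
from alembic import op

# Revision identifiers (the down_revision below is a placeholder).
revision = "1eac13a26c83"
down_revision = None

# Placeholder names; the real migration lists the actual V1 tables.
V1_TABLES = ["example_v1_table_a", "example_v1_table_b"]


def upgrade() -> None:
    # Drop each legacy V1 table; ordering matters when foreign keys are involved.
    for table_name in V1_TABLES:
        op.drop_table(table_name)


def downgrade() -> None:
    # Recreating the dropped tables is out of scope for this sketch.
    raise NotImplementedError("Downgrade is not supported in this sketch.")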
fractal_server/app/runner/v1/handle_failed_job.py
@@ -1,141 +0,0 @@
- # Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and
- # University of Zurich
- #
- # Original authors:
- # Tommaso Comparin <tommaso.comparin@exact-lab.it>
- # Marco Franzon <marco.franzon@exact-lab.it>
- #
- # This file is part of Fractal and was originally developed by eXact lab S.r.l.
- # <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
- # Institute for Biomedical Research and Pelkmans Lab from the University of
- # Zurich.
- """
- Helper functions to handle Dataset history.
- """
- import json
- import logging
- from copy import deepcopy
- from pathlib import Path
- from typing import Any
- from typing import Optional
-
- from ...models.v1 import ApplyWorkflow
- from ...models.v1 import Dataset
- from ...models.v1 import Workflow
- from ...models.v1 import WorkflowTask
- from ...schemas.v1 import WorkflowTaskStatusTypeV1
- from ..filenames import HISTORY_FILENAME_V1
- from ..filenames import METADATA_FILENAME_V1
-
-
- def assemble_history_failed_job(
-     job: ApplyWorkflow,
-     output_dataset: Dataset,
-     workflow: Workflow,
-     logger: logging.Logger,
-     failed_wftask: Optional[WorkflowTask] = None,
- ) -> list[dict[str, Any]]:
-     """
-     Assemble `history` after a workflow-execution job fails.
-
-     Args:
-         job:
-             The failed `ApplyWorkflow` object.
-         output_dataset:
-             The `output_dataset` associated to `job`.
-         workflow:
-             The `workflow` associated to `job`.
-         logger: A logger instance.
-         failed_wftask:
-             If set, append it to `history` during step 3; if `None`, infer
-             it by comparing the job task list and the one in
-             `tmp_metadata_file`.
-
-     Returns:
-         The new value of `history`, to be merged into
-         `output_dataset.meta`.
-     """
-
-     # The final value of the history attribute should include up to three
-     # parts, coming from: the database, the temporary file, the failed-task
-     # information.
-
-     # Part 1: Read exising history from DB
-     new_history = output_dataset.history
-
-     # Part 2: Extend history based on tmp_metadata_file
-     tmp_history_file = Path(job.working_dir) / HISTORY_FILENAME_V1
-     try:
-         with tmp_history_file.open("r") as f:
-             tmp_file_history = json.load(f)
-             new_history.extend(tmp_file_history)
-     except FileNotFoundError:
-         tmp_file_history = []
-
-     # Part 3/A: Identify failed task, if needed
-     if failed_wftask is None:
-         job_wftasks = workflow.task_list[
-             job.first_task_index : (job.last_task_index + 1)  # noqa
-         ]
-         tmp_file_wftasks = [
-             history_item["workflowtask"] for history_item in tmp_file_history
-         ]
-         if len(job_wftasks) <= len(tmp_file_wftasks):
-             n_tasks_job = len(job_wftasks)
-             n_tasks_tmp = len(tmp_file_wftasks)
-             logger.error(
-                 "Cannot identify the failed task based on job task list "
-                 f"(length {n_tasks_job}) and temporary-file task list "
-                 f"(length {n_tasks_tmp})."
-             )
-             logger.error("Failed task not appended to history.")
-         else:
-             failed_wftask = job_wftasks[len(tmp_file_wftasks)]
-
-     # Part 3/B: Append failed task to history
-     if failed_wftask is not None:
-         failed_wftask_dump = failed_wftask.model_dump(exclude={"task"})
-         failed_wftask_dump["task"] = failed_wftask.task.model_dump()
-         new_history_item = dict(
-             workflowtask=failed_wftask_dump,
-             status=WorkflowTaskStatusTypeV1.FAILED,
-             parallelization=dict(
-                 parallelization_level=failed_wftask.parallelization_level,
-             ),
-         )
-         new_history.append(new_history_item)
-
-     return new_history
-
-
- def assemble_meta_failed_job(
-     job: ApplyWorkflow,
-     output_dataset: Dataset,
- ) -> dict[str, Any]:
-     """
-     Assemble `Dataset.meta` (history excluded) for a failed workflow-execution.
-
-     Assemble new value of `output_dataset.meta` based on the last successful
-     task, i.e. based on the content of the temporary `METADATA_FILENAME` file.
-
-     Args:
-         job:
-             The failed `ApplyWorkflow` object.
-         output_dataset:
-             The `output_dataset` associated to `job`.
-
-     Returns:
-         The new value of `output_dataset.meta`, apart from its `history` key.
-     """
-
-     new_meta = deepcopy(output_dataset.meta)
-     metadata_file = Path(job.working_dir) / METADATA_FILENAME_V1
-     try:
-         with metadata_file.open("r") as f:
-             metadata_update = json.load(f)
-         for key, value in metadata_update.items():
-             new_meta[key] = value
-     except FileNotFoundError:
-         pass
-
-     return new_meta
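
For orientation, the following is a minimal sketch, with invented values, of the shape of the history item that assemble_history_failed_job appends once it identifies the failed task; in the real code the workflowtask entry is a full WorkflowTask dump rather than this trimmed dict.

# Illustrative only: every value below is made up.
existing_history: list[dict] = []  # history already stored in output_dataset

failed_item = dict(
    workflowtask=dict(
        id=7,
        order=2,
        workflow_id=3,
        task_id=11,
        task=dict(name="example-task"),
    ),
    status="failed",  # WorkflowTaskStatusTypeV1.FAILED is a str enum with value "failed"
    parallelization=dict(parallelization_level=None),
)
new_history = existing_history + [failed_item]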
fractal_server/app/schemas/v1/__init__.py
@@ -1,37 +0,0 @@
- """
- Schemas for API request/response bodies
- """
- from .applyworkflow import ApplyWorkflowCreateV1  # noqa: F401
- from .applyworkflow import ApplyWorkflowReadV1  # noqa: F401
- from .applyworkflow import ApplyWorkflowUpdateV1  # noqa: F401
- from .applyworkflow import JobStatusTypeV1  # noqa: F401
- from .dataset import DatasetCreateV1  # noqa: F401
- from .dataset import DatasetReadV1  # noqa: F401
- from .dataset import DatasetStatusReadV1  # noqa: F401
- from .dataset import DatasetUpdateV1  # noqa: F401
- from .dataset import ResourceCreateV1  # noqa: F401
- from .dataset import ResourceReadV1  # noqa: F401
- from .dataset import ResourceUpdateV1  # noqa: F401
- from .manifest import ManifestV1  # noqa: F401
- from .manifest import TaskManifestV1  # noqa: F401
- from .project import ProjectCreateV1  # noqa: F401
- from .project import ProjectReadV1  # noqa: F401
- from .project import ProjectUpdateV1  # noqa: F401
- from .state import StateRead  # noqa: F401
- from .task import TaskCreateV1  # noqa: F401
- from .task import TaskImportV1  # noqa: F401
- from .task import TaskReadV1  # noqa: F401
- from .task import TaskUpdateV1  # noqa: F401
- from .task_collection import TaskCollectPipV1  # noqa: F401
- from .task_collection import TaskCollectStatusV1  # noqa: F401
- from .workflow import WorkflowCreateV1  # noqa: F401
- from .workflow import WorkflowExportV1  # noqa: F401
- from .workflow import WorkflowImportV1  # noqa: F401
- from .workflow import WorkflowReadV1  # noqa: F401
- from .workflow import WorkflowTaskCreateV1  # noqa: F401
- from .workflow import WorkflowTaskExportV1  # noqa: F401
- from .workflow import WorkflowTaskImportV1  # noqa: F401
- from .workflow import WorkflowTaskReadV1  # noqa: F401
- from .workflow import WorkflowTaskStatusTypeV1  # noqa: F401
- from .workflow import WorkflowTaskUpdateV1  # noqa: F401
- from .workflow import WorkflowUpdateV1  # noqa: F401
fractal_server/app/schemas/v1/applyworkflow.py
@@ -1,161 +0,0 @@
- from datetime import datetime
- from enum import Enum
- from typing import Optional
-
- from pydantic import BaseModel
- from pydantic import validator
- from pydantic.types import StrictStr
-
- from .._validators import valstr
- from .dumps import DatasetDumpV1
- from .dumps import ProjectDumpV1
- from .dumps import WorkflowDumpV1
-
-
- __all__ = (
-     "_ApplyWorkflowBaseV1",
-     "ApplyWorkflowCreateV1",
-     "ApplyWorkflowReadV1",
- )
-
-
- class JobStatusTypeV1(str, Enum):
-     """
-     Define the available job statuses
-
-     Attributes:
-         SUBMITTED:
-             The job was created. This does not guarantee that it was also
-             submitted to an executor (e.g. other errors could have prevented
-             this), nor that it is actually running (e.g. SLURM jobs could be
-             still in the queue).
-         DONE:
-             The job successfully reached its end.
-         FAILED:
-             The workflow terminated with an error.
-     """
-
-     SUBMITTED = "submitted"
-     DONE = "done"
-     FAILED = "failed"
-
-
- class _ApplyWorkflowBaseV1(BaseModel):
-     """
-     Base class for `ApplyWorkflow`.
-
-     Attributes:
-         worker_init:
-     """
-
-     worker_init: Optional[str]
-
-
- class ApplyWorkflowCreateV1(_ApplyWorkflowBaseV1):
-     """
-     Class for `ApplyWorkflow` creation.
-
-     Attributes:
-         first_task_index:
-         last_task_index:
-         slurm_account:
-     """
-
-     first_task_index: Optional[int] = None
-     last_task_index: Optional[int] = None
-     slurm_account: Optional[StrictStr] = None
-
-     # Validators
-     _worker_init = validator("worker_init", allow_reuse=True)(
-         valstr("worker_init")
-     )
-
-     @validator("first_task_index", always=True)
-     def first_task_index_non_negative(cls, v, values):
-         """
-         Check that `first_task_index` is non-negative.
-         """
-         if v is not None and v < 0:
-             raise ValueError(
-                 f"first_task_index cannot be negative (given: {v})"
-             )
-         return v
-
-     @validator("last_task_index", always=True)
-     def first_last_task_indices(cls, v, values):
-         """
-         Check that `last_task_index` is non-negative, and that it is not
-         smaller than `first_task_index`.
-         """
-         if v is not None and v < 0:
-             raise ValueError(
-                 f"last_task_index cannot be negative (given: {v})"
-             )
-
-         first_task_index = values.get("first_task_index")
-         last_task_index = v
-         if first_task_index is not None and last_task_index is not None:
-             if first_task_index > last_task_index:
-                 raise ValueError(
-                     f"{first_task_index=} cannot be larger than "
-                     f"{last_task_index=}"
-                 )
-         return v
-
-
- class ApplyWorkflowReadV1(_ApplyWorkflowBaseV1):
-     """
-     Class for `ApplyWorkflow` read from database.
-
-     Attributes:
-         id:
-         project_id:
-         project_dump:
-         user_email:
-         slurm_account:
-         workflow_id:
-         workflow_dump:
-         input_dataset_id:
-         input_dataset_dump:
-         output_dataset_id:
-         output_dataset_dump:
-         start_timestamp:
-         end_timestamp:
-         status:
-         log:
-         working_dir:
-         working_dir_user:
-         first_task_index:
-         last_task_index:
-     """
-
-     id: int
-     project_id: Optional[int]
-     project_dump: ProjectDumpV1
-     user_email: str
-     slurm_account: Optional[str]
-     workflow_id: Optional[int]
-     workflow_dump: WorkflowDumpV1
-     input_dataset_id: Optional[int]
-     input_dataset_dump: DatasetDumpV1
-     output_dataset_id: Optional[int]
-     output_dataset_dump: DatasetDumpV1
-     start_timestamp: datetime
-     end_timestamp: Optional[datetime]
-     status: str
-     log: Optional[str]
-     working_dir: Optional[str]
-     working_dir_user: Optional[str]
-     first_task_index: Optional[int]
-     last_task_index: Optional[int]
-
-
- class ApplyWorkflowUpdateV1(BaseModel):
-     """
-     Class for updating a job status.
-
-     Attributes:
-         status: New job status.
-     """
-
-     status: JobStatusTypeV1
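
The two validators above enforce that both indices are non-negative and that first_task_index does not exceed last_task_index. A minimal sketch of that behavior follows; it assumes a pre-2.12.0 fractal-server install, since this module is removed in 2.12.0.

from pydantic import ValidationError

# Import path as re-exported by fractal_server/app/schemas/v1/__init__.py (2.11.x).
from fractal_server.app.schemas.v1 import ApplyWorkflowCreateV1

# Accepted: 0 <= first_task_index <= last_task_index
job_create = ApplyWorkflowCreateV1(first_task_index=0, last_task_index=2)

# Rejected: first_task_index larger than last_task_index
try:
    ApplyWorkflowCreateV1(first_task_index=3, last_task_index=1)
except ValidationError as exc:
    print(exc)  # includes "first_task_index=3 cannot be larger than last_task_index=1"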
fractal_server/app/schemas/v1/dataset.py
@@ -1,165 +0,0 @@
- from datetime import datetime
- from typing import Any
- from typing import Optional
-
- from pydantic import BaseModel
- from pydantic import Field
- from pydantic import validator
-
- from .._validators import val_absolute_path
- from .._validators import valstr
- from .dumps import WorkflowTaskDumpV1
- from .project import ProjectReadV1
- from .workflow import WorkflowTaskStatusTypeV1
-
- __all__ = (
-     "DatasetUpdateV1",
-     "DatasetCreateV1",
-     "DatasetReadV1",
-     "ResourceCreateV1",
-     "ResourceReadV1",
-     "ResourceUpdateV1",
-     "DatasetStatusReadV1",
- )
-
-
- class _ResourceBaseV1(BaseModel):
-     """
-     Base class for `Resource`.
-
-     Attributes:
-         path:
-     """
-
-     path: str
-
-
- class ResourceCreateV1(_ResourceBaseV1):
-     """
-     Class for `Resource` creation.
-     """
-
-     # Validators
-     _path = validator("path", allow_reuse=True)(val_absolute_path("path"))
-
-
- class ResourceUpdateV1(_ResourceBaseV1):
-     """
-     Class for `Resource` update.
-     """
-
-     # Validators
-     _path = validator("path", allow_reuse=True)(val_absolute_path("path"))
-
-
- class ResourceReadV1(_ResourceBaseV1):
-     """
-     Class for `Resource` read from database.
-
-     Attributes:
-         id:
-         dataset_id:
-     """
-
-     id: int
-     dataset_id: int
-
-
- class _DatasetHistoryItemV1(BaseModel):
-     """
-     Class for an item of `Dataset.history`.
-
-     Attributes:
-         workflowtask:
-         status:
-         parallelization: If provided, it includes keys `parallelization_level`
-             and `component_list`.
-     """
-
-     workflowtask: WorkflowTaskDumpV1
-     status: WorkflowTaskStatusTypeV1
-     parallelization: Optional[dict]
-
-
- class _DatasetBaseV1(BaseModel):
-     """
-     Base class for `Dataset`.
-
-     Attributes:
-         name:
-         type:
-         meta:
-         history:
-         read_only:
-     """
-
-     name: str
-     type: Optional[str]
-     meta: dict[str, Any] = Field(default={})
-     history: list[_DatasetHistoryItemV1] = Field(default=[])
-     read_only: bool = False
-
-
- class DatasetUpdateV1(_DatasetBaseV1):
-     """
-     Class for `Dataset` update.
-
-     Attributes:
-         name:
-         meta:
-         history:
-         read_only:
-     """
-
-     name: Optional[str]
-     meta: Optional[dict[str, Any]] = None
-     history: Optional[list[_DatasetHistoryItemV1]] = None
-     read_only: Optional[bool]
-
-     # Validators
-     _name = validator("name", allow_reuse=True)(valstr("name"))
-     _type = validator("type", allow_reuse=True)(valstr("type"))
-
-
- class DatasetCreateV1(_DatasetBaseV1):
-     """
-     Class for `Dataset` creation.
-     """
-
-     # Validators
-     _name = validator("name", allow_reuse=True)(valstr("name"))
-     _type = validator("type", allow_reuse=True)(valstr("type"))
-
-
- class DatasetReadV1(_DatasetBaseV1):
-     """
-     Class for `Dataset` read from database.
-
-     Attributes:
-         id:
-         read_only:
-         resource_list:
-         project_id:
-         project:
-     """
-
-     id: int
-     resource_list: list[ResourceReadV1]
-     project_id: int
-     read_only: bool
-     project: ProjectReadV1
-     timestamp_created: datetime
-
-
- class DatasetStatusReadV1(BaseModel):
-     """
-     Response type for the
-     `/project/{project_id}/dataset/{dataset_id}/status/` endpoint
-     """
-
-     status: Optional[
-         dict[
-             int,
-             WorkflowTaskStatusTypeV1,
-         ]
-     ] = None
fractal_server/app/schemas/v1/dumps.py
@@ -1,64 +0,0 @@
- """
-
- Dump models differ from their Read counterpart in that:
- * They are directly JSON-able, without any additional encoder.
- * They may only include a subset of the Read attributes.
-
- These models are used in at least two situations:
- 1. In the "*_dump" attributes of ApplyWorkflow models;
- 2. In the `_DatasetHistoryItem.workflowtask` model, to trim its size.
- """
- from typing import Optional
-
- from pydantic import BaseModel
- from pydantic import Extra
-
-
- class ProjectDumpV1(BaseModel, extra=Extra.forbid):
-
-     id: int
-     name: str
-     read_only: bool
-     timestamp_created: str
-
-
- class TaskDumpV1(BaseModel):
-     id: int
-     source: str
-     name: str
-     command: str
-     input_type: str
-     output_type: str
-     owner: Optional[str]
-     version: Optional[str]
-
-
- class WorkflowTaskDumpV1(BaseModel):
-     id: int
-     order: Optional[int]
-     workflow_id: int
-     task_id: int
-     task: TaskDumpV1
-
-
- class WorkflowDumpV1(BaseModel):
-     id: int
-     name: str
-     project_id: int
-     timestamp_created: str
-
-
- class ResourceDumpV1(BaseModel):
-     id: int
-     path: str
-     dataset_id: int
-
-
- class DatasetDumpV1(BaseModel):
-     id: int
-     name: str
-     type: Optional[str]
-     read_only: bool
-     resource_list: list[ResourceDumpV1]
-     project_id: int
-     timestamp_created: str
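
As the module docstring above notes, dump models are directly JSON-able because fields such as timestamp_created are plain strings. A minimal sketch with illustrative values (again assuming a pre-2.12.0 install, since the module is removed in this release):

import json

from fractal_server.app.schemas.v1.dumps import ProjectDumpV1  # removed in 2.12.0

dump = ProjectDumpV1(
    id=1,
    name="example-project",
    read_only=False,
    timestamp_created="2024-01-01T00:00:00+00:00",
)
# No custom JSON encoder is needed, unlike Read models that carry datetime fields.
print(json.dumps(dump.dict()))  # use .model_dump() if pydantic v2 is installed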
fractal_server/app/schemas/v1/manifest.py
@@ -1,126 +0,0 @@
- from typing import Any
- from typing import Optional
- from typing import TypeVar
-
- from pydantic import BaseModel
- from pydantic import Field
- from pydantic import HttpUrl
- from pydantic import root_validator
- from pydantic import validator
-
-
- __all__ = ("TaskManifestV1", "ManifestV1")
-
-
- class _TaskManifestBaseV1(BaseModel):
-     """
-     Base class for `TaskManifestV1`.
-
-     Represents a task within a manfest
-
-     Attributes:
-         name:
-             The task name
-         executable:
-             Path to the executable relative to the package root
-
-             Note: by package root we mean "as it will be installed". If a
-             package `Pkg` installs in the folder `pkg` the executable
-             `pkg/executable.py`, this attribute must contain only
-             `executable.py`.
-         input_type:
-             The input type accepted by the task
-         output_type:
-             The output type returned by the task
-         meta:
-             Additional information about the package, such as hash of the
-             executable, specific runtime requirements (e.g., need_gpu=True),
-             etc.
-         args_schema:
-             JSON Schema for task arguments
-         docs_info:
-             Additional information about the Task, coming from the docstring.
-         docs_link:
-             Link to Task docs.
-     """
-
-     name: str
-     executable: str
-     input_type: str
-     output_type: str
-     meta: Optional[dict[str, Any]] = Field(default_factory=dict)
-     args_schema: Optional[dict[str, Any]]
-     docs_info: Optional[str]
-     docs_link: Optional[HttpUrl]
-
-
- TaskManifestType = TypeVar("TaskManifestType", bound=_TaskManifestBaseV1)
-
-
- class _ManifestBaseV1(BaseModel):
-     """
-     Base class for `ManifestV1`.
-
-     Packages containing tasks are required to include a special file
-     `__FRACTAL_MANIFEST__.json` in order to be discovered and used by Fractal.
-
-     This model class and the model classes it depends on provide the base
-     schema to read, write and validate manifests.
-
-     Attributes:
-         manifest_version:
-             A version string that provides indication for compatibility between
-             manifests as the schema evolves. This is for instance used by
-             Fractal to determine which subclass of the present base class needs
-             be used to read and validate the input.
-         task_list : list[TaskManifestType]
-             The list of tasks, represented as specified by subclasses of the
-             _TaskManifestBase (a.k.a. TaskManifestType)
-         has_args_schemas:
-             `True` if the manifest incldues JSON Schemas for the arguments of
-             each task.
-         args_schema_version:
-             Label of how `args_schema`s were generated (e.g. `pydantic_v1`).
-     """
-
-     manifest_version: str
-     task_list: list[TaskManifestType]
-     has_args_schemas: bool = False
-     args_schema_version: Optional[str]
-
-     @root_validator()
-     def _check_args_schemas_are_present(cls, values):
-         has_args_schemas = values["has_args_schemas"]
-         task_list = values["task_list"]
-         if has_args_schemas:
-             for task in task_list:
-                 if task.args_schema is None:
-                     raise ValueError(
-                         f'has_args_schemas={has_args_schemas} but task "'
-                         f'{task.name}" has args_schema={task.args_schema}.'
-                     )
-         return values
-
-
- class TaskManifestV1(_TaskManifestBaseV1):
-     """
-     Task manifest schema version 1.
-     """
-
-     pass
-
-
- class ManifestV1(_ManifestBaseV1):
-     """
-     Manifest schema version 1.
-
-     Attributes:
-         task_list:
-     """
-
-     task_list: list[TaskManifestV1]
-
-     @validator("manifest_version")
-     def manifest_version_1(cls, value):
-         if value != "1":
-             raise ValueError(f"Wrong manifest version (given {value})")
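
For context, here is a minimal, purely illustrative payload that satisfies ManifestV1 as defined above; the task fields are invented, and the import assumes a pre-2.12.0 install since the module is removed in this release.

from fractal_server.app.schemas.v1 import ManifestV1  # removed in 2.12.0

# A __FRACTAL_MANIFEST__.json file would contain the JSON form of this object.
manifest = ManifestV1(
    manifest_version="1",
    has_args_schemas=False,
    task_list=[
        dict(
            name="Example Task",
            executable="example_task.py",
            input_type="image",
            output_type="zarr",
        )
    ],
)
print(manifest.task_list[0].name)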