fractal-server 1.4.9__py3-none-any.whl → 2.0.0a0__py3-none-any.whl

This diff shows the content changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (132)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +4 -7
  3. fractal_server/app/models/linkuserproject.py +9 -0
  4. fractal_server/app/models/security.py +6 -0
  5. fractal_server/app/models/state.py +1 -1
  6. fractal_server/app/models/v1/__init__.py +10 -0
  7. fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
  8. fractal_server/app/models/{job.py → v1/job.py} +5 -5
  9. fractal_server/app/models/{project.py → v1/project.py} +5 -5
  10. fractal_server/app/models/{task.py → v1/task.py} +7 -2
  11. fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
  12. fractal_server/app/models/v2/__init__.py +20 -0
  13. fractal_server/app/models/v2/dataset.py +55 -0
  14. fractal_server/app/models/v2/job.py +51 -0
  15. fractal_server/app/models/v2/project.py +31 -0
  16. fractal_server/app/models/v2/task.py +93 -0
  17. fractal_server/app/models/v2/workflow.py +43 -0
  18. fractal_server/app/models/v2/workflowtask.py +90 -0
  19. fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
  20. fractal_server/app/routes/admin/v2.py +275 -0
  21. fractal_server/app/routes/api/v1/__init__.py +7 -7
  22. fractal_server/app/routes/api/v1/_aux_functions.py +2 -2
  23. fractal_server/app/routes/api/v1/dataset.py +44 -37
  24. fractal_server/app/routes/api/v1/job.py +12 -12
  25. fractal_server/app/routes/api/v1/project.py +23 -21
  26. fractal_server/app/routes/api/v1/task.py +24 -14
  27. fractal_server/app/routes/api/v1/task_collection.py +16 -14
  28. fractal_server/app/routes/api/v1/workflow.py +24 -24
  29. fractal_server/app/routes/api/v1/workflowtask.py +10 -10
  30. fractal_server/app/routes/api/v2/__init__.py +28 -0
  31. fractal_server/app/routes/api/v2/_aux_functions.py +497 -0
  32. fractal_server/app/routes/api/v2/apply.py +220 -0
  33. fractal_server/app/routes/api/v2/dataset.py +310 -0
  34. fractal_server/app/routes/api/v2/images.py +212 -0
  35. fractal_server/app/routes/api/v2/job.py +200 -0
  36. fractal_server/app/routes/api/v2/project.py +205 -0
  37. fractal_server/app/routes/api/v2/task.py +222 -0
  38. fractal_server/app/routes/api/v2/task_collection.py +229 -0
  39. fractal_server/app/routes/api/v2/workflow.py +398 -0
  40. fractal_server/app/routes/api/v2/workflowtask.py +269 -0
  41. fractal_server/app/routes/aux/_job.py +1 -1
  42. fractal_server/app/runner/async_wrap.py +27 -0
  43. fractal_server/app/runner/exceptions.py +129 -0
  44. fractal_server/app/runner/executors/local/__init__.py +3 -0
  45. fractal_server/app/runner/{_local → executors/local}/executor.py +2 -2
  46. fractal_server/app/runner/executors/slurm/__init__.py +3 -0
  47. fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
  48. fractal_server/app/runner/executors/slurm/_check_jobs_status.py +72 -0
  49. fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +3 -4
  50. fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
  51. fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
  52. fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +9 -9
  53. fractal_server/app/runner/filenames.py +6 -0
  54. fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
  55. fractal_server/app/runner/task_files.py +105 -0
  56. fractal_server/app/runner/{__init__.py → v1/__init__.py} +36 -49
  57. fractal_server/app/runner/{_common.py → v1/_common.py} +13 -120
  58. fractal_server/app/runner/{_local → v1/_local}/__init__.py +6 -6
  59. fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
  60. fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
  61. fractal_server/app/runner/v1/_slurm/__init__.py +310 -0
  62. fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +3 -9
  63. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
  64. fractal_server/app/runner/v1/common.py +117 -0
  65. fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
  66. fractal_server/app/runner/v2/__init__.py +337 -0
  67. fractal_server/app/runner/v2/_local/__init__.py +169 -0
  68. fractal_server/app/runner/v2/_local/_local_config.py +118 -0
  69. fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
  70. fractal_server/app/runner/v2/_slurm/__init__.py +157 -0
  71. fractal_server/app/runner/v2/_slurm/_submit_setup.py +83 -0
  72. fractal_server/app/runner/v2/_slurm/get_slurm_config.py +179 -0
  73. fractal_server/app/runner/v2/components.py +5 -0
  74. fractal_server/app/runner/v2/deduplicate_list.py +24 -0
  75. fractal_server/app/runner/v2/handle_failed_job.py +156 -0
  76. fractal_server/app/runner/v2/merge_outputs.py +41 -0
  77. fractal_server/app/runner/v2/runner.py +264 -0
  78. fractal_server/app/runner/v2/runner_functions.py +339 -0
  79. fractal_server/app/runner/v2/runner_functions_low_level.py +134 -0
  80. fractal_server/app/runner/v2/task_interface.py +43 -0
  81. fractal_server/app/runner/v2/v1_compat.py +21 -0
  82. fractal_server/app/schemas/__init__.py +4 -42
  83. fractal_server/app/schemas/v1/__init__.py +42 -0
  84. fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
  85. fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
  86. fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
  87. fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
  88. fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
  89. fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
  90. fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
  91. fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
  92. fractal_server/app/schemas/v2/__init__.py +34 -0
  93. fractal_server/app/schemas/v2/dataset.py +88 -0
  94. fractal_server/app/schemas/v2/dumps.py +87 -0
  95. fractal_server/app/schemas/v2/job.py +113 -0
  96. fractal_server/app/schemas/v2/manifest.py +109 -0
  97. fractal_server/app/schemas/v2/project.py +36 -0
  98. fractal_server/app/schemas/v2/task.py +121 -0
  99. fractal_server/app/schemas/v2/task_collection.py +105 -0
  100. fractal_server/app/schemas/v2/workflow.py +78 -0
  101. fractal_server/app/schemas/v2/workflowtask.py +118 -0
  102. fractal_server/config.py +5 -10
  103. fractal_server/images/__init__.py +50 -0
  104. fractal_server/images/tools.py +86 -0
  105. fractal_server/main.py +11 -3
  106. fractal_server/migrations/versions/4b35c5cefbe3_tmp_is_v2_compatible.py +39 -0
  107. fractal_server/migrations/versions/56af171b0159_v2.py +217 -0
  108. fractal_server/migrations/versions/876f28db9d4e_tmp_split_task_and_wftask_meta.py +68 -0
  109. fractal_server/migrations/versions/974c802f0dd0_tmp_workflowtaskv2_type_in_db.py +37 -0
  110. fractal_server/migrations/versions/9cd305cd6023_tmp_workflowtaskv2.py +40 -0
  111. fractal_server/migrations/versions/a6231ed6273c_tmp_args_schemas_in_taskv2.py +42 -0
  112. fractal_server/migrations/versions/b9e9eed9d442_tmp_taskv2_type.py +37 -0
  113. fractal_server/migrations/versions/e3e639454d4b_tmp_make_task_meta_non_optional.py +50 -0
  114. fractal_server/tasks/__init__.py +0 -5
  115. fractal_server/tasks/endpoint_operations.py +13 -19
  116. fractal_server/tasks/utils.py +35 -0
  117. fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
  118. fractal_server/tasks/{background_operations.py → v1/background_operations.py} +18 -50
  119. fractal_server/tasks/v1/get_collection_data.py +14 -0
  120. fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
  121. fractal_server/tasks/v2/background_operations.py +382 -0
  122. fractal_server/tasks/v2/get_collection_data.py +14 -0
  123. {fractal_server-1.4.9.dist-info → fractal_server-2.0.0a0.dist-info}/METADATA +3 -4
  124. fractal_server-2.0.0a0.dist-info/RECORD +166 -0
  125. fractal_server/app/runner/_slurm/.gitignore +0 -2
  126. fractal_server/app/runner/_slurm/__init__.py +0 -150
  127. fractal_server/app/runner/common.py +0 -311
  128. fractal_server-1.4.9.dist-info/RECORD +0 -97
  129. /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
  130. {fractal_server-1.4.9.dist-info → fractal_server-2.0.0a0.dist-info}/LICENSE +0 -0
  131. {fractal_server-1.4.9.dist-info → fractal_server-2.0.0a0.dist-info}/WHEEL +0 -0
  132. {fractal_server-1.4.9.dist-info → fractal_server-2.0.0a0.dist-info}/entry_points.txt +0 -0
fractal_server/app/schemas/v2/__init__.py
@@ -0,0 +1,34 @@
+from .dataset import DatasetCreateV2  # noqa F401
+from .dataset import DatasetReadV2  # noqa F401
+from .dataset import DatasetUpdateV2  # noqa F401
+from .dumps import DatasetDumpV2  # noqa F401
+from .dumps import ProjectDumpV2  # noqa F401
+from .dumps import TaskDumpV2  # noqa F401
+from .dumps import WorkflowDumpV2  # noqa F401
+from .dumps import WorkflowTaskDumpV2  # noqa F401
+from .job import JobCreateV2  # noqa F401
+from .job import JobReadV2  # noqa F401
+from .job import JobStatusTypeV2  # noqa F401
+from .job import JobUpdateV2  # noqa F401
+from .manifest import ManifestV2  # noqa F401
+from .project import ProjectCreateV2  # noqa F401
+from .project import ProjectReadV2  # noqa F401
+from .project import ProjectUpdateV2  # noqa F401
+from .task import TaskCreateV2  # noqa F401
+from .task import TaskExportV2  # noqa F401
+from .task import TaskImportV2  # noqa F401
+from .task import TaskReadV2  # noqa F401
+from .task import TaskUpdateV2  # noqa F401
+from .task_collection import TaskCollectPipV2  # noqa F401
+from .task_collection import TaskCollectStatusV2  # noqa F401
+from .workflow import WorkflowCreateV2  # noqa F401
+from .workflow import WorkflowExportV2  # noqa F401
+from .workflow import WorkflowImportV2  # noqa F401
+from .workflow import WorkflowReadV2  # noqa F401
+from .workflow import WorkflowUpdateV2  # noqa F401
+from .workflowtask import WorkflowTaskCreateV2  # noqa F401
+from .workflowtask import WorkflowTaskExportV2  # noqa F401
+from .workflowtask import WorkflowTaskImportV2  # noqa F401
+from .workflowtask import WorkflowTaskReadV2  # noqa F401
+from .workflowtask import WorkflowTaskStatusTypeV2  # noqa F401
+from .workflowtask import WorkflowTaskUpdateV2  # noqa F401
fractal_server/app/schemas/v2/dataset.py
@@ -0,0 +1,88 @@
+from datetime import datetime
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import validator
+
+from .._validators import valstr
+from .._validators import valutc
+from ..v1.project import ProjectReadV1
+from .dumps import WorkflowTaskDumpV2
+from .workflowtask import WorkflowTaskStatusTypeV2
+from fractal_server.images import Filters
+
+
+class _DatasetHistoryItemV2(BaseModel):
+    """
+    Class for an item of `Dataset.history`.
+    """
+
+    workflowtask: WorkflowTaskDumpV2
+    status: WorkflowTaskStatusTypeV2
+    parallelization: Optional[dict]
+
+
+class DatasetStatusReadV2(BaseModel):
+    """
+    Response type for the
+    `/project/{project_id}/dataset/{dataset_id}/status/` endpoint
+    """
+
+    status: Optional[
+        dict[
+            str,
+            WorkflowTaskStatusTypeV2,
+        ]
+    ] = None
+
+
+# CRUD
+
+
+class DatasetCreateV2(BaseModel):
+
+    name: str
+
+    read_only: bool = False
+    zarr_dir: str
+
+    filters: Filters = Field(default_factory=Filters)
+
+    # Validators
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+
+
+class DatasetReadV2(BaseModel):
+
+    id: int
+    name: str
+
+    project_id: int
+    project: ProjectReadV1
+
+    history: list[_DatasetHistoryItemV2]
+    read_only: bool
+
+    timestamp_created: datetime
+
+    zarr_dir: str
+    filters: Filters = Field(default_factory=Filters)
+
+    # Validators
+    _timestamp_created = validator("timestamp_created", allow_reuse=True)(
+        valutc("timestamp_created")
+    )
+
+
+class DatasetUpdateV2(BaseModel):
+    class Config:
+        extra = "forbid"
+
+    name: Optional[str]
+    read_only: Optional[bool]
+    zarr_dir: Optional[str]
+    filters: Optional[Filters]
+
+    # Validators
+    _name = validator("name", allow_reuse=True)(valstr("name"))
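For orientation, here is a minimal sketch (not part of the diff) of how the new dataset-creation schema shown above could be used; the dataset name and zarr_dir value are invented for illustration:

from fractal_server.app.schemas.v2 import DatasetCreateV2

# Only `name` and `zarr_dir` are required; `read_only` and `filters`
# fall back to their defaults (False and a default Filters() instance).
ds = DatasetCreateV2(name="my-dataset", zarr_dir="/data/my-dataset.zarr")
print(ds.filters)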
fractal_server/app/schemas/v2/dumps.py
@@ -0,0 +1,87 @@
+"""
+
+Dump models differ from their Read counterpart in that:
+* They are directly JSON-able, without any additional encoder.
+* They may only include a subset of the Read attributes.
+
+These models are used in at least two situations:
+1. In the "*_dump" attributes of ApplyWorkflow models;
+2. In the `_DatasetHistoryItem.workflowtask` model, to trim its size.
+"""
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Extra
+from pydantic import root_validator
+
+from fractal_server.app.schemas.v1.dumps import TaskDumpV1
+from fractal_server.images import Filters
+
+
+class ProjectDumpV2(BaseModel, extra=Extra.forbid):
+
+    id: int
+    name: str
+    read_only: bool
+    timestamp_created: str
+
+
+class TaskDumpV2(BaseModel):
+    id: int
+    name: str
+    type: str
+
+    command_non_parallel: Optional[str]
+    command_parallel: Optional[str]
+    source: str
+    owner: Optional[str]
+    version: Optional[str]
+
+    input_types: dict[str, bool]
+    output_types: dict[str, bool]
+
+
+class WorkflowTaskDumpV2(BaseModel):
+    id: int
+    workflow_id: int
+    order: Optional[int]
+
+    input_filters: Filters
+
+    task_id: Optional[int]
+    task: Optional[TaskDumpV2]
+    task_legacy_id: Optional[int]
+    task_legacy: Optional[TaskDumpV1]
+
+    # Validators
+    @root_validator
+    def task_v1_or_v2(cls, values):
+        v1 = values.get("task_legacy_id")
+        v2 = values.get("task_id")
+        if ((v1 is not None) and (v2 is not None)) or (
+            (v1 is None) and (v2 is None)
+        ):
+            message = "both" if (v1 and v2) else "none"
+            raise ValueError(
+                "One and only one must be provided between "
+                f"'task_legacy_id' and 'task_id' (you provided {message})"
+            )
+        return values
+
+
+class WorkflowDumpV2(BaseModel):
+    id: int
+    name: str
+    project_id: int
+    timestamp_created: str
+
+
+class DatasetDumpV2(BaseModel):
+    id: int
+    name: str
+    project_id: int
+    read_only: bool
+    timestamp_created: str
+
+    zarr_dir: str
+    filters: Filters
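As the module docstring above notes, dump models are directly JSON-able. A small sketch (all values invented) of what that means in practice:

import json

from fractal_server.app.schemas.v2 import ProjectDumpV2

# timestamp_created is stored as a plain string, so no custom JSON encoder
# is needed when serializing the dump.
dump = ProjectDumpV2(
    id=1,
    name="example-project",
    read_only=False,
    timestamp_created="2024-01-01T00:00:00+00:00",
)
print(json.dumps(dump.dict()))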
fractal_server/app/schemas/v2/job.py
@@ -0,0 +1,113 @@
+from datetime import datetime
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import validator
+from pydantic.types import StrictStr
+
+from .._validators import valstr
+from .._validators import valutc
+from .dumps import DatasetDumpV2
+from .dumps import ProjectDumpV2
+from .dumps import WorkflowDumpV2
+
+
+class JobStatusTypeV2(str, Enum):
+    """
+    Define the available job statuses
+
+    Attributes:
+        SUBMITTED:
+            The job was created. This does not guarantee that it was also
+            submitted to an executor (e.g. other errors could have prevented
+            this), nor that it is actually running (e.g. SLURM jobs could be
+            still in the queue).
+        DONE:
+            The job successfully reached its end.
+        FAILED:
+            The workflow terminated with an error.
+    """
+
+    SUBMITTED = "submitted"
+    DONE = "done"
+    FAILED = "failed"
+
+
+class JobCreateV2(BaseModel):
+
+    first_task_index: Optional[int] = None
+    last_task_index: Optional[int] = None
+    slurm_account: Optional[StrictStr] = None
+    worker_init: Optional[str]
+
+    # Validators
+    _worker_init = validator("worker_init", allow_reuse=True)(
+        valstr("worker_init")
+    )
+
+    @validator("first_task_index", always=True)
+    def first_task_index_non_negative(cls, v, values):
+        """
+        Check that `first_task_index` is non-negative.
+        """
+        if v is not None and v < 0:
+            raise ValueError(
+                f"first_task_index cannot be negative (given: {v})"
+            )
+        return v
+
+    @validator("last_task_index", always=True)
+    def first_last_task_indices(cls, v, values):
+        """
+        Check that `last_task_index` is non-negative, and that it is not
+        smaller than `first_task_index`.
+        """
+        if v is not None and v < 0:
+            raise ValueError(
+                f"last_task_index cannot be negative (given: {v})"
+            )
+
+        first_task_index = values.get("first_task_index")
+        last_task_index = v
+        if first_task_index is not None and last_task_index is not None:
+            if first_task_index > last_task_index:
+                raise ValueError(
+                    f"{first_task_index=} cannot be larger than "
+                    f"{last_task_index=}"
+                )
+        return v
+
+
+class JobReadV2(BaseModel):
+
+    id: int
+    project_id: Optional[int]
+    project_dump: ProjectDumpV2
+    user_email: str
+    slurm_account: Optional[str]
+    workflow_id: Optional[int]
+    workflow_dump: WorkflowDumpV2
+    dataset_id: Optional[int]
+    dataset_dump: DatasetDumpV2
+    start_timestamp: datetime
+    end_timestamp: Optional[datetime]
+    status: str
+    log: Optional[str]
+    working_dir: Optional[str]
+    working_dir_user: Optional[str]
+    first_task_index: Optional[int]
+    last_task_index: Optional[int]
+    worker_init: Optional[str]
+
+    _start_timestamp = validator("start_timestamp", allow_reuse=True)(
+        valutc("start_timestamp")
+    )
+    _end_timestamp = validator("end_timestamp", allow_reuse=True)(
+        valutc("end_timestamp")
+    )
+
+
+class JobUpdateV2(BaseModel):
+
+    status: JobStatusTypeV2
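A quick sketch (indices invented) of the task-index checks enforced by the JobCreateV2 validators above:

from fractal_server.app.schemas.v2 import JobCreateV2

# Accepted: both indices are non-negative and first <= last.
JobCreateV2(first_task_index=0, last_task_index=2)

# Rejected: first_task_index is larger than last_task_index.
try:
    JobCreateV2(first_task_index=3, last_task_index=1)
except ValueError as err:
    print(err)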
fractal_server/app/schemas/v2/manifest.py
@@ -0,0 +1,109 @@
+from typing import Any
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import HttpUrl
+from pydantic import root_validator
+from pydantic import validator
+
+
+class TaskManifestV2(BaseModel):
+    """
+    Represents a task within a V2 manifest.
+
+    Attributes:
+        name:
+            The task name
+        executable:
+            Path to the executable relative to the package root
+
+            Note: by package root we mean "as it will be installed". If a
+            package `Pkg` installs in the folder `pkg` the executable
+            `pkg/executable.py`, this attribute must contain only
+            `executable.py`.
+        input_type:
+            The input type accepted by the task
+        output_type:
+            The output type returned by the task
+        meta:
+            Additional information about the package, such as hash of the
+            executable, specific runtime requirements (e.g., need_gpu=True),
+            etc.
+        args_schema:
+            JSON Schema for task arguments
+        docs_info:
+            Additional information about the Task, coming from the docstring.
+        docs_link:
+            Link to Task docs.
+    """
+
+    name: str
+    executable_non_parallel: Optional[str] = None
+    executable_parallel: Optional[str] = None
+    input_types: dict[str, bool] = Field(default_factory=dict)
+    output_types: dict[str, bool] = Field(default_factory=dict)
+    meta_parallel: dict[str, Any] = Field(default_factory=dict)
+    meta_non_parallel: dict[str, Any] = Field(default_factory=dict)
+    args_schema_non_parallel: Optional[dict[str, Any]] = None
+    args_schema_parallel: Optional[dict[str, Any]] = None
+    docs_info: Optional[str] = None
+    docs_link: Optional[HttpUrl] = None
+
+
+class ManifestV2(BaseModel):
+    """
+    Packages containing tasks are required to include a special file
+    `__FRACTAL_MANIFEST__.json` in order to be discovered and used by Fractal.
+
+    This model class and the model classes it depends on provide the base
+    schema to read, write and validate manifests.
+
+    Attributes:
+        manifest_version:
+            A version string that provides indication for compatibility between
+            manifests as the schema evolves. This is for instance used by
+            Fractal to determine which subclass of the present base class needs
+            be used to read and validate the input.
+        task_list : list[TaskManifestType]
+            The list of tasks, represented as specified by subclasses of the
+            _TaskManifestBase (a.k.a. TaskManifestType)
+        has_args_schemas:
+            `True` if the manifest includes JSON Schemas for the arguments of
+            each task.
+        args_schema_version:
+            Label of how `args_schema`s were generated (e.g. `pydantic_v1`).
+    """
+
+    manifest_version: str
+    task_list: list[TaskManifestV2]
+    has_args_schemas: bool = False
+    args_schema_version: Optional[str]
+
+    @root_validator()
+    def _check_args_schemas_are_present(cls, values):
+        has_args_schemas = values["has_args_schemas"]
+        task_list = values["task_list"]
+        if has_args_schemas is True:
+            for task in task_list:
+                if task.executable_parallel is not None:
+                    if task.args_schema_parallel is None:
+                        raise ValueError(
+                            f"Manifest has {has_args_schemas=}, but "
+                            f"task '{task.name}' has "
+                            f"{task.args_schema_parallel=}."
+                        )
+                if task.executable_non_parallel is not None:
+                    if task.args_schema_non_parallel is None:
+                        raise ValueError(
+                            f"Manifest has {has_args_schemas=}, but "
+                            f"task '{task.name}' has "
+                            f"{task.args_schema_non_parallel=}."
+                        )
+        return values
+
+    @validator("manifest_version")
+    def manifest_version_2(cls, value):
+        if value != "2":
+            raise ValueError(f"Wrong manifest version (given {value})")
+        return value
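A minimal sketch of a `__FRACTAL_MANIFEST__.json` payload that satisfies the ManifestV2 model above; the task entry is invented purely for illustration:

from fractal_server.app.schemas.v2 import ManifestV2

manifest = ManifestV2(
    **{
        "manifest_version": "2",    # any other value is rejected
        "has_args_schemas": False,  # so no args_schema_* is required
        "task_list": [
            {
                "name": "Illustrative Task",
                "executable_parallel": "illustrative_task.py",
            }
        ],
    }
)
print(manifest.task_list[0].name)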
fractal_server/app/schemas/v2/project.py
@@ -0,0 +1,36 @@
+from datetime import datetime
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import validator
+
+from .._validators import valstr
+from .._validators import valutc
+
+
+class ProjectCreateV2(BaseModel):
+
+    name: str
+    read_only: bool = False
+    # Validators
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+
+
+class ProjectReadV2(BaseModel):
+
+    id: int
+    name: str
+    read_only: bool
+    timestamp_created: datetime
+    # Validators
+    _timestamp_created = validator("timestamp_created", allow_reuse=True)(
+        valutc("timestamp_created")
+    )
+
+
+class ProjectUpdateV2(BaseModel):
+
+    name: Optional[str]
+    read_only: Optional[bool]
+    # Validators
+    _name = validator("name", allow_reuse=True)(valstr("name"))
fractal_server/app/schemas/v2/task.py
@@ -0,0 +1,121 @@
+from typing import Any
+from typing import Literal
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import HttpUrl
+from pydantic import root_validator
+from pydantic import validator
+
+from .._validators import valstr
+
+
+class TaskCreateV2(BaseModel):
+    class Config:
+        extra = "forbid"
+
+    name: str
+
+    command_non_parallel: Optional[str]
+    command_parallel: Optional[str]
+    source: str
+
+    meta_parallel: Optional[dict[str, Any]]
+    meta_non_parallel: Optional[dict[str, Any]]
+    version: Optional[str]
+    args_schema_non_parallel: Optional[dict[str, Any]]
+    args_schema_parallel: Optional[dict[str, Any]]
+    args_schema_version: Optional[str]
+    docs_info: Optional[str]
+    docs_link: Optional[HttpUrl]
+
+    input_types: dict[str, bool] = Field(default={})
+    output_types: dict[str, bool] = Field(default={})
+
+    # Validators
+    @root_validator
+    def validate_commands(cls, values):
+        command_parallel = values.get("command_parallel")
+        command_non_parallel = values.get("command_non_parallel")
+        if (command_parallel is None) and (command_non_parallel is None):
+            raise ValueError(
+                "Task must have at least one valid command "
+                "(parallel and/or non_parallel)"
+            )
+        return values
+
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+    _command_non_parallel = validator(
+        "command_non_parallel", allow_reuse=True
+    )(valstr("command_non_parallel"))
+    _command_parallel = validator("command_parallel", allow_reuse=True)(
+        valstr("command_parallel")
+    )
+    _source = validator("source", allow_reuse=True)(valstr("source"))
+    _version = validator("version", allow_reuse=True)(valstr("version"))
+    _args_schema_version = validator("args_schema_version", allow_reuse=True)(
+        valstr("args_schema_version")
+    )
+
+
+class TaskReadV2(BaseModel):
+
+    id: int
+    name: str
+    type: Literal["parallel", "non_parallel", "compound"]
+    source: str
+    owner: Optional[str]
+    version: Optional[str]
+
+    command_non_parallel: Optional[str]
+    command_parallel: Optional[str]
+    meta_parallel: dict[str, Any]
+    meta_non_parallel: dict[str, Any]
+    args_schema_non_parallel: Optional[dict[str, Any]] = None
+    args_schema_parallel: Optional[dict[str, Any]] = None
+    args_schema_version: Optional[str]
+    docs_info: Optional[str]
+    docs_link: Optional[HttpUrl]
+    input_types: dict[str, bool]
+    output_types: dict[str, bool]
+
+
+class TaskUpdateV2(BaseModel):
+
+    name: Optional[str]
+    version: Optional[str]
+    command_parallel: Optional[str]
+    command_non_parallel: Optional[str]
+    input_types: Optional[dict[str, bool]]
+    output_types: Optional[dict[str, bool]]
+
+    # Validators
+    @validator("input_types", "output_types")
+    def val_is_dict(cls, v):
+        if not isinstance(v, dict):
+            raise ValueError
+        return v
+
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+    _version = validator("version", allow_reuse=True)(
+        valstr("version", accept_none=True)
+    )
+    _command_parallel = validator("command_parallel", allow_reuse=True)(
+        valstr("command_parallel")
+    )
+    _command_non_parallel = validator(
+        "command_non_parallel", allow_reuse=True
+    )(valstr("command_non_parallel"))
+
+
+class TaskImportV2(BaseModel):
+
+    source: str
+    _source = validator("source", allow_reuse=True)(valstr("source"))
+
+
+class TaskExportV2(BaseModel):
+
+    source: str
+    _source = validator("source", allow_reuse=True)(valstr("source"))
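A short sketch (names invented) of the command constraint enforced by TaskCreateV2.validate_commands above: at least one of command_parallel / command_non_parallel must be provided.

from fractal_server.app.schemas.v2 import TaskCreateV2

# Valid: a parallel command is enough.
TaskCreateV2(
    name="illustrative-task",
    source="illustrative-source-string",  # hypothetical source value
    command_parallel="python3 illustrative_task.py",
)

# Invalid: no command at all raises a validation error.
try:
    TaskCreateV2(name="illustrative-task", source="another-source")
except ValueError as err:
    print(err)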
fractal_server/app/schemas/v2/task_collection.py
@@ -0,0 +1,105 @@
+from pathlib import Path
+from typing import Literal
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import validator
+
+from .._validators import valstr
+from .task import TaskReadV2
+
+
+class TaskCollectPipV2(BaseModel):
+    """
+    TaskCollectPipV2 class
+
+    This class only encodes the attributes required to trigger a
+    task-collection operation. Other attributes (that are assigned *during*
+    task collection) are defined as part of fractal-server.
+
+    Two cases are supported:
+
+    1. `package` is the path of a local wheel file;
+    2. `package` is the name of a package that can be installed via `pip`.
+
+
+    Attributes:
+        package:
+            The name of a `pip`-installable package, or the path to a local
+            wheel file.
+        package_version: Version of the package
+        package_extras: Package extras to include in the `pip install` command
+        python_version: Python version to install and run the package tasks
+        pinned_package_versions:
+            dictionary 'package':'version' used to pin versions for specific
+            packages.
+
+    """
+
+    package: str
+    package_version: Optional[str] = None
+    package_extras: Optional[str] = None
+    python_version: Optional[str] = None
+    pinned_package_versions: Optional[dict[str, str]] = None
+
+    _package_extras = validator("package_extras", allow_reuse=True)(
+        valstr("package_extras")
+    )
+    _python_version = validator("python_version", allow_reuse=True)(
+        valstr("python_version")
+    )
+
+    @validator("package")
+    def package_validator(cls, value):
+        if "/" in value:
+            if not value.endswith(".whl"):
+                raise ValueError(
+                    "Local-package path must be a wheel file "
+                    f"(given {value})."
+                )
+            if not Path(value).is_absolute():
+                raise ValueError(
+                    f"Local-package path must be absolute: (given {value})."
+                )
+        return value
+
+    @validator("package_version")
+    def package_version_validator(cls, v, values):
+
+        valstr("package_version")(v)
+
+        if values["package"].endswith(".whl"):
+            raise ValueError(
+                "Cannot provide version when package is a Wheel file."
+            )
+        return v
+
+
+class TaskCollectStatusV2(BaseModel):
+    """
+    TaskCollectStatus class
+
+    Attributes:
+        status:
+        package:
+        venv_path:
+        task_list:
+        log:
+        info:
+    """
+
+    status: Literal["pending", "installing", "collecting", "fail", "OK"]
+    package: str
+    venv_path: Path
+    task_list: Optional[list[TaskReadV2]] = Field(default=[])
+    log: Optional[str]
+    info: Optional[str]
+
+    def sanitised_dict(self):
+        """
+        Return `self.dict()` after casting `self.venv_path` to a string
+        """
+        d = self.dict()
+        d["venv_path"] = str(self.venv_path)
+        return d
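A short sketch of the two collection modes described in the TaskCollectPipV2 docstring above; the package name, version, and wheel path are illustrative only:

from fractal_server.app.schemas.v2 import TaskCollectPipV2

# Case 2: a pip-installable package, optionally with an explicit version.
remote = TaskCollectPipV2(
    package="fractal-tasks-core",  # hypothetical package/version
    package_version="1.0.0",
)

# Case 1: an absolute path to a local wheel file (a version must not be given).
local = TaskCollectPipV2(package="/tmp/my_tasks-0.1.0-py3-none-any.whl")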