fractal-server 2.13.1__py3-none-any.whl → 2.14.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (119)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/__main__.py +3 -1
  3. fractal_server/app/models/linkusergroup.py +6 -2
  4. fractal_server/app/models/v2/__init__.py +7 -1
  5. fractal_server/app/models/v2/dataset.py +1 -11
  6. fractal_server/app/models/v2/history.py +78 -0
  7. fractal_server/app/models/v2/job.py +10 -3
  8. fractal_server/app/models/v2/task_group.py +2 -2
  9. fractal_server/app/models/v2/workflow.py +1 -1
  10. fractal_server/app/models/v2/workflowtask.py +1 -1
  11. fractal_server/app/routes/admin/v2/accounting.py +18 -28
  12. fractal_server/app/routes/admin/v2/task.py +1 -1
  13. fractal_server/app/routes/admin/v2/task_group.py +0 -17
  14. fractal_server/app/routes/api/__init__.py +1 -1
  15. fractal_server/app/routes/api/v2/__init__.py +8 -2
  16. fractal_server/app/routes/api/v2/_aux_functions.py +66 -0
  17. fractal_server/app/routes/api/v2/_aux_functions_history.py +166 -0
  18. fractal_server/app/routes/api/v2/dataset.py +0 -17
  19. fractal_server/app/routes/api/v2/history.py +544 -0
  20. fractal_server/app/routes/api/v2/images.py +31 -43
  21. fractal_server/app/routes/api/v2/job.py +30 -0
  22. fractal_server/app/routes/api/v2/project.py +1 -53
  23. fractal_server/app/routes/api/v2/{status.py → status_legacy.py} +6 -6
  24. fractal_server/app/routes/api/v2/submit.py +16 -14
  25. fractal_server/app/routes/api/v2/task.py +3 -10
  26. fractal_server/app/routes/api/v2/task_collection_custom.py +4 -9
  27. fractal_server/app/routes/api/v2/task_group.py +0 -17
  28. fractal_server/app/routes/api/v2/verify_image_types.py +61 -0
  29. fractal_server/app/routes/api/v2/workflow.py +28 -69
  30. fractal_server/app/routes/api/v2/workflowtask.py +53 -50
  31. fractal_server/app/routes/auth/group.py +0 -16
  32. fractal_server/app/routes/auth/oauth.py +5 -3
  33. fractal_server/app/routes/pagination.py +47 -0
  34. fractal_server/app/runner/components.py +0 -3
  35. fractal_server/app/runner/compress_folder.py +57 -29
  36. fractal_server/app/runner/exceptions.py +4 -0
  37. fractal_server/app/runner/executors/base_runner.py +157 -0
  38. fractal_server/app/runner/{v2/_local/_local_config.py → executors/local/get_local_config.py} +7 -9
  39. fractal_server/app/runner/executors/local/runner.py +248 -0
  40. fractal_server/app/runner/executors/{slurm → slurm_common}/_batching.py +1 -1
  41. fractal_server/app/runner/executors/{slurm → slurm_common}/_slurm_config.py +9 -7
  42. fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +868 -0
  43. fractal_server/app/runner/{v2/_slurm_common → executors/slurm_common}/get_slurm_config.py +48 -17
  44. fractal_server/app/runner/executors/{slurm → slurm_common}/remote.py +36 -47
  45. fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py +134 -0
  46. fractal_server/app/runner/executors/slurm_ssh/runner.py +268 -0
  47. fractal_server/app/runner/executors/slurm_sudo/__init__.py +0 -0
  48. fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_subprocess_run_as_user.py +2 -83
  49. fractal_server/app/runner/executors/slurm_sudo/runner.py +193 -0
  50. fractal_server/app/runner/extract_archive.py +1 -3
  51. fractal_server/app/runner/task_files.py +134 -87
  52. fractal_server/app/runner/v2/__init__.py +0 -399
  53. fractal_server/app/runner/v2/_local.py +88 -0
  54. fractal_server/app/runner/v2/{_slurm_ssh/__init__.py → _slurm_ssh.py} +20 -19
  55. fractal_server/app/runner/v2/{_slurm_sudo/__init__.py → _slurm_sudo.py} +17 -15
  56. fractal_server/app/runner/v2/db_tools.py +119 -0
  57. fractal_server/app/runner/v2/runner.py +206 -95
  58. fractal_server/app/runner/v2/runner_functions.py +488 -187
  59. fractal_server/app/runner/v2/runner_functions_low_level.py +40 -43
  60. fractal_server/app/runner/v2/submit_workflow.py +358 -0
  61. fractal_server/app/runner/v2/task_interface.py +31 -0
  62. fractal_server/app/schemas/_validators.py +13 -24
  63. fractal_server/app/schemas/user.py +10 -7
  64. fractal_server/app/schemas/user_settings.py +9 -21
  65. fractal_server/app/schemas/v2/__init__.py +9 -1
  66. fractal_server/app/schemas/v2/dataset.py +12 -94
  67. fractal_server/app/schemas/v2/dumps.py +26 -9
  68. fractal_server/app/schemas/v2/history.py +80 -0
  69. fractal_server/app/schemas/v2/job.py +15 -8
  70. fractal_server/app/schemas/v2/manifest.py +14 -7
  71. fractal_server/app/schemas/v2/project.py +9 -7
  72. fractal_server/app/schemas/v2/status_legacy.py +35 -0
  73. fractal_server/app/schemas/v2/task.py +72 -77
  74. fractal_server/app/schemas/v2/task_collection.py +14 -32
  75. fractal_server/app/schemas/v2/task_group.py +10 -9
  76. fractal_server/app/schemas/v2/workflow.py +10 -11
  77. fractal_server/app/schemas/v2/workflowtask.py +2 -21
  78. fractal_server/app/security/__init__.py +3 -3
  79. fractal_server/app/security/signup_email.py +2 -2
  80. fractal_server/config.py +41 -46
  81. fractal_server/images/tools.py +23 -0
  82. fractal_server/migrations/versions/47351f8c7ebc_drop_dataset_filters.py +50 -0
  83. fractal_server/migrations/versions/9db60297b8b2_set_ondelete.py +250 -0
  84. fractal_server/migrations/versions/c90a7c76e996_job_id_in_history_run.py +41 -0
  85. fractal_server/migrations/versions/e81103413827_add_job_type_filters.py +36 -0
  86. fractal_server/migrations/versions/f37aceb45062_make_historyunit_logfile_required.py +39 -0
  87. fractal_server/migrations/versions/fbce16ff4e47_new_history_items.py +120 -0
  88. fractal_server/ssh/_fabric.py +28 -14
  89. fractal_server/tasks/v2/local/collect.py +2 -2
  90. fractal_server/tasks/v2/ssh/collect.py +2 -2
  91. fractal_server/tasks/v2/templates/2_pip_install.sh +1 -1
  92. fractal_server/tasks/v2/templates/4_pip_show.sh +1 -1
  93. fractal_server/tasks/v2/utils_background.py +0 -19
  94. fractal_server/tasks/v2/utils_database.py +30 -17
  95. fractal_server/tasks/v2/utils_templates.py +6 -0
  96. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/METADATA +4 -4
  97. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/RECORD +106 -96
  98. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/WHEEL +1 -1
  99. fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py +0 -126
  100. fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py +0 -116
  101. fractal_server/app/runner/executors/slurm/ssh/executor.py +0 -1386
  102. fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py +0 -71
  103. fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py +0 -130
  104. fractal_server/app/runner/executors/slurm/sudo/executor.py +0 -1281
  105. fractal_server/app/runner/v2/_local/__init__.py +0 -132
  106. fractal_server/app/runner/v2/_local/_submit_setup.py +0 -52
  107. fractal_server/app/runner/v2/_local/executor.py +0 -100
  108. fractal_server/app/runner/v2/_slurm_ssh/_submit_setup.py +0 -83
  109. fractal_server/app/runner/v2/_slurm_sudo/_submit_setup.py +0 -83
  110. fractal_server/app/runner/v2/handle_failed_job.py +0 -59
  111. fractal_server/app/schemas/v2/status.py +0 -16
  112. /fractal_server/app/{runner/executors/slurm → history}/__init__.py +0 -0
  113. /fractal_server/app/runner/executors/{slurm/ssh → local}/__init__.py +0 -0
  114. /fractal_server/app/runner/executors/{slurm/sudo → slurm_common}/__init__.py +0 -0
  115. /fractal_server/app/runner/executors/{_job_states.py → slurm_common/_job_states.py} +0 -0
  116. /fractal_server/app/runner/executors/{slurm → slurm_common}/utils_executors.py +0 -0
  117. /fractal_server/app/runner/{v2/_slurm_common → executors/slurm_ssh}/__init__.py +0 -0
  118. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/LICENSE +0 -0
  119. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/entry_points.txt +0 -0
@@ -5,9 +5,9 @@ from pydantic import ConfigDict
5
5
  from pydantic import field_validator
6
6
  from pydantic.types import StrictStr
7
7
 
8
+ from ._validators import NonEmptyString
8
9
  from ._validators import val_absolute_path
9
10
  from ._validators import val_unique_list
10
- from ._validators import valstr
11
11
  from fractal_server.string_tools import validate_cmd
12
12
 
13
13
  __all__ = (
@@ -48,21 +48,15 @@ class UserSettingsUpdate(BaseModel):
48
48
 
49
49
  model_config = ConfigDict(extra="forbid")
50
50
 
51
- ssh_host: Optional[str] = None
52
- ssh_username: Optional[str] = None
53
- ssh_private_key_path: Optional[str] = None
54
- ssh_tasks_dir: Optional[str] = None
55
- ssh_jobs_dir: Optional[str] = None
56
- slurm_user: Optional[str] = None
57
- slurm_accounts: Optional[list[StrictStr]] = None
58
- project_dir: Optional[str] = None
51
+ ssh_host: Optional[NonEmptyString] = None
52
+ ssh_username: Optional[NonEmptyString] = None
53
+ ssh_private_key_path: Optional[NonEmptyString] = None
54
+ ssh_tasks_dir: Optional[NonEmptyString] = None
55
+ ssh_jobs_dir: Optional[NonEmptyString] = None
56
+ slurm_user: Optional[NonEmptyString] = None
57
+ slurm_accounts: Optional[list[NonEmptyString]] = None
58
+ project_dir: Optional[NonEmptyString] = None
59
59
 
60
- _ssh_host = field_validator("ssh_host")(
61
- classmethod(valstr("ssh_host", accept_none=True))
62
- )
63
- _ssh_username = field_validator("ssh_username")(
64
- classmethod(valstr("ssh_username", accept_none=True))
65
- )
66
60
  _ssh_private_key_path = field_validator("ssh_private_key_path")(
67
61
  classmethod(
68
62
  val_absolute_path("ssh_private_key_path", accept_none=True)
@@ -76,17 +70,11 @@ class UserSettingsUpdate(BaseModel):
76
70
  classmethod(val_absolute_path("ssh_jobs_dir", accept_none=True))
77
71
  )
78
72
 
79
- _slurm_user = field_validator("slurm_user")(
80
- classmethod(valstr("slurm_user", accept_none=True))
81
- )
82
-
83
73
  @field_validator("slurm_accounts")
84
74
  @classmethod
85
75
  def slurm_accounts_validator(cls, value):
86
76
  if value is None:
87
77
  return value
88
- for i, item in enumerate(value):
89
- value[i] = valstr(f"slurm_accounts[{i}]")(cls, item)
90
78
  return val_unique_list("slurm_accounts")(cls, value)
91
79
 
92
80
  @field_validator("project_dir")
@@ -7,8 +7,16 @@ from .dataset import DatasetUpdateV2 # noqa F401
7
7
  from .dumps import DatasetDumpV2 # noqa F401
8
8
  from .dumps import ProjectDumpV2 # noqa F401
9
9
  from .dumps import TaskDumpV2 # noqa F401
10
+ from .dumps import TaskGroupDumpV2 # noqa F401
10
11
  from .dumps import WorkflowDumpV2 # noqa F401
11
12
  from .dumps import WorkflowTaskDumpV2 # noqa F401
13
+ from .history import HistoryRunRead # noqa F401
14
+ from .history import HistoryRunReadAggregated # noqa F401
15
+ from .history import HistoryUnitRead # noqa F401
16
+ from .history import HistoryUnitStatus # noqa F401
17
+ from .history import HistoryUnitStatusQuery # noqa F401
18
+ from .history import ImageLogsRequest # noqa F401
19
+ from .history import SingleImageWithStatus # noqa F401
12
20
  from .job import JobCreateV2 # noqa F401
13
21
  from .job import JobReadV2 # noqa F401
14
22
  from .job import JobStatusTypeV2 # noqa F401
@@ -18,6 +26,7 @@ from .manifest import TaskManifestV2 # noqa F401
18
26
  from .project import ProjectCreateV2 # noqa F401
19
27
  from .project import ProjectReadV2 # noqa F401
20
28
  from .project import ProjectUpdateV2 # noqa F401
29
+ from .status_legacy import WorkflowTaskStatusTypeV2 # noqa F401
21
30
  from .task import TaskCreateV2 # noqa F401
22
31
  from .task import TaskExportV2 # noqa F401
23
32
  from .task import TaskImportV2 # noqa F401
@@ -47,5 +56,4 @@ from .workflowtask import WorkflowTaskImportV2 # noqa F401
47
56
  from .workflowtask import WorkflowTaskReadV2 # noqa F401
48
57
  from .workflowtask import WorkflowTaskReadV2WithWarning # noqa F401
49
58
  from .workflowtask import WorkflowTaskReplaceV2 # noqa F401
50
- from .workflowtask import WorkflowTaskStatusTypeV2 # noqa F401
51
59
  from .workflowtask import WorkflowTaskUpdateV2 # noqa F401
@@ -1,5 +1,4 @@
1
1
  from datetime import datetime
2
- from typing import Any
3
2
  from typing import Optional
4
3
 
5
4
  from pydantic import BaseModel
@@ -10,39 +9,22 @@ from pydantic import field_validator
10
9
  from pydantic import model_validator
11
10
  from pydantic.types import AwareDatetime
12
11
 
13
- from .._filter_validators import validate_attribute_filters
14
- from .._filter_validators import validate_type_filters
12
+ from .._validators import cant_set_none
13
+ from .._validators import NonEmptyString
15
14
  from .._validators import root_validate_dict_keys
16
- from .._validators import valstr
17
- from .dumps import WorkflowTaskDumpV2
18
15
  from .project import ProjectReadV2
19
- from .workflowtask import WorkflowTaskStatusTypeV2
20
16
  from fractal_server.images import SingleImage
21
17
  from fractal_server.images.models import AttributeFiltersType
22
18
  from fractal_server.urls import normalize_url
23
19
 
24
20
 
25
- class _DatasetHistoryItemV2(BaseModel):
26
- """
27
- Class for an item of `Dataset.history`.
28
- """
29
-
30
- workflowtask: WorkflowTaskDumpV2
31
- status: WorkflowTaskStatusTypeV2
32
- parallelization: Optional[dict] = None
33
-
34
-
35
- # CRUD
36
-
37
-
38
21
  class DatasetCreateV2(BaseModel):
39
22
  model_config = ConfigDict(extra="forbid")
40
23
 
41
- name: str
24
+ name: NonEmptyString
42
25
 
43
26
  zarr_dir: Optional[str] = None
44
27
 
45
- type_filters: dict[str, bool] = Field(default_factory=dict)
46
28
  attribute_filters: AttributeFiltersType = Field(default_factory=dict)
47
29
 
48
30
  # Validators
@@ -50,14 +32,6 @@ class DatasetCreateV2(BaseModel):
50
32
  _dict_keys = model_validator(mode="before")(
51
33
  classmethod(root_validate_dict_keys)
52
34
  )
53
- _type_filters = field_validator("type_filters")(
54
- classmethod(validate_type_filters)
55
- )
56
- _attribute_filters = field_validator("attribute_filters")(
57
- classmethod(validate_attribute_filters)
58
- )
59
-
60
- _name = field_validator("name")(classmethod(valstr("name")))
61
35
 
62
36
  @field_validator("zarr_dir")
63
37
  @classmethod
@@ -74,13 +48,9 @@ class DatasetReadV2(BaseModel):
74
48
  project_id: int
75
49
  project: ProjectReadV2
76
50
 
77
- history: list[_DatasetHistoryItemV2]
78
-
79
51
  timestamp_created: AwareDatetime
80
52
 
81
53
  zarr_dir: str
82
- type_filters: dict[str, bool]
83
- attribute_filters: AttributeFiltersType
84
54
 
85
55
  @field_serializer("timestamp_created")
86
56
  def serialize_datetime(v: datetime) -> str:
@@ -90,24 +60,19 @@ class DatasetReadV2(BaseModel):
90
60
  class DatasetUpdateV2(BaseModel):
91
61
  model_config = ConfigDict(extra="forbid")
92
62
 
93
- name: Optional[str] = None
63
+ name: Optional[NonEmptyString] = None
94
64
  zarr_dir: Optional[str] = None
95
- type_filters: Optional[dict[str, bool]] = None
96
- attribute_filters: Optional[dict[str, list[Any]]] = None
97
65
 
98
66
  # Validators
99
67
 
100
68
  _dict_keys = model_validator(mode="before")(
101
69
  classmethod(root_validate_dict_keys)
102
70
  )
103
- _type_filters = field_validator("type_filters")(
104
- classmethod(validate_type_filters)
105
- )
106
- _attribute_filters = field_validator("attribute_filters")(
107
- classmethod(validate_attribute_filters)
108
- )
109
71
 
110
- _name = field_validator("name")(classmethod(valstr("name")))
72
+ @field_validator("name")
73
+ @classmethod
74
+ def _cant_set_none(cls, v):
75
+ return cant_set_none(v)
111
76
 
112
77
  @field_validator("zarr_dir")
113
78
  @classmethod
@@ -121,63 +86,20 @@ class DatasetImportV2(BaseModel):
121
86
  """
122
87
  Class for `Dataset` import.
123
88
 
89
+ We are dropping `model_config = ConfigDict(extra="forbid")` so that any
90
+ kind of legacy filters can be included in the payload, and ignored in the
91
+ API.
92
+
124
93
  Attributes:
125
94
  name:
126
95
  zarr_dir:
127
96
  images:
128
- filters:
129
- type_filters:
130
- attribute_filters:
131
97
  """
132
98
 
133
- model_config = ConfigDict(extra="forbid")
134
-
135
99
  name: str
136
100
  zarr_dir: str
137
101
  images: list[SingleImage] = Field(default_factory=list)
138
102
 
139
- filters: Optional[dict[str, Any]] = None
140
- type_filters: dict[str, bool] = Field(default_factory=dict)
141
- attribute_filters: AttributeFiltersType = Field(default_factory=dict)
142
-
143
- @model_validator(mode="before")
144
- @classmethod
145
- def update_legacy_filters(cls, values: dict):
146
- """
147
- Transform legacy filters (created with fractal-server<2.11.0)
148
- into attribute/type filters
149
- """
150
- if values.get("filters") is not None:
151
- if (
152
- "type_filters" in values.keys()
153
- or "attribute_filters" in values.keys()
154
- ):
155
- raise ValueError(
156
- "Cannot set filters both through the legacy field "
157
- "('filters') and the new ones ('type_filters' and/or "
158
- "'attribute_filters')."
159
- )
160
-
161
- else:
162
- # Convert legacy filters.types into new type_filters
163
- values["type_filters"] = values["filters"].get("types", {})
164
- values["attribute_filters"] = {
165
- key: [value]
166
- for key, value in values["filters"]
167
- .get("attributes", {})
168
- .items()
169
- }
170
- values["filters"] = None
171
-
172
- return values
173
-
174
- _type_filters = field_validator("type_filters")(
175
- classmethod(validate_type_filters)
176
- )
177
- _attribute_filters = field_validator("attribute_filters")(
178
- classmethod(validate_attribute_filters)
179
- )
180
-
181
103
  @field_validator("zarr_dir")
182
104
  @classmethod
183
105
  def normalize_zarr_dir(cls, v: str) -> str:
@@ -192,12 +114,8 @@ class DatasetExportV2(BaseModel):
192
114
  name:
193
115
  zarr_dir:
194
116
  images:
195
- type_filters:
196
- attribute_filters:
197
117
  """
198
118
 
199
119
  name: str
200
120
  zarr_dir: str
201
121
  images: list[SingleImage]
202
- type_filters: dict[str, bool]
203
- attribute_filters: AttributeFiltersType
@@ -1,19 +1,20 @@
1
1
  """
2
-
3
2
  Dump models differ from their Read counterpart in that:
4
3
  * They are directly JSON-able, without any additional encoder.
5
- * They may only include a subset of the Read attributes.
4
+ * They may include only a subset of the available fields.
6
5
 
7
6
  These models are used in at least two situations:
8
7
  1. In the "*_dump" attributes of Job models;
9
- 2. In the `_DatasetHistoryItem.workflowtask` model, to trim its size.
8
+ 2. In the history items, to trim their size.
10
9
  """
11
10
  from typing import Optional
12
11
 
13
12
  from pydantic import BaseModel
14
13
  from pydantic import ConfigDict
14
+ from pydantic import Field
15
15
 
16
- from fractal_server.images.models import AttributeFiltersType
16
+ from .task import TaskTypeType
17
+ from .task_group import TaskGroupV2OriginEnum
17
18
 
18
19
 
19
20
  class ProjectDumpV2(BaseModel):
@@ -26,7 +27,7 @@ class ProjectDumpV2(BaseModel):
26
27
  class TaskDumpV2(BaseModel):
27
28
  id: int
28
29
  name: str
29
- type: str
30
+ type: TaskTypeType
30
31
 
31
32
  command_non_parallel: Optional[str] = None
32
33
  command_parallel: Optional[str] = None
@@ -63,12 +64,28 @@ class WorkflowDumpV2(BaseModel):
63
64
 
64
65
 
65
66
  class DatasetDumpV2(BaseModel):
66
- model_config = ConfigDict(extra="forbid")
67
+ """
68
+ We do not include 'model_config = ConfigDict(extra="forbid")' because
69
+ legacy data may include 'type_filters' or 'attribute_filters' and we
70
+ want to avoid response-validation errors.
71
+ """
72
+
67
73
  id: int
68
74
  name: str
69
75
  project_id: int
70
76
  timestamp_created: str
71
-
72
77
  zarr_dir: str
73
- type_filters: dict[str, bool]
74
- attribute_filters: AttributeFiltersType
78
+
79
+
80
+ class TaskGroupDumpV2(BaseModel):
81
+ id: int
82
+ origin: TaskGroupV2OriginEnum
83
+ pkg_name: str
84
+ version: Optional[str] = None
85
+ python_version: Optional[str] = None
86
+ pip_extras: Optional[str] = None
87
+ pinned_package_versions: dict[str, str] = Field(default_factory=dict)
88
+
89
+ path: Optional[str] = None
90
+ venv_path: Optional[str] = None
91
+ wheel_path: Optional[str] = None
@@ -0,0 +1,80 @@
1
+ from datetime import datetime
2
+ from enum import Enum
3
+ from typing import Any
4
+ from typing import Optional
5
+
6
+ from pydantic import AwareDatetime
7
+ from pydantic import BaseModel
8
+ from pydantic import field_serializer
9
+
10
+ from ....images import SingleImage
11
+
12
+
13
+ class HistoryUnitStatus(str, Enum):
14
+ """
15
+ Available status for images
16
+
17
+ Attributes:
18
+ SUBMITTED:
19
+ DONE:
20
+ FAILED:
21
+ """
22
+
23
+ SUBMITTED = "submitted"
24
+ DONE = "done"
25
+ FAILED = "failed"
26
+
27
+
28
+ class HistoryUnitStatusQuery(str, Enum):
29
+
30
+ SUBMITTED = "submitted"
31
+ DONE = "done"
32
+ FAILED = "failed"
33
+
34
+ UNSET = "unset"
35
+
36
+
37
+ class HistoryUnitRead(BaseModel):
38
+ id: int
39
+ logfile: Optional[str] = None
40
+ status: HistoryUnitStatus
41
+ zarr_urls: list[str]
42
+
43
+
44
+ class HistoryRunRead(BaseModel):
45
+ id: int
46
+ dataset_id: int
47
+ workflowtask_id: Optional[int] = None
48
+ job_id: int
49
+ workflowtask_dump: dict[str, Any]
50
+ task_group_dump: dict[str, Any]
51
+ timestamp_started: AwareDatetime
52
+ status: HistoryUnitStatus
53
+ num_available_images: int
54
+
55
+ @field_serializer("timestamp_started")
56
+ def serialize_datetime(v: datetime) -> str:
57
+ return v.isoformat()
58
+
59
+
60
+ class HistoryRunReadAggregated(BaseModel):
61
+ id: int
62
+ timestamp_started: AwareDatetime
63
+ workflowtask_dump: dict[str, Any]
64
+ num_submitted_units: int
65
+ num_done_units: int
66
+ num_failed_units: int
67
+
68
+ @field_serializer("timestamp_started")
69
+ def serialize_datetime(v: datetime) -> str:
70
+ return v.isoformat()
71
+
72
+
73
+ class ImageLogsRequest(BaseModel):
74
+ workflowtask_id: int
75
+ dataset_id: int
76
+ zarr_url: str
77
+
78
+
79
+ class SingleImageWithStatus(SingleImage):
80
+ status: Optional[HistoryUnitStatus] = None
@@ -13,8 +13,10 @@ from pydantic.types import AwareDatetime
13
13
  from pydantic.types import StrictStr
14
14
 
15
15
  from .._filter_validators import validate_attribute_filters
16
+ from .._filter_validators import validate_type_filters
17
+ from .._validators import cant_set_none
18
+ from .._validators import NonEmptyString
16
19
  from .._validators import root_validate_dict_keys
17
- from .._validators import valstr
18
20
  from .dumps import DatasetDumpV2
19
21
  from .dumps import ProjectDumpV2
20
22
  from .dumps import WorkflowDumpV2
@@ -43,26 +45,32 @@ class JobStatusTypeV2(str, Enum):
43
45
 
44
46
 
45
47
  class JobCreateV2(BaseModel):
46
-
47
48
  model_config = ConfigDict(extra="forbid")
48
49
 
49
50
  first_task_index: Optional[int] = None
50
51
  last_task_index: Optional[int] = None
51
52
  slurm_account: Optional[StrictStr] = None
52
- worker_init: Optional[str] = None
53
+ worker_init: Optional[NonEmptyString] = None
53
54
 
54
55
  attribute_filters: AttributeFiltersType = Field(default_factory=dict)
56
+ type_filters: dict[str, bool] = Field(default_factory=dict)
55
57
 
56
58
  # Validators
57
- _worker_init = field_validator("worker_init")(
58
- classmethod(valstr("worker_init"))
59
- )
59
+
60
+ @field_validator("worker_init")
61
+ @classmethod
62
+ def _cant_set_none(cls, v):
63
+ return cant_set_none(v)
64
+
60
65
  _dict_keys = model_validator(mode="before")(
61
66
  classmethod(root_validate_dict_keys)
62
67
  )
63
68
  _attribute_filters = field_validator("attribute_filters")(
64
69
  classmethod(validate_attribute_filters)
65
70
  )
71
+ _type_filters = field_validator("type_filters")(
72
+ classmethod(validate_type_filters)
73
+ )
66
74
 
67
75
  @field_validator("first_task_index")
68
76
  @classmethod
@@ -100,7 +108,6 @@ class JobCreateV2(BaseModel):
100
108
 
101
109
 
102
110
  class JobReadV2(BaseModel):
103
-
104
111
  id: int
105
112
  project_id: Optional[int] = None
106
113
  project_dump: ProjectDumpV2
@@ -120,6 +127,7 @@ class JobReadV2(BaseModel):
120
127
  last_task_index: Optional[int] = None
121
128
  worker_init: Optional[str] = None
122
129
  attribute_filters: AttributeFiltersType
130
+ type_filters: dict[str, bool]
123
131
 
124
132
  @field_serializer("start_timestamp")
125
133
  def serialize_datetime_start(v: datetime) -> str:
@@ -134,7 +142,6 @@ class JobReadV2(BaseModel):
134
142
 
135
143
 
136
144
  class JobUpdateV2(BaseModel):
137
-
138
145
  model_config = ConfigDict(extra="forbid")
139
146
 
140
147
  status: JobStatusTypeV2
@@ -1,4 +1,5 @@
1
1
  from typing import Any
2
+ from typing import Literal
2
3
  from typing import Optional
3
4
 
4
5
  from pydantic import BaseModel
@@ -7,7 +8,7 @@ from pydantic import field_validator
7
8
  from pydantic import HttpUrl
8
9
  from pydantic import model_validator
9
10
 
10
- from .._validators import valstr
11
+ from .._validators import NonEmptyString
11
12
 
12
13
 
13
14
  class TaskManifestV2(BaseModel):
@@ -56,6 +57,16 @@ class TaskManifestV2(BaseModel):
56
57
  modality: Optional[str] = None
57
58
  tags: list[str] = Field(default_factory=list)
58
59
 
60
+ type: Optional[
61
+ Literal[
62
+ "compound",
63
+ "converter_compound",
64
+ "non_parallel",
65
+ "converter_non_parallel",
66
+ "parallel",
67
+ ]
68
+ ] = None
69
+
59
70
  @model_validator(mode="after")
60
71
  def validate_executable_args_meta(self):
61
72
  executable_non_parallel = self.executable_non_parallel
@@ -128,7 +139,7 @@ class ManifestV2(BaseModel):
128
139
  The list of tasks, represented as specified by subclasses of the
129
140
  _TaskManifestBase (a.k.a. TaskManifestType)
130
141
  has_args_schemas:
131
- `True` if the manifest incldues JSON Schemas for the arguments of
142
+ `True` if the manifest includes JSON Schemas for the arguments of
132
143
  each task.
133
144
  args_schema_version:
134
145
  Label of how `args_schema`s were generated (e.g. `pydantic_v1`).
@@ -138,7 +149,7 @@ class ManifestV2(BaseModel):
138
149
  task_list: list[TaskManifestV2]
139
150
  has_args_schemas: bool = False
140
151
  args_schema_version: Optional[str] = None
141
- authors: Optional[str] = None
152
+ authors: Optional[NonEmptyString] = None
142
153
 
143
154
  @model_validator(mode="after")
144
155
  def _check_args_schemas_are_present(self):
@@ -181,7 +192,3 @@ class ManifestV2(BaseModel):
181
192
  if value != "2":
182
193
  raise ValueError(f"Wrong manifest version (given {value})")
183
194
  return value
184
-
185
- _authors = field_validator("authors")(
186
- classmethod(valstr("authors", accept_none=True))
187
- )
@@ -7,16 +7,15 @@ from pydantic import field_serializer
7
7
  from pydantic import field_validator
8
8
  from pydantic.types import AwareDatetime
9
9
 
10
- from .._validators import valstr
10
+ from .._validators import cant_set_none
11
+ from .._validators import NonEmptyString
11
12
 
12
13
 
13
14
  class ProjectCreateV2(BaseModel):
14
15
 
15
16
  model_config = ConfigDict(extra="forbid")
16
17
 
17
- name: str
18
- # Validators
19
- _name = field_validator("name")(classmethod(valstr("name")))
18
+ name: NonEmptyString
20
19
 
21
20
 
22
21
  class ProjectReadV2(BaseModel):
@@ -34,6 +33,9 @@ class ProjectUpdateV2(BaseModel):
34
33
 
35
34
  model_config = ConfigDict(extra="forbid")
36
35
 
37
- name: Optional[str] = None
38
- # Validators
39
- _name = field_validator("name")(classmethod(valstr("name")))
36
+ name: Optional[NonEmptyString] = None
37
+
38
+ @field_validator("name")
39
+ @classmethod
40
+ def _cant_set_none(cls, v):
41
+ return cant_set_none(v)
@@ -0,0 +1,35 @@
1
+ from enum import Enum
2
+
3
+ from pydantic import BaseModel
4
+ from pydantic import Field
5
+
6
+
7
+ class WorkflowTaskStatusTypeV2(str, Enum):
8
+ """
9
+ Define the available values for the status of a `WorkflowTask`.
10
+
11
+ This model is used within the `Dataset.history` attribute, which is
12
+ constructed in the runner and then used in the API (e.g. in the
13
+ `api/v2/project/{project_id}/dataset/{dataset_id}/status` endpoint).
14
+
15
+ Attributes:
16
+ SUBMITTED: The `WorkflowTask` is part of a running job.
17
+ DONE: The most-recent execution of this `WorkflowTask` was successful.
18
+ FAILED: The most-recent execution of this `WorkflowTask` failed.
19
+ """
20
+
21
+ SUBMITTED = "submitted"
22
+ DONE = "done"
23
+ FAILED = "failed"
24
+
25
+
26
+ class LegacyStatusReadV2(BaseModel):
27
+ """
28
+ Response type for the
29
+ `/project/{project_id}/status/` endpoint
30
+ """
31
+
32
+ status: dict[
33
+ str,
34
+ WorkflowTaskStatusTypeV2,
35
+ ] = Field(default_factory=dict)