fractal-server 2.6.4__py3-none-any.whl → 2.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/__main__.py +1 -1
  3. fractal_server/app/models/linkusergroup.py +11 -0
  4. fractal_server/app/models/v2/__init__.py +2 -0
  5. fractal_server/app/models/v2/collection_state.py +1 -0
  6. fractal_server/app/models/v2/task.py +67 -2
  7. fractal_server/app/routes/admin/v2/__init__.py +16 -0
  8. fractal_server/app/routes/admin/{v2.py → v2/job.py} +20 -191
  9. fractal_server/app/routes/admin/v2/project.py +43 -0
  10. fractal_server/app/routes/admin/v2/task.py +133 -0
  11. fractal_server/app/routes/admin/v2/task_group.py +162 -0
  12. fractal_server/app/routes/api/v1/task_collection.py +4 -4
  13. fractal_server/app/routes/api/v2/__init__.py +8 -0
  14. fractal_server/app/routes/api/v2/_aux_functions.py +1 -68
  15. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +343 -0
  16. fractal_server/app/routes/api/v2/submit.py +16 -35
  17. fractal_server/app/routes/api/v2/task.py +85 -110
  18. fractal_server/app/routes/api/v2/task_collection.py +184 -196
  19. fractal_server/app/routes/api/v2/task_collection_custom.py +70 -64
  20. fractal_server/app/routes/api/v2/task_group.py +173 -0
  21. fractal_server/app/routes/api/v2/workflow.py +39 -102
  22. fractal_server/app/routes/api/v2/workflow_import.py +360 -0
  23. fractal_server/app/routes/api/v2/workflowtask.py +4 -8
  24. fractal_server/app/routes/auth/_aux_auth.py +86 -40
  25. fractal_server/app/routes/auth/current_user.py +5 -5
  26. fractal_server/app/routes/auth/group.py +73 -23
  27. fractal_server/app/routes/auth/router.py +0 -2
  28. fractal_server/app/routes/auth/users.py +8 -7
  29. fractal_server/app/runner/executors/slurm/ssh/executor.py +82 -63
  30. fractal_server/app/runner/v2/__init__.py +13 -7
  31. fractal_server/app/runner/v2/task_interface.py +4 -9
  32. fractal_server/app/schemas/user.py +1 -2
  33. fractal_server/app/schemas/v2/__init__.py +7 -0
  34. fractal_server/app/schemas/v2/dataset.py +2 -7
  35. fractal_server/app/schemas/v2/dumps.py +1 -2
  36. fractal_server/app/schemas/v2/job.py +1 -1
  37. fractal_server/app/schemas/v2/manifest.py +25 -1
  38. fractal_server/app/schemas/v2/project.py +1 -1
  39. fractal_server/app/schemas/v2/task.py +95 -36
  40. fractal_server/app/schemas/v2/task_collection.py +8 -6
  41. fractal_server/app/schemas/v2/task_group.py +85 -0
  42. fractal_server/app/schemas/v2/workflow.py +7 -2
  43. fractal_server/app/schemas/v2/workflowtask.py +9 -6
  44. fractal_server/app/security/__init__.py +8 -1
  45. fractal_server/config.py +8 -28
  46. fractal_server/data_migrations/2_7_0.py +323 -0
  47. fractal_server/images/models.py +2 -4
  48. fractal_server/main.py +1 -1
  49. fractal_server/migrations/versions/034a469ec2eb_task_groups.py +184 -0
  50. fractal_server/ssh/_fabric.py +186 -73
  51. fractal_server/string_tools.py +6 -2
  52. fractal_server/tasks/utils.py +19 -5
  53. fractal_server/tasks/v1/_TaskCollectPip.py +1 -1
  54. fractal_server/tasks/v1/background_operations.py +5 -5
  55. fractal_server/tasks/v1/get_collection_data.py +2 -2
  56. fractal_server/tasks/v2/_venv_pip.py +67 -70
  57. fractal_server/tasks/v2/background_operations.py +180 -69
  58. fractal_server/tasks/v2/background_operations_ssh.py +57 -70
  59. fractal_server/tasks/v2/database_operations.py +44 -0
  60. fractal_server/tasks/v2/endpoint_operations.py +104 -116
  61. fractal_server/tasks/v2/templates/_1_create_venv.sh +9 -5
  62. fractal_server/tasks/v2/templates/{_2_upgrade_pip.sh → _2_preliminary_pip_operations.sh} +1 -0
  63. fractal_server/tasks/v2/utils.py +5 -0
  64. fractal_server/utils.py +3 -2
  65. {fractal_server-2.6.4.dist-info → fractal_server-2.7.0.dist-info}/METADATA +3 -7
  66. {fractal_server-2.6.4.dist-info → fractal_server-2.7.0.dist-info}/RECORD +69 -60
  67. fractal_server/app/routes/auth/group_names.py +0 -34
  68. fractal_server/tasks/v2/_TaskCollectPip.py +0 -132
  69. {fractal_server-2.6.4.dist-info → fractal_server-2.7.0.dist-info}/LICENSE +0 -0
  70. {fractal_server-2.6.4.dist-info → fractal_server-2.7.0.dist-info}/WHEEL +0 -0
  71. {fractal_server-2.6.4.dist-info → fractal_server-2.7.0.dist-info}/entry_points.txt +0 -0
fractal_server/app/routes/auth/group.py

@@ -4,6 +4,7 @@ Definition of `/auth/group/` routes
 from fastapi import APIRouter
 from fastapi import Depends
 from fastapi import HTTPException
+from fastapi import Response
 from fastapi import status
 from sqlalchemy.exc import IntegrityError
 from sqlalchemy.ext.asyncio import AsyncSession
@@ -12,14 +13,19 @@ from sqlmodel import func
 from sqlmodel import select
 
 from . import current_active_superuser
-from ...db import get_async_db
-from ...schemas.user_group import UserGroupCreate
-from ...schemas.user_group import UserGroupRead
-from ...schemas.user_group import UserGroupUpdate
-from ._aux_auth import _get_single_group_with_user_ids
+from ._aux_auth import _get_single_usergroup_with_user_ids
+from ._aux_auth import _usergroup_or_404
+from fractal_server.app.db import get_async_db
 from fractal_server.app.models import LinkUserGroup
 from fractal_server.app.models import UserGroup
 from fractal_server.app.models import UserOAuth
+from fractal_server.app.models import UserSettings
+from fractal_server.app.models.v2 import TaskGroupV2
+from fractal_server.app.schemas.user_group import UserGroupCreate
+from fractal_server.app.schemas.user_group import UserGroupRead
+from fractal_server.app.schemas.user_group import UserGroupUpdate
+from fractal_server.app.schemas.user_settings import UserSettingsUpdate
+from fractal_server.app.security import FRACTAL_DEFAULT_GROUP_NAME
 from fractal_server.logger import set_logger
 
 logger = set_logger(__name__)
@@ -70,7 +76,7 @@ async def get_single_user_group(
     user: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> UserGroupRead:
-    group = await _get_single_group_with_user_ids(group_id=group_id, db=db)
+    group = await _get_single_usergroup_with_user_ids(group_id=group_id, db=db)
     return group
 
 
@@ -118,12 +124,7 @@ async def update_single_group(
     db: AsyncSession = Depends(get_async_db),
 ) -> UserGroupRead:
 
-    group = await db.get(UserGroup, group_id)
-    if group is None:
-        raise HTTPException(
-            status_code=status.HTTP_404_NOT_FOUND,
-            detail=f"UserGroup {group_id} not found.",
-        )
+    group = await _usergroup_or_404(group_id, db)
 
     # Check that all required users exist
     # Note: The reason for introducing `col` is as in
@@ -167,25 +168,74 @@ async def update_single_group(
     db.add(group)
     await db.commit()
 
-    updated_group = await _get_single_group_with_user_ids(
+    updated_group = await _get_single_usergroup_with_user_ids(
         group_id=group_id, db=db
     )
 
     return updated_group
 
 
-@router_group.delete(
-    "/group/{group_id}/", status_code=status.HTTP_405_METHOD_NOT_ALLOWED
-)
+@router_group.delete("/group/{group_id}/", status_code=204)
 async def delete_single_group(
     group_id: int,
     user: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
-) -> UserGroupRead:
-    raise HTTPException(
-        status_code=status.HTTP_405_METHOD_NOT_ALLOWED,
-        detail=(
-            "Deleting a user group is not allowed, as it may restrict "
-            "previously-granted access.",
-        ),
+) -> Response:
+
+    group = await _usergroup_or_404(group_id, db)
+
+    if group.name == FRACTAL_DEFAULT_GROUP_NAME:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                "Cannot delete default UserGroup "
+                f"'{FRACTAL_DEFAULT_GROUP_NAME}'."
+            ),
+        )
+
+    # Cascade operations
+
+    res = await db.execute(
+        select(LinkUserGroup).where(LinkUserGroup.group_id == group_id)
     )
+    for link in res.scalars().all():
+        await db.delete(link)
+
+    res = await db.execute(
+        select(TaskGroupV2).where(TaskGroupV2.user_group_id == group_id)
+    )
+    for task_group in res.scalars().all():
+        task_group.user_group_id = None
+        db.add(task_group)
+
+    # Delete
+
+    await db.delete(group)
+    await db.commit()
+
+    return Response(status_code=status.HTTP_204_NO_CONTENT)
+
+
+@router_group.patch("/group/{group_id}/user-settings/", status_code=200)
+async def patch_user_settings_bulk(
+    group_id: int,
+    settings_update: UserSettingsUpdate,
+    superuser: UserOAuth = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_async_db),
+):
+    await _usergroup_or_404(group_id, db)
+    res = await db.execute(
+        select(UserSettings)
+        .join(UserOAuth)
+        .where(LinkUserGroup.user_id == UserOAuth.id)
+        .where(LinkUserGroup.group_id == group_id)
+    )
+    settings_list = res.scalars().all()
+    update = settings_update.dict(exclude_unset=True)
+    for settings in settings_list:
+        for k, v in update.items():
+            setattr(settings, k, v)
+        db.add(settings)
+    await db.commit()
+
+    return Response(status_code=status.HTTP_200_OK)
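Note: `DELETE /auth/group/{group_id}/` changes from an unconditional 405 into a real delete with two cascade steps (`LinkUserGroup` rows are removed, while `TaskGroupV2` rows are only detached by setting `user_group_id = None`), and the new PATCH route applies one `UserSettingsUpdate` payload to every member of the group. A hedged client-side sketch of the two new calls, assuming `router_auth` is mounted under `/auth`; the base URL, token, group ID, and the `slurm_accounts` field are placeholders, not part of this diff:

    import httpx

    BASE = "http://localhost:8000/auth"  # assumption: local server, /auth prefix
    HEADERS = {"Authorization": "Bearer <superuser-token>"}  # placeholder token

    # Delete a user group: 204 on success, 404 if missing, 422 for the default group
    res = httpx.delete(f"{BASE}/group/42/", headers=HEADERS)
    assert res.status_code == 204

    # Bulk-patch user settings for every member of group 42
    res = httpx.patch(
        f"{BASE}/group/42/user-settings/",
        headers=HEADERS,
        json={"slurm_accounts": ["my-account"]},  # assumed UserSettingsUpdate field
    )
    assert res.status_code == 200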
fractal_server/app/routes/auth/router.py

@@ -2,7 +2,6 @@ from fastapi import APIRouter
 
 from .current_user import router_current_user
 from .group import router_group
-from .group_names import router_group_names
 from .login import router_login
 from .oauth import router_oauth
 from .register import router_register
@@ -13,7 +12,6 @@ router_auth = APIRouter()
 router_auth.include_router(router_register)
 router_auth.include_router(router_current_user)
 router_auth.include_router(router_login)
-router_auth.include_router(router_group_names)
 router_auth.include_router(router_users)
 router_auth.include_router(router_group)
 router_auth.include_router(router_oauth)
fractal_server/app/routes/auth/users.py

@@ -20,7 +20,7 @@ from ...schemas.user import UserRead
 from ...schemas.user import UserUpdate
 from ...schemas.user import UserUpdateWithNewGroupIds
 from ..aux.validate_user_settings import verify_user_has_settings
-from ._aux_auth import _get_single_user_with_group_ids
+from ._aux_auth import _get_single_user_with_groups
 from fractal_server.app.models import LinkUserGroup
 from fractal_server.app.models import UserGroup
 from fractal_server.app.models import UserOAuth
@@ -41,13 +41,14 @@ logger = set_logger(__name__)
 @router_users.get("/users/{user_id}/", response_model=UserRead)
 async def get_user(
     user_id: int,
-    group_ids: bool = True,
+    group_ids_names: bool = True,
     superuser: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> UserRead:
     user = await _user_or_404(user_id, db)
-    if group_ids:
-        user = await _get_single_user_with_group_ids(user, db)
+    if group_ids_names:
+        user_with_groups = await _get_single_user_with_groups(user, db)
+        return user_with_groups
     return user
 
 
@@ -163,12 +164,12 @@ async def patch_user(
         # Nothing to do, just continue
         patched_user = user_to_patch
 
-    # Enrich user object with `group_ids` attribute
-    patched_user_with_group_ids = await _get_single_user_with_group_ids(
+    # Enrich user object with `group_ids_names` attribute
+    patched_user_with_groups = await _get_single_user_with_groups(
         patched_user, db
     )
 
-    return patched_user_with_group_ids
+    return patched_user_with_groups
 
 
 @router_users.get("/users/", response_model=list[UserRead])
fractal_server/app/runner/executors/slurm/ssh/executor.py

@@ -861,7 +861,7 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
 
         # Transfer archive
         t_0_put = time.perf_counter()
-        self.fractal_ssh.put(
+        self.fractal_ssh.send_file(
             local=tarfile_path_local,
             remote=tarfile_path_remote,
         )
@@ -1055,55 +1055,59 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
         Arguments:
             jobid: ID of the SLURM job
         """
-
-        # Loop over all job_ids, and fetch future and job objects
-        futures: list[Future] = []
-        jobs: list[SlurmJob] = []
-        with self.jobs_lock:
-            for job_id in job_ids:
-                future, job = self.jobs.pop(job_id)
-                futures.append(future)
-                jobs.append(job)
-            if not self.jobs:
-                self.jobs_empty_cond.notify_all()
-
-        # Fetch subfolder from remote host
+        # Handle all uncaught exceptions in this broad try/except block
         try:
-            self._get_subfolder_sftp(jobs=jobs)
-        except NoValidConnectionsError as e:
-            logger.error("NoValidConnectionError")
-            logger.error(f"{str(e)=}")
-            logger.error(f"{e.errors=}")
-            for err in e.errors:
-                logger.error(f"{str(err)}")
-
-            raise e
-
-        # First round of checking whether all output files exist
-        missing_out_paths = []
-        for job in jobs:
-            for ind_out_path, out_path in enumerate(
-                job.output_pickle_files_local
-            ):
-                if not out_path.exists():
-                    missing_out_paths.append(out_path)
-        num_missing = len(missing_out_paths)
-        if num_missing > 0:
-            # Output pickle files may be missing e.g. because of some slow
-            # filesystem operation; wait some time before re-trying
-            settings = Inject(get_settings)
-            sleep_time = settings.FRACTAL_SLURM_ERROR_HANDLING_INTERVAL
             logger.info(
-                f"{num_missing} output pickle files are missing; "
-                f"sleep {sleep_time} seconds."
+                f"[FractalSlurmSSHExecutor._completion] START, for {job_ids=}."
             )
-            for missing_file in missing_out_paths:
-                logger.debug(f"Missing output pickle file: {missing_file}")
-            time.sleep(sleep_time)
 
-        # Handle all jobs
-        for ind_job, job_id in enumerate(job_ids):
+            # Loop over all job_ids, and fetch future and job objects
+            futures: list[Future] = []
+            jobs: list[SlurmJob] = []
+            with self.jobs_lock:
+                for job_id in job_ids:
+                    future, job = self.jobs.pop(job_id)
+                    futures.append(future)
+                    jobs.append(job)
+                if not self.jobs:
+                    self.jobs_empty_cond.notify_all()
+
+            # Fetch subfolder from remote host
            try:
+                self._get_subfolder_sftp(jobs=jobs)
+            except NoValidConnectionsError as e:
+                logger.error("NoValidConnectionError")
+                logger.error(f"{str(e)=}")
+                logger.error(f"{e.errors=}")
+                for err in e.errors:
+                    logger.error(f"{str(err)}")
+
+                raise e
+
+            # First round of checking whether all output files exist
+            missing_out_paths = []
+            for job in jobs:
+                for ind_out_path, out_path in enumerate(
+                    job.output_pickle_files_local
+                ):
+                    if not out_path.exists():
+                        missing_out_paths.append(out_path)
+            num_missing = len(missing_out_paths)
+            if num_missing > 0:
+                # Output pickle files may be missing e.g. because of some slow
+                # filesystem operation; wait some time before re-trying
+                settings = Inject(get_settings)
+                sleep_time = settings.FRACTAL_SLURM_ERROR_HANDLING_INTERVAL
+                logger.info(
+                    f"{num_missing} output pickle files are missing; "
+                    f"sleep {sleep_time} seconds."
+                )
+                for missing_file in missing_out_paths:
+                    logger.debug(f"Missing output pickle file: {missing_file}")
+                time.sleep(sleep_time)
+
+            # Handle all jobs
+            for ind_job, job_id in enumerate(job_ids):
                 # Retrieve job and future objects
                 job = jobs[ind_job]
                 future = futures[ind_job]
@@ -1128,6 +1132,11 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
                             remaining_futures=remaining_futures,
                             remaining_job_ids=remaining_job_ids,
                         )
+                        logger.info(
+                            "[FractalSlurmSSHExecutor._completion] END, "
+                            f"for {job_ids=}, with JobExecutionError due "
+                            f"to missing {out_path.as_posix()}."
+                        )
                         return
                     except InvalidStateError:
                         logger.warning(
@@ -1141,6 +1150,12 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
                             remaining_futures=remaining_futures,
                             remaining_job_ids=remaining_job_ids,
                         )
+                        logger.info(
+                            "[FractalSlurmSSHExecutor._completion] END, "
+                            f"for {job_ids=}, with JobExecutionError/"
+                            "InvalidStateError due to "
+                            f"missing {out_path.as_posix()}."
+                        )
                         return
 
                 # Read the task output
@@ -1217,16 +1232,22 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
                 else:
                     future.set_result(outputs)
 
-            except Exception as e:
+        except Exception as e:
+            logger.warning(
+                "[FractalSlurmSSHExecutor._completion] "
+                f"An exception took place: {str(e)}."
+            )
+            for future in futures:
                 try:
+                    logger.info(f"Set exception for {future=}")
                     future.set_exception(e)
-                    return
                 except InvalidStateError:
-                    logger.warning(
-                        f"Future {future} (SLURM job ID: {job_id}) was already"
-                        " cancelled, exit from"
-                        " FractalSlurmSSHExecutor._completion."
-                    )
+                    logger.info(f"Future {future} was already cancelled.")
+            logger.info(
+                f"[FractalSlurmSSHExecutor._completion] END, for {job_ids=}, "
+                "from within exception handling."
+            )
+            return
 
     def _get_subfolder_sftp(self, jobs: list[SlurmJob]) -> None:
         """
@@ -1255,16 +1276,9 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
             self.workflow_dir_remote / f"{subfolder_name}.tar.gz"
         ).as_posix()
 
-        # Remove local tarfile - FIXME SSH: is this needed?
-        logger.warning(f"In principle I just removed {tarfile_path_local}")
-        logger.warning(f"{Path(tarfile_path_local).exists()=}")
-
-        # Remove remote tarfile - FIXME SSH: is this needed?
-        # rm_command = f"rm {tarfile_path_remote}"
-        # _run_command_over_ssh(cmd=rm_command, fractal_ssh=self.fractal_ssh)
-        logger.warning(f"Unlink {tarfile_path_remote=} - START")
-        self.fractal_ssh.sftp().unlink(tarfile_path_remote)
-        logger.warning(f"Unlink {tarfile_path_remote=} - STOP")
+        # Remove remote tarfile
+        rm_command = f"rm {tarfile_path_remote}"
+        self.fractal_ssh.run_command(cmd=rm_command)
 
         # Create remote tarfile
         tar_command = (
@@ -1278,7 +1292,7 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
 
         # Fetch tarfile
         t_0_get = time.perf_counter()
-        self.fractal_ssh.get(
+        self.fractal_ssh.fetch_file(
             remote=tarfile_path_remote,
             local=tarfile_path_local,
         )
@@ -1291,6 +1305,11 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
         # Extract tarfile locally
         extract_archive(Path(tarfile_path_local))
 
+        # Remove local tarfile
+        if Path(tarfile_path_local).exists():
+            logger.warning(f"Remove existing file {tarfile_path_local}.")
+            Path(tarfile_path_local).unlink()
+
         t_1 = time.perf_counter()
         logger.info("[_get_subfolder_sftp] End - " f"elapsed: {t_1-t_0:.3f} s")
 
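Note: the `_completion` rewrite moves the whole callback body inside one broad try/except and, on any uncaught error, fans the exception out to every future popped from `self.jobs`, so no caller stays blocked on `Future.result()`. A minimal standalone sketch of that pattern (simplified names, not the executor's actual API):

    from concurrent.futures import Future, InvalidStateError

    def complete_all(futures: list[Future], compute_results) -> None:
        # Broad try/except: an uncaught exception must reach every waiting
        # future, otherwise its consumer would block forever on .result().
        try:
            for future, result in zip(futures, compute_results()):
                future.set_result(result)
        except Exception as e:
            for future in futures:
                try:
                    future.set_exception(e)
                except InvalidStateError:
                    pass  # already cancelled or resolved; nothing left to do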
177
177
  return
178
178
 
179
179
  try:
180
-
181
180
  # Create WORKFLOW_DIR_LOCAL
182
- original_umask = os.umask(0)
183
- WORKFLOW_DIR_LOCAL.mkdir(parents=True, mode=0o755)
184
- os.umask(original_umask)
181
+ if FRACTAL_RUNNER_BACKEND == "slurm":
182
+ original_umask = os.umask(0)
183
+ WORKFLOW_DIR_LOCAL.mkdir(parents=True, mode=0o755)
184
+ os.umask(original_umask)
185
+ else:
186
+ WORKFLOW_DIR_LOCAL.mkdir(parents=True)
185
187
 
186
188
  # Define and create WORKFLOW_DIR_REMOTE
187
189
  if FRACTAL_RUNNER_BACKEND == "local":
@@ -214,15 +216,19 @@ async def submit_workflow(
214
216
  order=order,
215
217
  task_name=task_name,
216
218
  )
217
- original_umask = os.umask(0)
218
- (WORKFLOW_DIR_LOCAL / subfolder_name).mkdir(mode=0o755)
219
- os.umask(original_umask)
220
219
  if FRACTAL_RUNNER_BACKEND == "slurm":
220
+ # Create local subfolder (with 755) and remote one
221
+ # (via `sudo -u`)
222
+ original_umask = os.umask(0)
223
+ (WORKFLOW_DIR_LOCAL / subfolder_name).mkdir(mode=0o755)
224
+ os.umask(original_umask)
221
225
  _mkdir_as_user(
222
226
  folder=str(WORKFLOW_DIR_REMOTE / subfolder_name),
223
227
  user=slurm_user,
224
228
  )
225
229
  else:
230
+ # Create local subfolder (with standard permission set)
231
+ (WORKFLOW_DIR_LOCAL / subfolder_name).mkdir()
226
232
  logger.info("Skip remote-subfolder creation")
227
233
  except Exception as e:
228
234
  error_type = type(e).__name__
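Note: the `os.umask(0)` dance on the SLURM branch exists because `Path.mkdir(mode=0o755)` is filtered by the process umask; without clearing it, a umask of 0o077 would silently yield 0o700 and the impersonated SLURM user could not read the folder. A standalone illustration (the path is a placeholder):

    import os
    from pathlib import Path

    target = Path("/tmp/fractal-umask-demo")  # placeholder path

    # Effective mode is mode & ~umask; zero the umask so 0o755 applies verbatim.
    original_umask = os.umask(0)
    try:
        target.mkdir(parents=True, mode=0o755)
    finally:
        os.umask(original_umask)  # always restore the previous umask

    print(oct(target.stat().st_mode & 0o777))  # -> 0o755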
fractal_server/app/runner/v2/task_interface.py

@@ -1,6 +1,7 @@
 from typing import Any
 
 from pydantic import BaseModel
+from pydantic import Extra
 from pydantic import Field
 from pydantic import validator
 
@@ -9,9 +10,7 @@ from fractal_server.images import Filters
 from fractal_server.urls import normalize_url
 
 
-class TaskOutput(BaseModel):
-    class Config:
-        extra = "forbid"
+class TaskOutput(BaseModel, extra=Extra.forbid):
 
     image_list_updates: list[SingleImageTaskOutput] = Field(
         default_factory=list
@@ -43,9 +42,7 @@ class TaskOutput(BaseModel):
         return [normalize_url(zarr_url) for zarr_url in v]
 
 
-class InitArgsModel(BaseModel):
-    class Config:
-        extra = "forbid"
+class InitArgsModel(BaseModel, extra=Extra.forbid):
 
     zarr_url: str
     init_args: dict[str, Any] = Field(default_factory=dict)
@@ -55,8 +52,6 @@ class InitArgsModel(BaseModel):
         return normalize_url(v)
 
 
-class InitTaskOutput(BaseModel):
-    class Config:
-        extra = "forbid"
+class InitTaskOutput(BaseModel, extra=Extra.forbid):
 
     parallelization_list: list[InitArgsModel] = Field(default_factory=list)
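Note: the repeated pattern here (and in the schema modules below) swaps the nested `class Config` for Pydantic v1's class-keyword form; both reject unknown fields at validation time. A quick self-contained check:

    from pydantic import BaseModel, Extra, ValidationError

    class Point(BaseModel, extra=Extra.forbid):
        x: int
        y: int

    try:
        Point(x=1, y=2, z=3)  # "z" is not a declared field
    except ValidationError as err:
        print(err)  # -> "extra fields not permitted"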
fractal_server/app/schemas/user.py

@@ -41,8 +41,7 @@ class UserRead(schemas.BaseUser[int]):
     """
 
     username: Optional[str]
-    group_names: Optional[list[str]] = None
-    group_ids: Optional[list[int]] = None
+    group_ids_names: Optional[list[tuple[int, str]]] = None
     oauth_accounts: list[OAuthAccountRead]
 
 
fractal_server/app/schemas/v2/__init__.py

@@ -20,20 +20,27 @@ from .project import ProjectUpdateV2  # noqa F401
 from .task import TaskCreateV2  # noqa F401
 from .task import TaskExportV2  # noqa F401
 from .task import TaskImportV2  # noqa F401
+from .task import TaskImportV2Legacy  # noqa F401
 from .task import TaskReadV2  # noqa F401
 from .task import TaskUpdateV2  # noqa F401
 from .task_collection import CollectionStateReadV2  # noqa F401
 from .task_collection import CollectionStatusV2  # noqa F401
 from .task_collection import TaskCollectCustomV2  # noqa F401
 from .task_collection import TaskCollectPipV2  # noqa F401
+from .task_group import TaskGroupCreateV2  # noqa F401
+from .task_group import TaskGroupReadV2  # noqa F401
+from .task_group import TaskGroupUpdateV2  # noqa F401
+from .task_group import TaskGroupV2OriginEnum  # noqa F401
 from .workflow import WorkflowCreateV2  # noqa F401
 from .workflow import WorkflowExportV2  # noqa F401
 from .workflow import WorkflowImportV2  # noqa F401
 from .workflow import WorkflowReadV2  # noqa F401
+from .workflow import WorkflowReadV2WithWarnings  # noqa F401
 from .workflow import WorkflowUpdateV2  # noqa F401
 from .workflowtask import WorkflowTaskCreateV2  # noqa F401
 from .workflowtask import WorkflowTaskExportV2  # noqa F401
 from .workflowtask import WorkflowTaskImportV2  # noqa F401
 from .workflowtask import WorkflowTaskReadV2  # noqa F401
+from .workflowtask import WorkflowTaskReadV2WithWarning  # noqa F401
 from .workflowtask import WorkflowTaskStatusTypeV2  # noqa F401
 from .workflowtask import WorkflowTaskUpdateV2  # noqa F401
fractal_server/app/schemas/v2/dataset.py

@@ -66,9 +66,7 @@ class DatasetReadV2(BaseModel):
     )
 
 
-class DatasetUpdateV2(BaseModel):
-    class Config:
-        extra = "forbid"
+class DatasetUpdateV2(BaseModel, extra=Extra.forbid):
 
     name: Optional[str]
     zarr_dir: Optional[str]
@@ -84,7 +82,7 @@ class DatasetUpdateV2(BaseModel):
     _name = validator("name", allow_reuse=True)(valstr("name"))
 
 
-class DatasetImportV2(BaseModel):
+class DatasetImportV2(BaseModel, extra=Extra.forbid):
     """
     Class for `Dataset` import.
 
@@ -95,9 +93,6 @@ class DatasetImportV2(BaseModel):
         filters:
     """
 
-    class Config:
-        extra = "forbid"
-
     name: str
     zarr_dir: str
     images: list[SingleImage] = Field(default_factory=[])
fractal_server/app/schemas/v2/dumps.py

@@ -30,8 +30,7 @@ class TaskDumpV2(BaseModel):
 
     command_non_parallel: Optional[str]
     command_parallel: Optional[str]
-    source: str
-    owner: Optional[str]
+    source: Optional[str] = None
     version: Optional[str]
 
     input_types: dict[str, bool]
fractal_server/app/schemas/v2/job.py

@@ -109,6 +109,6 @@ class JobReadV2(BaseModel):
     )
 
 
-class JobUpdateV2(BaseModel):
+class JobUpdateV2(BaseModel, extra=Extra.forbid):
 
     status: JobStatusTypeV2
fractal_server/app/schemas/v2/manifest.py

@@ -7,6 +7,8 @@ from pydantic import HttpUrl
 from pydantic import root_validator
 from pydantic import validator
 
+from .._validators import valstr
+
 
 class TaskManifestV2(BaseModel):
     """
@@ -50,6 +52,10 @@ class TaskManifestV2(BaseModel):
     docs_info: Optional[str] = None
     docs_link: Optional[HttpUrl] = None
 
+    category: Optional[str] = None
+    modality: Optional[str] = None
+    tags: list[str] = Field(default_factory=list)
+
     @root_validator
     def validate_executable_args_meta(cls, values):
 
@@ -128,7 +134,8 @@ class ManifestV2(BaseModel):
     manifest_version: str
     task_list: list[TaskManifestV2]
     has_args_schemas: bool = False
-    args_schema_version: Optional[str]
+    args_schema_version: Optional[str] = None
+    authors: Optional[str] = None
 
     @root_validator()
     def _check_args_schemas_are_present(cls, values):
@@ -152,8 +159,25 @@ class ManifestV2(BaseModel):
             )
         return values
 
+    @root_validator()
+    def _unique_task_names(cls, values):
+        task_list = values["task_list"]
+        task_list_names = [t.name for t in task_list]
+        if len(set(task_list_names)) != len(task_list_names):
+            raise ValueError(
+                (
+                    "Task names in manifest must be unique.\n",
+                    f"Given: {task_list_names}.",
+                )
+            )
+        return values
+
     @validator("manifest_version")
     def manifest_version_2(cls, value):
         if value != "2":
             raise ValueError(f"Wrong manifest version (given {value})")
         return value
+
+    _authors = validator("authors", allow_reuse=True)(
+        valstr("authors", accept_none=True)
+    )
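Note: with the new `_unique_task_names` root validator, a manifest that repeats a task name now fails at parse time instead of causing ambiguity later. A hedged sketch of what now fails; the minimal `TaskManifestV2` fields used here are an assumption, since a real manifest also carries args schemas, meta fields, etc.:

    from pydantic import ValidationError
    from fractal_server.app.schemas.v2.manifest import ManifestV2

    try:
        ManifestV2(
            manifest_version="2",
            task_list=[
                {"name": "thresholding", "executable_parallel": "threshold.py"},
                {"name": "thresholding", "executable_parallel": "threshold_v2.py"},
            ],
        )
    except ValidationError as err:
        print(err)  # -> "Task names in manifest must be unique."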
fractal_server/app/schemas/v2/project.py

@@ -27,7 +27,7 @@ class ProjectReadV2(BaseModel):
     )
 
 
-class ProjectUpdateV2(BaseModel):
+class ProjectUpdateV2(BaseModel, extra=Extra.forbid):
 
     name: Optional[str]
     # Validators
  # Validators