fractal-server 2.7.0a3__py3-none-any.whl → 2.7.0a5__py3-none-any.whl

This diff compares the contents of two package versions that were publicly released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as published.
Files changed (53)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/__main__.py +3 -9
  3. fractal_server/app/models/v2/collection_state.py +1 -0
  4. fractal_server/app/models/v2/task.py +27 -3
  5. fractal_server/app/routes/admin/v2/task.py +4 -17
  6. fractal_server/app/routes/admin/v2/task_group.py +21 -0
  7. fractal_server/app/routes/api/v1/task_collection.py +4 -4
  8. fractal_server/app/routes/api/v2/_aux_functions.py +1 -7
  9. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +75 -2
  10. fractal_server/app/routes/api/v2/task.py +16 -42
  11. fractal_server/app/routes/api/v2/task_collection.py +175 -204
  12. fractal_server/app/routes/api/v2/task_collection_custom.py +31 -58
  13. fractal_server/app/routes/api/v2/task_group.py +29 -1
  14. fractal_server/app/routes/api/v2/workflow.py +11 -46
  15. fractal_server/app/routes/api/v2/workflowtask.py +0 -1
  16. fractal_server/app/routes/auth/_aux_auth.py +15 -12
  17. fractal_server/app/routes/auth/group.py +46 -23
  18. fractal_server/app/runner/v2/task_interface.py +4 -9
  19. fractal_server/app/schemas/v2/dataset.py +2 -7
  20. fractal_server/app/schemas/v2/dumps.py +1 -2
  21. fractal_server/app/schemas/v2/job.py +1 -1
  22. fractal_server/app/schemas/v2/project.py +1 -1
  23. fractal_server/app/schemas/v2/task.py +5 -10
  24. fractal_server/app/schemas/v2/task_collection.py +8 -6
  25. fractal_server/app/schemas/v2/task_group.py +31 -3
  26. fractal_server/app/schemas/v2/workflow.py +2 -2
  27. fractal_server/app/schemas/v2/workflowtask.py +2 -5
  28. fractal_server/data_migrations/2_7_0.py +1 -11
  29. fractal_server/images/models.py +2 -4
  30. fractal_server/main.py +1 -1
  31. fractal_server/migrations/versions/034a469ec2eb_task_groups.py +184 -0
  32. fractal_server/string_tools.py +6 -2
  33. fractal_server/tasks/utils.py +19 -5
  34. fractal_server/tasks/v1/_TaskCollectPip.py +1 -1
  35. fractal_server/tasks/v1/background_operations.py +5 -5
  36. fractal_server/tasks/v1/get_collection_data.py +2 -2
  37. fractal_server/tasks/v2/_venv_pip.py +62 -70
  38. fractal_server/tasks/v2/background_operations.py +170 -51
  39. fractal_server/tasks/v2/background_operations_ssh.py +35 -77
  40. fractal_server/tasks/v2/database_operations.py +7 -17
  41. fractal_server/tasks/v2/endpoint_operations.py +91 -145
  42. fractal_server/tasks/v2/templates/_1_create_venv.sh +9 -5
  43. fractal_server/tasks/v2/utils.py +5 -0
  44. fractal_server/utils.py +3 -2
  45. {fractal_server-2.7.0a3.dist-info → fractal_server-2.7.0a5.dist-info}/METADATA +1 -1
  46. {fractal_server-2.7.0a3.dist-info → fractal_server-2.7.0a5.dist-info}/RECORD +49 -52
  47. fractal_server/migrations/versions/742b74e1cc6e_revamp_taskv2_and_taskgroupv2.py +0 -101
  48. fractal_server/migrations/versions/7cf1baae8fb4_task_group_v2.py +0 -66
  49. fractal_server/migrations/versions/df7cc3501bf7_linkusergroup_timestamp_created.py +0 -42
  50. fractal_server/tasks/v2/_TaskCollectPip.py +0 -132
  51. {fractal_server-2.7.0a3.dist-info → fractal_server-2.7.0a5.dist-info}/LICENSE +0 -0
  52. {fractal_server-2.7.0a3.dist-info → fractal_server-2.7.0a5.dist-info}/WHEEL +0 -0
  53. {fractal_server-2.7.0a3.dist-info → fractal_server-2.7.0a5.dist-info}/entry_points.txt +0 -0
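
Note: judging from their content, the first group of hunks below appears to come from fractal_server/app/routes/api/v2/task_collection.py (the collect_tasks_pip and check_collection_status endpoints).
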
@@ -1,7 +1,4 @@
- import json
  from pathlib import Path
- from shutil import copy as shell_copy
- from tempfile import TemporaryDirectory
  from typing import Optional

  from fastapi import APIRouter
@@ -11,7 +8,7 @@ from fastapi import HTTPException
  from fastapi import Request
  from fastapi import Response
  from fastapi import status
- from pydantic.error_wrappers import ValidationError
+ from pydantic import ValidationError
  from sqlmodel import select

  from .....config import get_settings
@@ -21,27 +18,27 @@ from .....syringe import Inject
  from ....db import AsyncSession
  from ....db import get_async_db
  from ....models.v2 import CollectionStateV2
- from ....models.v2 import TaskV2
+ from ....models.v2 import TaskGroupV2
  from ....schemas.v2 import CollectionStateReadV2
  from ....schemas.v2 import CollectionStatusV2
  from ....schemas.v2 import TaskCollectPipV2
- from ....schemas.v2 import TaskReadV2
+ from ....schemas.v2 import TaskGroupCreateV2
  from ...aux.validate_user_settings import validate_user_settings
  from ._aux_functions_tasks import _get_valid_user_group_id
+ from ._aux_functions_tasks import _verify_non_duplication_group_constraint
+ from ._aux_functions_tasks import _verify_non_duplication_user_constraint
  from fractal_server.app.models import UserOAuth
  from fractal_server.app.routes.auth import current_active_user
  from fractal_server.app.routes.auth import current_active_verified_user
- from fractal_server.string_tools import slugify_task_name_for_source
- from fractal_server.tasks.utils import get_absolute_venv_path
- from fractal_server.tasks.utils import get_collection_log
- from fractal_server.tasks.utils import get_collection_path
- from fractal_server.tasks.v2._TaskCollectPip import _TaskCollectPip
+ from fractal_server.tasks.utils import _normalize_package_name
+ from fractal_server.tasks.utils import get_collection_log_v2
  from fractal_server.tasks.v2.background_operations import (
  background_collect_pip,
  )
- from fractal_server.tasks.v2.endpoint_operations import create_package_dir_pip
- from fractal_server.tasks.v2.endpoint_operations import download_package
- from fractal_server.tasks.v2.endpoint_operations import inspect_package
+ from fractal_server.tasks.v2.endpoint_operations import (
+ get_package_version_from_pypi,
+ )
+ from fractal_server.tasks.v2.utils import _parse_wheel_filename
  from fractal_server.tasks.v2.utils import get_python_interpreter_v2

  router = APIRouter()
@@ -52,19 +49,6 @@ logger = set_logger(__name__)
  @router.post(
  "/collect/pip/",
  response_model=CollectionStateReadV2,
- responses={
- 201: dict(
- description=(
- "Task collection successfully started in the background"
- )
- ),
- 200: dict(
- description=(
- "Package already collected. Returning info on already "
- "available tasks"
- )
- ),
- },
  )
  async def collect_tasks_pip(
  task_collect: TaskCollectPipV2,
@@ -86,35 +70,67 @@ async def collect_tasks_pip(
  # Get settings
  settings = Inject(get_settings)

+ # Initialize task-group attributes
+ task_group_attrs = dict(user_id=user.id)
+
  # Set/check python version
  if task_collect.python_version is None:
- task_collect.python_version = (
- settings.FRACTAL_TASKS_PYTHON_DEFAULT_VERSION
- )
+ task_group_attrs[
+ "python_version"
+ ] = settings.FRACTAL_TASKS_PYTHON_DEFAULT_VERSION
+ else:
+ task_group_attrs["python_version"] = task_collect.python_version
  try:
- get_python_interpreter_v2(python_version=task_collect.python_version)
+ get_python_interpreter_v2(
+ python_version=task_group_attrs["python_version"]
+ )
  except ValueError:
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
  detail=(
- f"Python version {task_collect.python_version} is "
+ f"Python version {task_group_attrs['python_version']} is "
  "not available for Fractal task collection."
  ),
  )

- # Validate payload
- try:
- task_pkg = _TaskCollectPip(**task_collect.dict(exclude_unset=True))
- except ValidationError as e:
- raise HTTPException(
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=f"Invalid task-collection object. Original error: {e}",
- )
+ # Set pip_extras
+ if task_collect.package_extras is not None:
+ task_group_attrs["pip_extras"] = task_collect.package_extras

- # Validate user settings (backend-specific)
- user_settings = await validate_user_settings(
- user=user, backend=settings.FRACTAL_RUNNER_BACKEND, db=db
- )
+ # Set pinned_package_versions
+ if task_collect.pinned_package_versions is not None:
+ task_group_attrs[
+ "pinned_package_versions"
+ ] = task_collect.pinned_package_versions
+
+ # Set pkg_name, version, origin and wheel_path
+ if task_collect.package.endswith(".whl"):
+ try:
+ task_group_attrs["wheel_path"] = task_collect.package
+ wheel_filename = Path(task_group_attrs["wheel_path"]).name
+ wheel_info = _parse_wheel_filename(wheel_filename)
+ except ValueError as e:
+ raise HTTPException(
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+ detail=(
+ f"Invalid wheel-file name {wheel_filename}. "
+ f"Original error: {str(e)}",
+ ),
+ )
+ task_group_attrs["pkg_name"] = _normalize_package_name(
+ wheel_info["distribution"]
+ )
+ task_group_attrs["version"] = wheel_info["version"]
+ task_group_attrs["origin"] = "wheel-file"
+ else:
+ pkg_name = task_collect.package
+ task_group_attrs["pkg_name"] = _normalize_package_name(pkg_name)
+ task_group_attrs["origin"] = "pypi"
+ latest_version = await get_package_version_from_pypi(
+ task_collect.package,
+ task_collect.package_version,
+ )
+ task_group_attrs["version"] = latest_version

  # Validate query parameters related to user-group ownership
@@ -124,183 +140,142 @@ async def collect_tasks_pip(
  db=db,
  )

- # END of SSH/non-SSH common part
+ # Set user_group_id
+ task_group_attrs["user_group_id"] = user_group_id

- if settings.FRACTAL_RUNNER_BACKEND == "slurm_ssh":
+ # Validate user settings (backend-specific)
+ user_settings = await validate_user_settings(
+ user=user, backend=settings.FRACTAL_RUNNER_BACKEND, db=db
+ )

- from fractal_server.tasks.v2.background_operations_ssh import (
- background_collect_pip_ssh,
+ # Set path and venv_path
+ if settings.FRACTAL_RUNNER_BACKEND == "slurm_ssh":
+ base_tasks_path = user_settings.ssh_tasks_dir
+ else:
+ base_tasks_path = settings.FRACTAL_TASKS_DIR.as_posix()
+ task_group_path = (
+ Path(base_tasks_path)
+ / str(user.id)
+ / task_group_attrs["pkg_name"]
+ / task_group_attrs["version"]
+ ).as_posix()
+ task_group_attrs["path"] = task_group_path
+ task_group_attrs["venv_path"] = Path(task_group_path, "venv").as_posix()
+
+ # Validate TaskGroupV2 attributes
+ try:
+ TaskGroupCreateV2(**task_group_attrs)
+ except ValidationError as e:
+ raise HTTPException(
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+ detail=f"Invalid task-group object. Original error: {e}",
  )

- # Construct and return state
- state = CollectionStateV2(
- data=dict(
- status=CollectionStatusV2.PENDING, package=task_collect.package
- )
- )
- db.add(state)
- await db.commit()
+ # Database checks

- # User appropriate FractalSSH object
- ssh_credentials = dict(
- user=user_settings.ssh_username,
- host=user_settings.ssh_host,
- key_path=user_settings.ssh_private_key_path,
- )
- fractal_ssh_list = request.app.state.fractal_ssh_list
- fractal_ssh = fractal_ssh_list.get(**ssh_credentials)
+ # Verify non-duplication constraints
+ await _verify_non_duplication_user_constraint(
+ user_id=user.id,
+ pkg_name=task_group_attrs["pkg_name"],
+ version=task_group_attrs["version"],
+ db=db,
+ )
+ await _verify_non_duplication_group_constraint(
+ user_group_id=task_group_attrs["user_group_id"],
+ pkg_name=task_group_attrs["pkg_name"],
+ version=task_group_attrs["version"],
+ db=db,
+ )

- background_tasks.add_task(
- background_collect_pip_ssh,
- state_id=state.id,
- task_pkg=task_pkg,
- fractal_ssh=fractal_ssh,
- tasks_base_dir=user_settings.ssh_tasks_dir,
- user_id=user.id,
- user_group_id=user_group_id,
+ # Verify that task-group path is unique
+ stm = select(TaskGroupV2).where(TaskGroupV2.path == task_group_path)
+ res = await db.execute(stm)
+ for conflicting_task_group in res.scalars().all():
+ raise HTTPException(
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+ detail=(
+ f"Another task-group already has path={task_group_path}.\n"
+ f"{conflicting_task_group=}"
+ ),
  )

- response.status_code = status.HTTP_201_CREATED
- return state
+ # On-disk checks

- # Actual non-SSH endpoint
-
- logger = set_logger(logger_name="collect_tasks_pip")
+ if settings.FRACTAL_RUNNER_BACKEND != "slurm_ssh":

- with TemporaryDirectory() as tmpdir:
- try:
- # Copy or download the package wheel file to tmpdir
- if task_pkg.is_local_package:
- shell_copy(task_pkg.package_path.as_posix(), tmpdir)
- wheel_path = Path(tmpdir) / task_pkg.package_path.name
- else:
- logger.info(f"Now download {task_pkg}")
- wheel_path = await download_package(
- task_pkg=task_pkg, dest=tmpdir
- )
- # Read package info from wheel file, and override the ones coming
- # from the request body. Note that `package_name` was already set
- # (and normalized) as part of `_TaskCollectPip` initialization.
- pkg_info = inspect_package(wheel_path)
- task_pkg.package_version = pkg_info["pkg_version"]
- task_pkg.package_manifest = pkg_info["pkg_manifest"]
- except Exception as e:
+ # Verify that folder does not exist (for local collection)
+ if Path(task_group_path).exists():
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=f"Invalid package or manifest. Original error: {e}",
+ detail=f"{task_group_path} already exists.",
  )

- try:
- venv_path = create_package_dir_pip(task_pkg=task_pkg)
- except FileExistsError:
- venv_path = create_package_dir_pip(task_pkg=task_pkg, create=False)
- try:
- package_path = get_absolute_venv_path(venv_path)
- collection_path = get_collection_path(package_path)
- with collection_path.open("r") as f:
- task_collect_data = json.load(f)
-
- err_msg = (
- "Cannot collect package, possible reason: an old version of "
- "the same package has already been collected.\n"
- f"{str(collection_path)} has invalid content: "
- )
- if not isinstance(task_collect_data, dict):
+ # Verify that wheel file exists
+ wheel_path = task_group_attrs.get("wheel_path", None)
+ if wheel_path is not None:
+ if not Path(wheel_path).exists():
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=f"{err_msg} it's not a Python dictionary.",
- )
- if "task_list" not in task_collect_data.keys():
- raise HTTPException(
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=f"{err_msg} it has no key 'task_list'.",
- )
- if not isinstance(task_collect_data["task_list"], list):
- raise HTTPException(
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=f"{err_msg} 'task_list' is not a Python list.",
+ detail=f"No such file: {wheel_path}.",
  )

- for task_dict in task_collect_data["task_list"]:
-
- task = TaskReadV2(**task_dict)
- db_task = await db.get(TaskV2, task.id)
- if (
- (not db_task)
- or db_task.source != task.source
- or db_task.name != task.name
- ):
- await db.close()
- raise HTTPException(
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=(
- "Cannot collect package. Folder already exists, "
- f"but task {task.id} does not exists or it does "
- f"not have the expected source ({task.source}) or "
- f"name ({task.name})."
- ),
- )
- except FileNotFoundError as e:
- await db.close()
- raise HTTPException(
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=(
- "Cannot collect package. Possible reason: another "
- "collection of the same package is in progress. "
- f"Original FileNotFoundError: {e}"
- ),
- )
- except ValidationError as e:
- await db.close()
- raise HTTPException(
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=(
- "Cannot collect package. Possible reason: an old version "
- "of the same package has already been collected. "
- f"Original ValidationError: {e}"
- ),
- )
- task_collect_data["info"] = "Already installed"
- state = CollectionStateV2(data=task_collect_data)
- response.status_code == status.HTTP_200_OK
- await db.close()
- return state
- settings = Inject(get_settings)
-
- # Check that tasks are not already in the DB
- for new_task in task_pkg.package_manifest.task_list:
- new_task_name_slug = slugify_task_name_for_source(new_task.name)
- new_task_source = f"{task_pkg.package_source}:{new_task_name_slug}"
- stm = select(TaskV2).where(TaskV2.source == new_task_source)
- res = await db.execute(stm)
- if res.scalars().all():
- raise HTTPException(
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=(
- "Cannot collect package. Task with source "
- f'"{new_task_source}" already exists in the database.'
- ),
- )
+ # Create TaskGroupV2 object
+ task_group = TaskGroupV2(**task_group_attrs)
+ db.add(task_group)
+ await db.commit()
+ await db.refresh(task_group)
+ db.expunge(task_group)

  # All checks are OK, proceed with task collection
- collection_status = dict(
+ collection_state_data = dict(
  status=CollectionStatusV2.PENDING,
- venv_path=venv_path.relative_to(settings.FRACTAL_TASKS_DIR).as_posix(),
- package=task_pkg.package,
+ package=task_group.pkg_name,
+ version=task_group.version,
+ path=task_group.path,
+ venv_path=task_group.venv_path,
+ )
+ state = CollectionStateV2(
+ data=collection_state_data, taskgroupv2_id=task_group.id
  )
- state = CollectionStateV2(data=collection_status)
  db.add(state)
  await db.commit()
  await db.refresh(state)

- background_tasks.add_task(
- background_collect_pip,
- state_id=state.id,
- venv_path=venv_path,
- task_pkg=task_pkg,
- user_id=user.id,
- user_group_id=user_group_id,
- )
+ logger = set_logger(logger_name="collect_tasks_pip")
+
+ # END of SSH/non-SSH common part
+
+ if settings.FRACTAL_RUNNER_BACKEND == "slurm_ssh":
+ # SSH task collection
+
+ from fractal_server.tasks.v2.background_operations_ssh import (
+ background_collect_pip_ssh,
+ )
+
+ # User appropriate FractalSSH object
+ ssh_credentials = dict(
+ user=user_settings.ssh_username,
+ host=user_settings.ssh_host,
+ key_path=user_settings.ssh_private_key_path,
+ )
+ fractal_ssh_list = request.app.state.fractal_ssh_list
+ fractal_ssh = fractal_ssh_list.get(**ssh_credentials)
+
+ background_tasks.add_task(
+ background_collect_pip_ssh,
+ state_id=state.id,
+ task_group=task_group,
+ fractal_ssh=fractal_ssh,
+ tasks_base_dir=user_settings.ssh_tasks_dir,
+ )
+
+ else:
+ # Local task collection
+ background_tasks.add_task(
+ background_collect_pip,
+ state_id=state.id,
+ task_group=task_group,
+ )
  logger.debug(
  "Task-collection endpoint: start background collection "
  "and return state"
@@ -308,11 +283,10 @@ async def collect_tasks_pip(
  reset_logger_handlers(logger)
  info = (
  "Collecting tasks in the background. "
- f"GET /task/collect/{state.id} to query collection status"
+ f"GET /task/collect/{state.id}/ to query collection status"
  )
  state.data["info"] = info
  response.status_code = status.HTTP_201_CREATED
- await db.close()

  return state

@@ -345,20 +319,17 @@ async def check_collection_status(
  else:
  # Non-SSH mode
  # In some cases (i.e. a successful or ongoing task collection),
- # state.data.log is not set; if so, we collect the current logs.
+ # state.data["log"] is not set; if so, we collect the current logs.
  if verbose and not state.data.get("log"):
- if "venv_path" not in state.data.keys():
+ if "path" not in state.data.keys():
  await db.close()
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
  detail=(
- f"No 'venv_path' in CollectionStateV2[{state_id}].data"
+ f"No 'path' in CollectionStateV2[{state_id}].data"
  ),
  )
- state.data["log"] = get_collection_log(
- Path(state.data["venv_path"])
- )
- state.data["venv_path"] = str(state.data["venv_path"])
+ state.data["log"] = get_collection_log_v2(Path(state.data["path"]))

  reset_logger_handlers(logger)
  await db.close()
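
For orientation, a client-side round trip through these endpoints might look as follows; the base URL, route prefix, and authorization header are hypothetical placeholders, not taken from this diff:

    import requests

    # Hypothetical deployment URL and route prefix; adjust to your instance.
    BASE = "https://fractal.example.org/api/v2/task"
    HEADERS = {"Authorization": "Bearer <token>"}

    # Start a background pip-based task collection.
    state = requests.post(
        f"{BASE}/collect/pip/",
        json={"package": "fractal-tasks-core"},
        headers=HEADERS,
    ).json()

    # Poll the collection state, following the hint in state["data"]["info"].
    check = requests.get(f"{BASE}/collect/{state['id']}/", headers=HEADERS)
    print(check.json()["data"]["status"])  # CollectionStatusV2.PENDING right after submission

Note: judging from their content, the next hunks appear to come from fractal_server/app/routes/api/v2/task_collection_custom.py (the collect_task_custom endpoint).
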
@@ -8,19 +8,16 @@ from fastapi import Depends
  from fastapi import HTTPException
  from fastapi import status
  from sqlalchemy.ext.asyncio import AsyncSession
- from sqlmodel import select

  from ._aux_functions_tasks import _get_valid_user_group_id
+ from ._aux_functions_tasks import _verify_non_duplication_group_constraint
+ from ._aux_functions_tasks import _verify_non_duplication_user_constraint
  from fractal_server.app.db import DBSyncSession
  from fractal_server.app.db import get_async_db
  from fractal_server.app.db import get_sync_db
  from fractal_server.app.models import UserOAuth
- from fractal_server.app.models.v1 import Task as TaskV1
- from fractal_server.app.models.v2 import TaskV2
+ from fractal_server.app.models.v2 import TaskGroupV2
  from fractal_server.app.routes.auth import current_active_verified_user
- from fractal_server.app.routes.aux.validate_user_settings import (
- verify_user_has_settings,
- )
  from fractal_server.app.schemas.v2 import TaskCollectCustomV2
  from fractal_server.app.schemas.v2 import TaskCreateV2
  from fractal_server.app.schemas.v2 import TaskGroupCreateV2
@@ -33,7 +30,7 @@ from fractal_server.tasks.v2.background_operations import (
  _prepare_tasks_metadata,
  )
  from fractal_server.tasks.v2.database_operations import (
- create_db_task_group_and_tasks,
+ create_db_tasks_and_update_task_group,
  )

  router = APIRouter()
@@ -134,75 +131,51 @@ async def collect_task_custom(
  else:
  package_root = Path(task_collect.package_root)

- # Set task.owner attribute
- if user.username:
- owner = user.username
- else:
- verify_user_has_settings(user)
- owner = user.settings.slurm_user
- if owner is None:
- raise HTTPException(
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=(
- "Cannot add a new task because current user does not "
- "have `username` or `slurm_user` attributes."
- ),
- )
- source = f"{owner}:{task_collect.source}"
-
  task_list: list[TaskCreateV2] = _prepare_tasks_metadata(
  package_manifest=task_collect.manifest,
- package_source=source,
  python_bin=Path(task_collect.python_interpreter),
  package_root=package_root,
  package_version=task_collect.version,
  )
- # Verify that source is not already in use (note: this check is only useful
- # to provide a user-friendly error message, but `task.source` uniqueness is
- # already guaranteed by a constraint in the table definition).
- sources = [task.source for task in task_list]
- stm = select(TaskV2).where(TaskV2.source.in_(sources))
- res = db_sync.execute(stm)
- overlapping_sources_v2 = res.scalars().all()
- if overlapping_sources_v2:
- overlapping_tasks_v2_source_and_id = [
- f"TaskV2 with ID {task.id} already has source='{task.source}'"
- for task in overlapping_sources_v2
- ]
- raise HTTPException(
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail="\n".join(overlapping_tasks_v2_source_and_id),
- )
- stm = select(TaskV1).where(TaskV1.source.in_(sources))
- res = db_sync.execute(stm)
- overlapping_sources_v1 = res.scalars().all()
- if overlapping_sources_v1:
- overlapping_tasks_v1_source_and_id = [
- f"TaskV1 with ID {task.id} already has source='{task.source}'\n"
- for task in overlapping_sources_v1
- ]
- raise HTTPException(
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail="\n".join(overlapping_tasks_v1_source_and_id),
- )

  # Prepare task-group attributes
  task_group_attrs = dict(
  origin="other",
- pkg_name=task_collect.source, # FIXME
+ pkg_name=task_collect.label,
+ user_id=user.id,
+ user_group_id=user_group_id,
  )
+ TaskGroupCreateV2(**task_group_attrs)

- task_group = create_db_task_group_and_tasks(
- task_list=task_list,
- task_group_obj=TaskGroupCreateV2(**task_group_attrs),
+ # Verify non-duplication constraints
+ await _verify_non_duplication_user_constraint(
  user_id=user.id,
- user_group_id=user_group_id,
+ pkg_name=task_group_attrs["pkg_name"],
+ version=None,
+ db=db,
+ )
+ await _verify_non_duplication_group_constraint(
+ user_group_id=task_group_attrs["user_group_id"],
+ pkg_name=task_group_attrs["pkg_name"],
+ version=None,
+ db=db,
+ )
+
+ task_group = TaskGroupV2(**task_group_attrs)
+ db.add(task_group)
+ await db.commit()
+ await db.refresh(task_group)
+ db.expunge(task_group)
+
+ task_group = create_db_tasks_and_update_task_group(
+ task_list=task_list,
+ task_group_id=task_group.id,
  db=db_sync,
  )

  logger.debug(
  f"Custom-environment task collection by user {user.email} completed, "
- f"for package with {source=}"
+ f"for package {task_collect}"
  )

  return task_group.task_list
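
Note: judging from their content, the remaining hunks appear to come from fractal_server/app/routes/api/v2/task_group.py (the delete_task_group and patch_task_group endpoints).
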
@@ -8,10 +8,12 @@ from sqlmodel import select

  from ._aux_functions_tasks import _get_task_group_full_access
  from ._aux_functions_tasks import _get_task_group_read_access
+ from ._aux_functions_tasks import _verify_non_duplication_group_constraint
  from fractal_server.app.db import AsyncSession
  from fractal_server.app.db import get_async_db
  from fractal_server.app.models import LinkUserGroup
  from fractal_server.app.models import UserOAuth
+ from fractal_server.app.models.v2 import CollectionStateV2
  from fractal_server.app.models.v2 import TaskGroupV2
  from fractal_server.app.models.v2 import WorkflowTaskV2
  from fractal_server.app.routes.auth import current_active_user
@@ -103,6 +105,23 @@ async def delete_task_group(
  detail=f"TaskV2 {workflow_tasks[0].task_id} is still in use",
  )

+ # Cascade operations: set foreign-keys to null for CollectionStateV2 which
+ # are in relationship with the current TaskGroupV2
+ logger.debug("Start of cascade operations on CollectionStateV2.")
+ stm = select(CollectionStateV2).where(
+ CollectionStateV2.taskgroupv2_id == task_group_id
+ )
+ res = await db.execute(stm)
+ collection_states = res.scalars().all()
+ for collection_state in collection_states:
+ logger.debug(
+ f"Setting CollectionStateV2[{collection_state.id}].taskgroupv2_id "
+ "to None."
+ )
+ collection_state.taskgroupv2_id = None
+ db.add(collection_state)
+ logger.debug("End of cascade operations on CollectionStateV2.")
+
  await db.delete(task_group)
  await db.commit()

@@ -124,7 +143,16 @@ async def patch_task_group(
  user_id=user.id,
  db=db,
  )
-
+ if (
+ "user_group_id" in task_group_update.dict(exclude_unset=True)
+ and task_group_update.user_group_id != task_group.user_group_id
+ ):
+ await _verify_non_duplication_group_constraint(
+ db=db,
+ pkg_name=task_group.pkg_name,
+ version=task_group.version,
+ user_group_id=task_group_update.user_group_id,
+ )
  for key, value in task_group_update.dict(exclude_unset=True).items():
  if (key == "user_group_id") and (value is not None):
  await _verify_user_belongs_to_group(