fractal-server 2.7.0a5__py3-none-any.whl → 2.7.0a7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/routes/api/v2/__init__.py +4 -0
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +72 -11
- fractal_server/app/routes/api/v2/workflow.py +14 -82
- fractal_server/app/routes/api/v2/workflow_import.py +357 -0
- fractal_server/app/routes/auth/group.py +27 -0
- fractal_server/app/runner/v2/__init__.py +13 -7
- fractal_server/app/schemas/v2/__init__.py +1 -0
- fractal_server/app/schemas/v2/manifest.py +13 -0
- fractal_server/app/schemas/v2/task.py +20 -5
- fractal_server/app/schemas/v2/workflowtask.py +3 -1
- fractal_server/data_migrations/2_7_0.py +62 -3
- {fractal_server-2.7.0a5.dist-info → fractal_server-2.7.0a7.dist-info}/METADATA +1 -1
- {fractal_server-2.7.0a5.dist-info → fractal_server-2.7.0a7.dist-info}/RECORD +17 -16
- {fractal_server-2.7.0a5.dist-info → fractal_server-2.7.0a7.dist-info}/LICENSE +0 -0
- {fractal_server-2.7.0a5.dist-info → fractal_server-2.7.0a7.dist-info}/WHEEL +0 -0
- {fractal_server-2.7.0a5.dist-info → fractal_server-2.7.0a7.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
|
|
1
|
-
__VERSION__ = "2.7.
|
1
|
+
__VERSION__ = "2.7.0a7"
|
@@ -14,6 +14,7 @@ from .task_collection import router as task_collection_router_v2
|
|
14
14
|
from .task_collection_custom import router as task_collection_router_v2_custom
|
15
15
|
from .task_group import router as task_group_router_v2
|
16
16
|
from .workflow import router as workflow_router_v2
|
17
|
+
from .workflow_import import router as workflow_import_router_v2
|
17
18
|
from .workflowtask import router as workflowtask_router_v2
|
18
19
|
from fractal_server.config import get_settings
|
19
20
|
from fractal_server.syringe import Inject
|
@@ -42,5 +43,8 @@ router_api_v2.include_router(
|
|
42
43
|
task_group_router_v2, prefix="/task-group", tags=["V2 TaskGroup"]
|
43
44
|
)
|
44
45
|
router_api_v2.include_router(workflow_router_v2, tags=["V2 Workflow"])
|
46
|
+
router_api_v2.include_router(
|
47
|
+
workflow_import_router_v2, tags=["V2 Workflow Import"]
|
48
|
+
)
|
45
49
|
router_api_v2.include_router(workflowtask_router_v2, tags=["V2 WorkflowTask"])
|
46
50
|
router_api_v2.include_router(status_router_v2, tags=["V2 Status"])
|
@@ -9,13 +9,21 @@ from fastapi import HTTPException
|
|
9
9
|
from fastapi import status
|
10
10
|
from sqlmodel import select
|
11
11
|
|
12
|
-
from
|
13
|
-
from
|
14
|
-
from
|
15
|
-
from
|
16
|
-
from
|
17
|
-
from
|
18
|
-
from
|
12
|
+
from fractal_server.app.db import AsyncSession
|
13
|
+
from fractal_server.app.models import LinkUserGroup
|
14
|
+
from fractal_server.app.models import UserGroup
|
15
|
+
from fractal_server.app.models import UserOAuth
|
16
|
+
from fractal_server.app.models.v2 import CollectionStateV2
|
17
|
+
from fractal_server.app.models.v2 import TaskGroupV2
|
18
|
+
from fractal_server.app.models.v2 import TaskV2
|
19
|
+
from fractal_server.app.models.v2 import WorkflowTaskV2
|
20
|
+
from fractal_server.app.routes.auth._aux_auth import _get_default_usergroup_id
|
21
|
+
from fractal_server.app.routes.auth._aux_auth import (
|
22
|
+
_verify_user_belongs_to_group,
|
23
|
+
)
|
24
|
+
from fractal_server.logger import set_logger
|
25
|
+
|
26
|
+
logger = set_logger(__name__)
|
19
27
|
|
20
28
|
|
21
29
|
async def _get_task_group_or_404(
|
@@ -211,6 +219,33 @@ async def _get_valid_user_group_id(
|
|
211
219
|
return user_group_id
|
212
220
|
|
213
221
|
|
222
|
+
async def _get_collection_status_message(
|
223
|
+
task_group: TaskGroupV2, db: AsyncSession
|
224
|
+
) -> str:
|
225
|
+
res = await db.execute(
|
226
|
+
select(CollectionStateV2).where(
|
227
|
+
CollectionStateV2.taskgroupv2_id == task_group.id
|
228
|
+
)
|
229
|
+
)
|
230
|
+
states = res.scalars().all()
|
231
|
+
if len(states) > 1:
|
232
|
+
msg = (
|
233
|
+
"Expected one CollectionStateV2 associated to TaskGroup "
|
234
|
+
f"{task_group.id}, found {len(states)} "
|
235
|
+
f"(IDs: {[state.id for state in states]}).\n"
|
236
|
+
"Warning: this should have not happened, please contact an admin."
|
237
|
+
)
|
238
|
+
elif len(states) == 1:
|
239
|
+
msg = (
|
240
|
+
f"\nThere exists a task-collection state (ID={states[0].id}) for "
|
241
|
+
f"such task group (ID={task_group.id}), with status "
|
242
|
+
f"'{states[0].data.get('status')}'."
|
243
|
+
)
|
244
|
+
else:
|
245
|
+
msg = ""
|
246
|
+
return msg
|
247
|
+
|
248
|
+
|
214
249
|
async def _verify_non_duplication_user_constraint(
|
215
250
|
db: AsyncSession,
|
216
251
|
user_id: int,
|
@@ -226,11 +261,24 @@ async def _verify_non_duplication_user_constraint(
|
|
226
261
|
res = await db.execute(stm)
|
227
262
|
duplicate = res.scalars().all()
|
228
263
|
if duplicate:
|
264
|
+
user = await db.get(UserOAuth, user_id)
|
265
|
+
if len(duplicate) > 1:
|
266
|
+
raise HTTPException(
|
267
|
+
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
268
|
+
detail=(
|
269
|
+
"Invalid state:\n"
|
270
|
+
f"User '{user.email}' already owns {len(duplicate)} task "
|
271
|
+
f"groups with name='{pkg_name}' and {version=} "
|
272
|
+
f"(IDs: {[group.id for group in duplicate]}).\n"
|
273
|
+
"This should have not happened: please contact an admin."
|
274
|
+
),
|
275
|
+
)
|
276
|
+
state_msg = await _get_collection_status_message(duplicate[0], db)
|
229
277
|
raise HTTPException(
|
230
278
|
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
231
279
|
detail=(
|
232
|
-
"
|
233
|
-
f"
|
280
|
+
f"User '{user.email}' already owns a task group "
|
281
|
+
f"with name='{pkg_name}' and {version=}.{state_msg}"
|
234
282
|
),
|
235
283
|
)
|
236
284
|
|
@@ -253,11 +301,24 @@ async def _verify_non_duplication_group_constraint(
|
|
253
301
|
res = await db.execute(stm)
|
254
302
|
duplicate = res.scalars().all()
|
255
303
|
if duplicate:
|
304
|
+
user_group = await db.get(UserGroup, user_group_id)
|
305
|
+
if len(duplicate) > 1:
|
306
|
+
raise HTTPException(
|
307
|
+
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
308
|
+
detail=(
|
309
|
+
"Invalid state:\n"
|
310
|
+
f"UserGroup '{user_group.name}' already owns "
|
311
|
+
f"{len(duplicate)} task groups with name='{pkg_name}' and "
|
312
|
+
f"{version=} (IDs: {[group.id for group in duplicate]}).\n"
|
313
|
+
"This should have not happened: please contact an admin."
|
314
|
+
),
|
315
|
+
)
|
316
|
+
state_msg = await _get_collection_status_message(duplicate[0], db)
|
256
317
|
raise HTTPException(
|
257
318
|
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
258
319
|
detail=(
|
259
|
-
"
|
260
|
-
f"
|
320
|
+
f"UserGroup {user_group.name} already owns a task group "
|
321
|
+
f"with {pkg_name=} and {version=}.{state_msg}"
|
261
322
|
),
|
262
323
|
)
|
263
324
|
|
@@ -11,25 +11,21 @@ from ....db import AsyncSession
|
|
11
11
|
from ....db import get_async_db
|
12
12
|
from ....models.v2 import JobV2
|
13
13
|
from ....models.v2 import ProjectV2
|
14
|
-
from ....models.v2 import TaskV2
|
15
14
|
from ....models.v2 import WorkflowV2
|
16
15
|
from ....schemas.v2 import WorkflowCreateV2
|
17
16
|
from ....schemas.v2 import WorkflowExportV2
|
18
|
-
from ....schemas.v2 import WorkflowImportV2
|
19
17
|
from ....schemas.v2 import WorkflowReadV2
|
20
18
|
from ....schemas.v2 import WorkflowReadV2WithWarnings
|
21
|
-
from ....schemas.v2 import WorkflowTaskCreateV2
|
22
19
|
from ....schemas.v2 import WorkflowUpdateV2
|
23
20
|
from ._aux_functions import _check_workflow_exists
|
24
21
|
from ._aux_functions import _get_project_check_owner
|
25
22
|
from ._aux_functions import _get_submitted_jobs_statement
|
26
23
|
from ._aux_functions import _get_workflow_check_owner
|
27
|
-
from ._aux_functions import _workflow_insert_task
|
28
24
|
from ._aux_functions_tasks import _add_warnings_to_workflow_tasks
|
29
25
|
from fractal_server.app.models import UserOAuth
|
26
|
+
from fractal_server.app.models.v2.task import TaskGroupV2
|
30
27
|
from fractal_server.app.routes.auth import current_active_user
|
31
28
|
|
32
|
-
|
33
29
|
router = APIRouter()
|
34
30
|
|
35
31
|
|
@@ -256,85 +252,21 @@ async def export_worfklow(
|
|
256
252
|
user_id=user.id,
|
257
253
|
db=db,
|
258
254
|
)
|
259
|
-
|
260
|
-
|
261
|
-
|
262
|
-
|
263
|
-
|
264
|
-
|
265
|
-
|
266
|
-
|
267
|
-
async def import_workflow(
|
268
|
-
project_id: int,
|
269
|
-
workflow: WorkflowImportV2,
|
270
|
-
user: UserOAuth = Depends(current_active_user),
|
271
|
-
db: AsyncSession = Depends(get_async_db),
|
272
|
-
) -> Optional[WorkflowReadV2]:
|
273
|
-
"""
|
274
|
-
Import an existing workflow into a project
|
275
|
-
|
276
|
-
Also create all required objects (i.e. Workflow and WorkflowTask's) along
|
277
|
-
the way.
|
278
|
-
"""
|
279
|
-
|
280
|
-
# Preliminary checks
|
281
|
-
await _get_project_check_owner(
|
282
|
-
project_id=project_id,
|
283
|
-
user_id=user.id,
|
284
|
-
db=db,
|
285
|
-
)
|
286
|
-
|
287
|
-
await _check_workflow_exists(
|
288
|
-
name=workflow.name, project_id=project_id, db=db
|
289
|
-
)
|
290
|
-
|
291
|
-
# Check that all required tasks are available
|
292
|
-
source_to_id = {}
|
293
|
-
|
294
|
-
for wf_task in workflow.task_list:
|
295
|
-
|
296
|
-
source = wf_task.task.source
|
297
|
-
if source not in source_to_id.keys():
|
298
|
-
stm = select(TaskV2).where(TaskV2.source == source)
|
299
|
-
tasks_by_source = (await db.execute(stm)).scalars().all()
|
300
|
-
if len(tasks_by_source) != 1:
|
301
|
-
raise HTTPException(
|
302
|
-
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
303
|
-
detail=(
|
304
|
-
f"Found {len(tasks_by_source)} tasks "
|
305
|
-
f"with {source=}."
|
306
|
-
),
|
307
|
-
)
|
308
|
-
source_to_id[source] = tasks_by_source[0].id
|
309
|
-
|
310
|
-
# Create new Workflow (with empty task_list)
|
311
|
-
db_workflow = WorkflowV2(
|
312
|
-
project_id=project_id,
|
313
|
-
**workflow.dict(exclude_none=True, exclude={"task_list"}),
|
314
|
-
)
|
315
|
-
db.add(db_workflow)
|
316
|
-
await db.commit()
|
317
|
-
await db.refresh(db_workflow)
|
318
|
-
|
319
|
-
# Insert tasks
|
320
|
-
|
321
|
-
for wf_task in workflow.task_list:
|
322
|
-
source = wf_task.task.source
|
323
|
-
task_id = source_to_id[source]
|
324
|
-
|
325
|
-
new_wf_task = WorkflowTaskCreateV2(
|
326
|
-
**wf_task.dict(exclude_none=True, exclude={"task"})
|
327
|
-
)
|
328
|
-
# Insert task
|
329
|
-
await _workflow_insert_task(
|
330
|
-
**new_wf_task.dict(),
|
331
|
-
workflow_id=db_workflow.id,
|
332
|
-
task_id=task_id,
|
333
|
-
db=db,
|
255
|
+
wf_task_list = []
|
256
|
+
for wftask in workflow.task_list:
|
257
|
+
task_group = await db.get(TaskGroupV2, wftask.task.taskgroupv2_id)
|
258
|
+
wf_task_list.append(wftask.dict())
|
259
|
+
wf_task_list[-1]["task"] = dict(
|
260
|
+
pkg_name=task_group.pkg_name,
|
261
|
+
version=task_group.version,
|
262
|
+
name=wftask.task.name,
|
334
263
|
)
|
335
264
|
|
336
|
-
|
337
|
-
|
265
|
+
wf = WorkflowExportV2(
|
266
|
+
**workflow.model_dump(),
|
267
|
+
task_list=wf_task_list,
|
268
|
+
)
|
269
|
+
return wf
|
338
270
|
|
339
271
|
|
340
272
|
@router.get("/workflow/", response_model=list[WorkflowReadV2])
|
@@ -0,0 +1,357 @@
|
|
1
|
+
from typing import Optional
|
2
|
+
|
3
|
+
from fastapi import APIRouter
|
4
|
+
from fastapi import Depends
|
5
|
+
from fastapi import HTTPException
|
6
|
+
from fastapi import status
|
7
|
+
from sqlmodel import or_
|
8
|
+
from sqlmodel import select
|
9
|
+
|
10
|
+
from ....db import AsyncSession
|
11
|
+
from ....db import get_async_db
|
12
|
+
from ....models.v2 import TaskV2
|
13
|
+
from ....models.v2 import WorkflowV2
|
14
|
+
from ....schemas.v2 import TaskImportV2Legacy
|
15
|
+
from ....schemas.v2 import WorkflowImportV2
|
16
|
+
from ....schemas.v2 import WorkflowReadV2WithWarnings
|
17
|
+
from ....schemas.v2 import WorkflowTaskCreateV2
|
18
|
+
from ._aux_functions import _check_workflow_exists
|
19
|
+
from ._aux_functions import _get_project_check_owner
|
20
|
+
from ._aux_functions import _workflow_insert_task
|
21
|
+
from ._aux_functions_tasks import _add_warnings_to_workflow_tasks
|
22
|
+
from fractal_server.app.models import LinkUserGroup
|
23
|
+
from fractal_server.app.models import UserOAuth
|
24
|
+
from fractal_server.app.models.v2.task import TaskGroupV2
|
25
|
+
from fractal_server.app.routes.auth import current_active_user
|
26
|
+
from fractal_server.app.routes.auth._aux_auth import _get_default_usergroup_id
|
27
|
+
from fractal_server.app.schemas.v2.task import TaskImportV2
|
28
|
+
from fractal_server.logger import set_logger
|
29
|
+
|
30
|
+
router = APIRouter()
|
31
|
+
|
32
|
+
|
33
|
+
logger = set_logger(__name__)
|
34
|
+
|
35
|
+
|
36
|
+
async def _get_user_accessible_taskgroups(
|
37
|
+
*,
|
38
|
+
user_id: int,
|
39
|
+
db: AsyncSession,
|
40
|
+
) -> list[TaskGroupV2]:
|
41
|
+
"""
|
42
|
+
Retrieve list of task groups that the user has access to.
|
43
|
+
"""
|
44
|
+
stm = select(TaskGroupV2).where(
|
45
|
+
or_(
|
46
|
+
TaskGroupV2.user_id == user_id,
|
47
|
+
TaskGroupV2.user_group_id.in_(
|
48
|
+
select(LinkUserGroup.group_id).where(
|
49
|
+
LinkUserGroup.user_id == user_id
|
50
|
+
)
|
51
|
+
),
|
52
|
+
)
|
53
|
+
)
|
54
|
+
res = await db.execute(stm)
|
55
|
+
accessible_task_groups = res.scalars().all()
|
56
|
+
logger.info(
|
57
|
+
f"Found {len(accessible_task_groups)} accessible "
|
58
|
+
f"task groups for {user_id=}."
|
59
|
+
)
|
60
|
+
return accessible_task_groups
|
61
|
+
|
62
|
+
|
63
|
+
async def _get_task_by_source(
|
64
|
+
source: str,
|
65
|
+
task_groups_list: list[TaskGroupV2],
|
66
|
+
) -> Optional[int]:
|
67
|
+
"""
|
68
|
+
Find task with a given source.
|
69
|
+
|
70
|
+
Args:
|
71
|
+
task_import: Info on task to be imported.
|
72
|
+
user_id: ID of current user.
|
73
|
+
default_group_id: ID of default user group.
|
74
|
+
task_group_list: Current list of valid task groups.
|
75
|
+
db: Asynchronous db session
|
76
|
+
|
77
|
+
Return:
|
78
|
+
`id` of the matching task, or `None`.
|
79
|
+
"""
|
80
|
+
task_id = next(
|
81
|
+
iter(
|
82
|
+
task.id
|
83
|
+
for task_group in task_groups_list
|
84
|
+
for task in task_group.task_list
|
85
|
+
if task.source == source
|
86
|
+
),
|
87
|
+
None,
|
88
|
+
)
|
89
|
+
return task_id
|
90
|
+
|
91
|
+
|
92
|
+
async def _disambiguate_task_groups(
|
93
|
+
*,
|
94
|
+
matching_task_groups: list[TaskGroupV2],
|
95
|
+
user_id: int,
|
96
|
+
db: AsyncSession,
|
97
|
+
default_group_id: int,
|
98
|
+
) -> Optional[TaskV2]:
|
99
|
+
"""
|
100
|
+
Disambiguate task groups based on ownership information.
|
101
|
+
"""
|
102
|
+
# Highest priority: task groups created by user
|
103
|
+
for task_group in matching_task_groups:
|
104
|
+
if task_group.user_id == user_id:
|
105
|
+
logger.info(
|
106
|
+
"[_disambiguate_task_groups] "
|
107
|
+
f"Found task group {task_group.id} with {user_id=}, return."
|
108
|
+
)
|
109
|
+
return task_group
|
110
|
+
logger.info(
|
111
|
+
"[_disambiguate_task_groups] "
|
112
|
+
f"No task group found with {user_id=}, continue."
|
113
|
+
)
|
114
|
+
|
115
|
+
# Medium priority: task groups owned by default user group
|
116
|
+
for task_group in matching_task_groups:
|
117
|
+
if task_group.user_group_id == default_group_id:
|
118
|
+
logger.info(
|
119
|
+
"[_disambiguate_task_groups] "
|
120
|
+
f"Found task group {task_group.id} with user_group_id="
|
121
|
+
f"{default_group_id}, return."
|
122
|
+
)
|
123
|
+
return task_group
|
124
|
+
logger.info(
|
125
|
+
"[_disambiguate_task_groups] "
|
126
|
+
"No task group found with user_group_id="
|
127
|
+
f"{default_group_id}, continue."
|
128
|
+
)
|
129
|
+
|
130
|
+
# Lowest priority: task groups owned by other groups, sorted
|
131
|
+
# according to age of the user/usergroup link
|
132
|
+
logger.info(
|
133
|
+
"[_disambiguate_task_groups] "
|
134
|
+
"Now sorting remaining task groups by oldest-user-link."
|
135
|
+
)
|
136
|
+
user_group_ids = [
|
137
|
+
task_group.user_group_id for task_group in matching_task_groups
|
138
|
+
]
|
139
|
+
stm = (
|
140
|
+
select(LinkUserGroup.group_id)
|
141
|
+
.where(LinkUserGroup.user_id == user_id)
|
142
|
+
.where(LinkUserGroup.group_id.in_(user_group_ids))
|
143
|
+
.order_by(LinkUserGroup.timestamp_created.asc())
|
144
|
+
)
|
145
|
+
res = await db.execute(stm)
|
146
|
+
oldest_user_group_id = res.scalars().first()
|
147
|
+
logger.info(
|
148
|
+
"[_disambiguate_task_groups] "
|
149
|
+
f"Result of sorting: {oldest_user_group_id=}."
|
150
|
+
)
|
151
|
+
task_group = next(
|
152
|
+
iter(
|
153
|
+
task_group
|
154
|
+
for task_group in matching_task_groups
|
155
|
+
if task_group.user_group_id == oldest_user_group_id
|
156
|
+
),
|
157
|
+
None,
|
158
|
+
)
|
159
|
+
return task_group
|
160
|
+
|
161
|
+
|
162
|
+
async def _get_task_by_taskimport(
|
163
|
+
*,
|
164
|
+
task_import: TaskImportV2,
|
165
|
+
task_groups_list: list[TaskGroupV2],
|
166
|
+
user_id: int,
|
167
|
+
default_group_id: int,
|
168
|
+
db: AsyncSession,
|
169
|
+
) -> Optional[int]:
|
170
|
+
"""
|
171
|
+
Find a task based on `task_import`.
|
172
|
+
|
173
|
+
Args:
|
174
|
+
task_import: Info on task to be imported.
|
175
|
+
user_id: ID of current user.
|
176
|
+
default_group_id: ID of default user group.
|
177
|
+
task_group_list: Current list of valid task groups.
|
178
|
+
db: Asynchronous db session
|
179
|
+
|
180
|
+
Return:
|
181
|
+
`id` of the matching task, or `None`.
|
182
|
+
"""
|
183
|
+
|
184
|
+
logger.info(f"[_get_task_by_taskimport] START, {task_import=}")
|
185
|
+
|
186
|
+
# Filter by `pkg_name` and by presence of a task with given `name`.
|
187
|
+
matching_task_groups = [
|
188
|
+
task_group
|
189
|
+
for task_group in task_groups_list
|
190
|
+
if (
|
191
|
+
task_group.pkg_name == task_import.pkg_name
|
192
|
+
and task_import.name
|
193
|
+
in [task.name for task in task_group.task_list]
|
194
|
+
)
|
195
|
+
]
|
196
|
+
if len(matching_task_groups) < 1:
|
197
|
+
logger.info(
|
198
|
+
"[_get_task_by_taskimport] "
|
199
|
+
f"No task group with {task_import.pkg_name=} "
|
200
|
+
f"and a task with {task_import.name=}."
|
201
|
+
)
|
202
|
+
return None
|
203
|
+
|
204
|
+
# Determine target `version`
|
205
|
+
# Note that task_import.version cannot be "", due to a validator
|
206
|
+
if task_import.version is None:
|
207
|
+
logger.info(
|
208
|
+
"[_get_task_by_taskimport] "
|
209
|
+
"No version requested, looking for latest."
|
210
|
+
)
|
211
|
+
latest_task = max(
|
212
|
+
matching_task_groups, key=lambda tg: tg.version or ""
|
213
|
+
)
|
214
|
+
version = latest_task.version
|
215
|
+
logger.info(
|
216
|
+
f"[_get_task_by_taskimport] Latest version set to {version}."
|
217
|
+
)
|
218
|
+
else:
|
219
|
+
version = task_import.version
|
220
|
+
|
221
|
+
# Filter task groups by version
|
222
|
+
final_matching_task_groups = list(
|
223
|
+
filter(lambda tg: tg.version == version, task_groups_list)
|
224
|
+
)
|
225
|
+
|
226
|
+
if len(final_matching_task_groups) < 1:
|
227
|
+
logger.info(
|
228
|
+
"[_get_task_by_taskimport] "
|
229
|
+
"No task group left after filtering by version."
|
230
|
+
)
|
231
|
+
return None
|
232
|
+
elif len(final_matching_task_groups) == 1:
|
233
|
+
final_task_group = final_matching_task_groups[0]
|
234
|
+
logger.info(
|
235
|
+
"[_get_task_by_taskimport] "
|
236
|
+
"Found a single task group, after filtering by version."
|
237
|
+
)
|
238
|
+
else:
|
239
|
+
logger.info(
|
240
|
+
"[_get_task_by_taskimport] "
|
241
|
+
"Found many task groups, after filtering by version."
|
242
|
+
)
|
243
|
+
final_task_group = await _disambiguate_task_groups(
|
244
|
+
matching_task_groups, user_id, db, default_group_id
|
245
|
+
)
|
246
|
+
if final_task_group is None:
|
247
|
+
logger.info(
|
248
|
+
"[_get_task_by_taskimport] Disambiguation returned None."
|
249
|
+
)
|
250
|
+
return None
|
251
|
+
|
252
|
+
# Find task with given name
|
253
|
+
task_id = next(
|
254
|
+
iter(
|
255
|
+
task.id
|
256
|
+
for task in final_task_group.task_list
|
257
|
+
if task.name == task_import.name
|
258
|
+
),
|
259
|
+
None,
|
260
|
+
)
|
261
|
+
|
262
|
+
logger.info(f"[_get_task_by_taskimport] END, {task_import=}, {task_id=}.")
|
263
|
+
|
264
|
+
return task_id
|
265
|
+
|
266
|
+
|
267
|
+
@router.post(
|
268
|
+
"/project/{project_id}/workflow/import/",
|
269
|
+
response_model=WorkflowReadV2WithWarnings,
|
270
|
+
status_code=status.HTTP_201_CREATED,
|
271
|
+
)
|
272
|
+
async def import_workflow(
|
273
|
+
project_id: int,
|
274
|
+
workflow_import: WorkflowImportV2,
|
275
|
+
user: UserOAuth = Depends(current_active_user),
|
276
|
+
db: AsyncSession = Depends(get_async_db),
|
277
|
+
) -> WorkflowReadV2WithWarnings:
|
278
|
+
"""
|
279
|
+
Import an existing workflow into a project and create required objects.
|
280
|
+
"""
|
281
|
+
|
282
|
+
# Preliminary checks
|
283
|
+
await _get_project_check_owner(
|
284
|
+
project_id=project_id,
|
285
|
+
user_id=user.id,
|
286
|
+
db=db,
|
287
|
+
)
|
288
|
+
await _check_workflow_exists(
|
289
|
+
name=workflow_import.name,
|
290
|
+
project_id=project_id,
|
291
|
+
db=db,
|
292
|
+
)
|
293
|
+
|
294
|
+
task_group_list = await _get_user_accessible_taskgroups(
|
295
|
+
user_id=user.id,
|
296
|
+
db=db,
|
297
|
+
)
|
298
|
+
default_group_id = await _get_default_usergroup_id(db)
|
299
|
+
|
300
|
+
list_wf_tasks = []
|
301
|
+
list_task_ids = []
|
302
|
+
for wf_task in workflow_import.task_list:
|
303
|
+
task_import = wf_task.task
|
304
|
+
if isinstance(task_import, TaskImportV2Legacy):
|
305
|
+
task_id = await _get_task_by_source(
|
306
|
+
source=task_import.source,
|
307
|
+
task_groups_list=task_group_list,
|
308
|
+
)
|
309
|
+
else:
|
310
|
+
task_id = await _get_task_by_taskimport(
|
311
|
+
task_import=task_import,
|
312
|
+
user_id=user.id,
|
313
|
+
default_group_id=default_group_id,
|
314
|
+
task_groups_list=task_group_list,
|
315
|
+
db=db,
|
316
|
+
)
|
317
|
+
if task_id is None:
|
318
|
+
raise HTTPException(
|
319
|
+
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
320
|
+
detail=f"Could not find a task matching with {wf_task.task}.",
|
321
|
+
)
|
322
|
+
new_wf_task = WorkflowTaskCreateV2(
|
323
|
+
**wf_task.dict(exclude_none=True, exclude={"task"})
|
324
|
+
)
|
325
|
+
list_wf_tasks.append(new_wf_task)
|
326
|
+
list_task_ids.append(task_id)
|
327
|
+
|
328
|
+
# Create new Workflow
|
329
|
+
db_workflow = WorkflowV2(
|
330
|
+
project_id=project_id,
|
331
|
+
**workflow_import.dict(exclude_none=True, exclude={"task_list"}),
|
332
|
+
)
|
333
|
+
db.add(db_workflow)
|
334
|
+
await db.commit()
|
335
|
+
await db.refresh(db_workflow)
|
336
|
+
|
337
|
+
# Insert task into the workflow
|
338
|
+
for ind, new_wf_task in enumerate(list_wf_tasks):
|
339
|
+
await _workflow_insert_task(
|
340
|
+
**new_wf_task.dict(),
|
341
|
+
workflow_id=db_workflow.id,
|
342
|
+
task_id=list_task_ids[ind],
|
343
|
+
db=db,
|
344
|
+
)
|
345
|
+
|
346
|
+
# Add warnings for non-active tasks (or non-accessible tasks,
|
347
|
+
# although that should never happen)
|
348
|
+
wftask_list_with_warnings = await _add_warnings_to_workflow_tasks(
|
349
|
+
wftask_list=db_workflow.task_list, user_id=user.id, db=db
|
350
|
+
)
|
351
|
+
workflow_data = dict(
|
352
|
+
**db_workflow.model_dump(),
|
353
|
+
project=db_workflow.project,
|
354
|
+
task_list=wftask_list_with_warnings,
|
355
|
+
)
|
356
|
+
|
357
|
+
return workflow_data
|
@@ -19,10 +19,12 @@ from fractal_server.app.db import get_async_db
|
|
19
19
|
from fractal_server.app.models import LinkUserGroup
|
20
20
|
from fractal_server.app.models import UserGroup
|
21
21
|
from fractal_server.app.models import UserOAuth
|
22
|
+
from fractal_server.app.models import UserSettings
|
22
23
|
from fractal_server.app.models.v2 import TaskGroupV2
|
23
24
|
from fractal_server.app.schemas.user_group import UserGroupCreate
|
24
25
|
from fractal_server.app.schemas.user_group import UserGroupRead
|
25
26
|
from fractal_server.app.schemas.user_group import UserGroupUpdate
|
27
|
+
from fractal_server.app.schemas.user_settings import UserSettingsUpdate
|
26
28
|
from fractal_server.app.security import FRACTAL_DEFAULT_GROUP_NAME
|
27
29
|
from fractal_server.logger import set_logger
|
28
30
|
|
@@ -212,3 +214,28 @@ async def delete_single_group(
|
|
212
214
|
await db.commit()
|
213
215
|
|
214
216
|
return Response(status_code=status.HTTP_204_NO_CONTENT)
|
217
|
+
|
218
|
+
|
219
|
+
@router_group.patch("/group/{group_id}/user-settings/", status_code=200)
|
220
|
+
async def patch_user_settings_bulk(
|
221
|
+
group_id: int,
|
222
|
+
settings_update: UserSettingsUpdate,
|
223
|
+
superuser: UserOAuth = Depends(current_active_superuser),
|
224
|
+
db: AsyncSession = Depends(get_async_db),
|
225
|
+
):
|
226
|
+
await _usergroup_or_404(group_id, db)
|
227
|
+
res = await db.execute(
|
228
|
+
select(UserSettings)
|
229
|
+
.join(UserOAuth)
|
230
|
+
.where(LinkUserGroup.user_id == UserOAuth.id)
|
231
|
+
.where(LinkUserGroup.group_id == group_id)
|
232
|
+
)
|
233
|
+
settings_list = res.scalars().all()
|
234
|
+
update = settings_update.dict(exclude_unset=True)
|
235
|
+
for settings in settings_list:
|
236
|
+
for k, v in update.items():
|
237
|
+
setattr(settings, k, v)
|
238
|
+
db.add(settings)
|
239
|
+
await db.commit()
|
240
|
+
|
241
|
+
return Response(status_code=status.HTTP_200_OK)
|
@@ -177,11 +177,13 @@ async def submit_workflow(
|
|
177
177
|
return
|
178
178
|
|
179
179
|
try:
|
180
|
-
|
181
180
|
# Create WORKFLOW_DIR_LOCAL
|
182
|
-
|
183
|
-
|
184
|
-
|
181
|
+
if FRACTAL_RUNNER_BACKEND == "slurm":
|
182
|
+
original_umask = os.umask(0)
|
183
|
+
WORKFLOW_DIR_LOCAL.mkdir(parents=True, mode=0o755)
|
184
|
+
os.umask(original_umask)
|
185
|
+
else:
|
186
|
+
WORKFLOW_DIR_LOCAL.mkdir(parents=True)
|
185
187
|
|
186
188
|
# Define and create WORKFLOW_DIR_REMOTE
|
187
189
|
if FRACTAL_RUNNER_BACKEND == "local":
|
@@ -214,15 +216,19 @@ async def submit_workflow(
|
|
214
216
|
order=order,
|
215
217
|
task_name=task_name,
|
216
218
|
)
|
217
|
-
original_umask = os.umask(0)
|
218
|
-
(WORKFLOW_DIR_LOCAL / subfolder_name).mkdir(mode=0o755)
|
219
|
-
os.umask(original_umask)
|
220
219
|
if FRACTAL_RUNNER_BACKEND == "slurm":
|
220
|
+
# Create local subfolder (with 755) and remote one
|
221
|
+
# (via `sudo -u`)
|
222
|
+
original_umask = os.umask(0)
|
223
|
+
(WORKFLOW_DIR_LOCAL / subfolder_name).mkdir(mode=0o755)
|
224
|
+
os.umask(original_umask)
|
221
225
|
_mkdir_as_user(
|
222
226
|
folder=str(WORKFLOW_DIR_REMOTE / subfolder_name),
|
223
227
|
user=slurm_user,
|
224
228
|
)
|
225
229
|
else:
|
230
|
+
# Create local subfolder (with standard permission set)
|
231
|
+
(WORKFLOW_DIR_LOCAL / subfolder_name).mkdir()
|
226
232
|
logger.info("Skip remote-subfolder creation")
|
227
233
|
except Exception as e:
|
228
234
|
error_type = type(e).__name__
|
@@ -20,6 +20,7 @@ from .project import ProjectUpdateV2 # noqa F401
|
|
20
20
|
from .task import TaskCreateV2 # noqa F401
|
21
21
|
from .task import TaskExportV2 # noqa F401
|
22
22
|
from .task import TaskImportV2 # noqa F401
|
23
|
+
from .task import TaskImportV2Legacy # noqa F401
|
23
24
|
from .task import TaskReadV2 # noqa F401
|
24
25
|
from .task import TaskUpdateV2 # noqa F401
|
25
26
|
from .task_collection import CollectionStateReadV2 # noqa F401
|
@@ -159,6 +159,19 @@ class ManifestV2(BaseModel):
|
|
159
159
|
)
|
160
160
|
return values
|
161
161
|
|
162
|
+
@root_validator()
|
163
|
+
def _unique_task_names(cls, values):
|
164
|
+
task_list = values["task_list"]
|
165
|
+
task_list_names = [t.name for t in task_list]
|
166
|
+
if len(set(task_list_names)) != len(task_list_names):
|
167
|
+
raise ValueError(
|
168
|
+
(
|
169
|
+
"Task names in manifest must be unique.\n",
|
170
|
+
f"Given: {task_list_names}.",
|
171
|
+
)
|
172
|
+
)
|
173
|
+
return values
|
174
|
+
|
162
175
|
@validator("manifest_version")
|
163
176
|
def manifest_version_2(cls, value):
|
164
177
|
if value != "2":
|
@@ -134,7 +134,6 @@ class TaskReadV2(BaseModel):
|
|
134
134
|
|
135
135
|
class TaskUpdateV2(BaseModel, extra=Extra.forbid):
|
136
136
|
|
137
|
-
name: Optional[str] = None
|
138
137
|
command_parallel: Optional[str] = None
|
139
138
|
command_non_parallel: Optional[str] = None
|
140
139
|
input_types: Optional[dict[str, bool]] = None
|
@@ -152,8 +151,6 @@ class TaskUpdateV2(BaseModel, extra=Extra.forbid):
|
|
152
151
|
raise ValueError
|
153
152
|
return v
|
154
153
|
|
155
|
-
_name = validator("name", allow_reuse=True)(valstr("name"))
|
156
|
-
|
157
154
|
_command_parallel = validator("command_parallel", allow_reuse=True)(
|
158
155
|
valstr("command_parallel")
|
159
156
|
)
|
@@ -186,11 +183,29 @@ class TaskUpdateV2(BaseModel, extra=Extra.forbid):
|
|
186
183
|
|
187
184
|
class TaskImportV2(BaseModel, extra=Extra.forbid):
|
188
185
|
|
186
|
+
pkg_name: str
|
187
|
+
version: Optional[str] = None
|
188
|
+
name: str
|
189
|
+
_pkg_name = validator("pkg_name", allow_reuse=True)(valstr("pkg_name"))
|
190
|
+
_version = validator("version", allow_reuse=True)(
|
191
|
+
valstr("version", accept_none=True)
|
192
|
+
)
|
193
|
+
_name = validator("name", allow_reuse=True)(valstr("name"))
|
194
|
+
|
195
|
+
|
196
|
+
class TaskImportV2Legacy(BaseModel):
|
189
197
|
source: str
|
190
198
|
_source = validator("source", allow_reuse=True)(valstr("source"))
|
191
199
|
|
192
200
|
|
193
201
|
class TaskExportV2(BaseModel):
|
194
202
|
|
195
|
-
|
196
|
-
|
203
|
+
pkg_name: str
|
204
|
+
version: Optional[str] = None
|
205
|
+
name: str
|
206
|
+
|
207
|
+
_pkg_name = validator("pkg_name", allow_reuse=True)(valstr("pkg_name"))
|
208
|
+
_version = validator("version", allow_reuse=True)(
|
209
|
+
valstr("version", accept_none=True)
|
210
|
+
)
|
211
|
+
_name = validator("name", allow_reuse=True)(valstr("name"))
|
@@ -1,6 +1,7 @@
|
|
1
1
|
from enum import Enum
|
2
2
|
from typing import Any
|
3
3
|
from typing import Optional
|
4
|
+
from typing import Union
|
4
5
|
|
5
6
|
from pydantic import BaseModel
|
6
7
|
from pydantic import Extra
|
@@ -10,6 +11,7 @@ from pydantic import validator
|
|
10
11
|
from .._validators import valdictkeys
|
11
12
|
from .task import TaskExportV2
|
12
13
|
from .task import TaskImportV2
|
14
|
+
from .task import TaskImportV2Legacy
|
13
15
|
from .task import TaskReadV2
|
14
16
|
from fractal_server.images import Filters
|
15
17
|
|
@@ -157,7 +159,7 @@ class WorkflowTaskImportV2(BaseModel, extra=Extra.forbid):
|
|
157
159
|
|
158
160
|
input_filters: Optional[Filters] = None
|
159
161
|
|
160
|
-
task: TaskImportV2
|
162
|
+
task: Union[TaskImportV2, TaskImportV2Legacy]
|
161
163
|
|
162
164
|
_meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
|
163
165
|
valdictkeys("meta_non_parallel")
|
@@ -1,24 +1,68 @@
|
|
1
|
+
import asyncio
|
1
2
|
import logging
|
2
3
|
import os
|
4
|
+
import sys
|
3
5
|
from pathlib import Path
|
4
6
|
from typing import Any
|
7
|
+
from typing import Optional
|
5
8
|
|
9
|
+
from fastapi import HTTPException
|
6
10
|
from sqlalchemy import select
|
7
11
|
from sqlalchemy.orm import Session
|
8
12
|
|
13
|
+
from fractal_server.app.db import get_async_db
|
9
14
|
from fractal_server.app.db import get_sync_db
|
10
15
|
from fractal_server.app.models import TaskGroupV2
|
11
16
|
from fractal_server.app.models import TaskV2
|
12
17
|
from fractal_server.app.models import UserGroup
|
13
18
|
from fractal_server.app.models import UserOAuth
|
14
19
|
from fractal_server.app.models import UserSettings
|
20
|
+
from fractal_server.app.routes.api.v2._aux_functions_tasks import (
|
21
|
+
_verify_non_duplication_group_constraint,
|
22
|
+
)
|
23
|
+
from fractal_server.app.routes.api.v2._aux_functions_tasks import (
|
24
|
+
_verify_non_duplication_user_constraint,
|
25
|
+
)
|
15
26
|
from fractal_server.app.security import FRACTAL_DEFAULT_GROUP_NAME
|
16
27
|
from fractal_server.data_migrations.tools import _check_current_version
|
28
|
+
from fractal_server.tasks.utils import _normalize_package_name
|
17
29
|
from fractal_server.utils import get_timestamp
|
18
30
|
|
19
31
|
logger = logging.getLogger("fix_db")
|
20
32
|
|
21
33
|
|
34
|
+
async def check_non_duplication_constraints(
    *,
    user_id: int,
    pkg_name: str,
    version: Optional[str] = None,
    user_group_id: Optional[int] = None,
) -> None:
    """
    Verify that a to-be-created `TaskGroupV2` would not violate the
    non-duplication constraints, and abort the process if it would.

    Runs both the per-user and the per-group constraint checks (from
    `_aux_functions_tasks`) against an async DB session. This is part of a
    data-migration script, so a violation is fatal: it is logged and the
    process exits via `sys.exit("ERROR")` instead of propagating an HTTP
    error.

    Args:
        user_id: ID of the user who would own the new task group.
        pkg_name: (Normalized) package name of the new task group.
        version: Optional package version of the new task group.
        user_group_id: Optional ID of the user group the task group would
            be shared with.
    """
    try:
        # `get_async_db` is an async generator; iterating it yields the
        # session and ensures its cleanup when the loop ends.
        async for db_async in get_async_db():
            await _verify_non_duplication_user_constraint(
                user_id=user_id,
                pkg_name=pkg_name,
                version=version,
                db=db_async,
            )
            await _verify_non_duplication_group_constraint(
                user_group_id=user_group_id,
                pkg_name=pkg_name,
                version=version,
                db=db_async,
            )
    except HTTPException as e:
        # The aux functions report violations as HTTPExceptions (their
        # API-route behavior); here that is translated into a fatal
        # migration failure.
        logger.error(
            "Adding a `TaskGroupV2` with "
            f"{user_id=}, {pkg_name=}, {version=} and {user_group_id=} "
            "would break the non-duplication constraint."
        )
        logger.error(f"Original error: {str(e)}")

        sys.exit("ERROR")
|
64
|
+
|
65
|
+
|
22
66
|
def get_unique_value(list_of_objects: list[dict[str, Any]], key: str):
|
23
67
|
"""
|
24
68
|
Loop over `list_of_objects` and extract (unique) value for `key`.
|
@@ -29,8 +73,7 @@ def get_unique_value(list_of_objects: list[dict[str, Any]], key: str):
|
|
29
73
|
unique_values.add(this_value)
|
30
74
|
if len(unique_values) != 1:
|
31
75
|
raise RuntimeError(
|
32
|
-
f"There must be a single taskgroup `{key}`, "
|
33
|
-
f"but {unique_values=}"
|
76
|
+
f"There must be a single taskgroup `{key}`, but {unique_values=}"
|
34
77
|
)
|
35
78
|
return unique_values.pop()
|
36
79
|
|
@@ -85,7 +128,6 @@ def get_default_user_group_id(db):
|
|
85
128
|
|
86
129
|
|
87
130
|
def get_default_user_id(db):
|
88
|
-
|
89
131
|
DEFAULT_USER_EMAIL = os.getenv("FRACTAL_V27_DEFAULT_USER_EMAIL")
|
90
132
|
if DEFAULT_USER_EMAIL is None:
|
91
133
|
raise ValueError(
|
@@ -129,6 +171,7 @@ def prepare_task_groups(
|
|
129
171
|
python_version,
|
130
172
|
name,
|
131
173
|
) = source_fields
|
174
|
+
pkg_name = _normalize_package_name(pkg_name)
|
132
175
|
task_group_key = ":".join(
|
133
176
|
[pkg_name, version, extras, python_version]
|
134
177
|
)
|
@@ -235,6 +278,21 @@ def prepare_task_groups(
|
|
235
278
|
|
236
279
|
print()
|
237
280
|
|
281
|
+
# Verify non-duplication constraints
|
282
|
+
asyncio.run(
|
283
|
+
check_non_duplication_constraints(
|
284
|
+
user_id=task_group_attributes["user_id"],
|
285
|
+
user_group_id=task_group_attributes["user_group_id"],
|
286
|
+
pkg_name=task_group_attributes["pkg_name"],
|
287
|
+
version=task_group_attributes["version"],
|
288
|
+
)
|
289
|
+
)
|
290
|
+
logger.warning(
|
291
|
+
"Non-duplication-constraint check is OK, "
|
292
|
+
"proceed and create TaskGroupV2."
|
293
|
+
)
|
294
|
+
|
295
|
+
# Create the TaskGroupV2 object and commit it
|
238
296
|
task_group = TaskGroupV2(**task_group_attributes)
|
239
297
|
db.add(task_group)
|
240
298
|
db.commit()
|
@@ -262,3 +320,4 @@ def fix_db():
|
|
262
320
|
)
|
263
321
|
|
264
322
|
logger.warning("END of execution of fix_db function")
|
323
|
+
print()
|
@@ -1,4 +1,4 @@
|
|
1
|
-
fractal_server/__init__.py,sha256=
|
1
|
+
fractal_server/__init__.py,sha256=qb0d5KhPMIrdMhNLuCkWNA8JPWuC8eSdJQbmF0UTepA,24
|
2
2
|
fractal_server/__main__.py,sha256=dEkCfzLLQrIlxsGC-HBfoR-RBMWnJDgNrxYTyzmE9c0,6146
|
3
3
|
fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
|
4
4
|
fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
@@ -41,9 +41,9 @@ fractal_server/app/routes/api/v1/task.py,sha256=eW89nMCjpD4G6tHXDo2qGBKqWaPirjH6
|
|
41
41
|
fractal_server/app/routes/api/v1/task_collection.py,sha256=5EMh3yhS1Z4x25kp5Iaxalrf7RgJh-XD1nBjrFvgwsg,9072
|
42
42
|
fractal_server/app/routes/api/v1/workflow.py,sha256=2T93DuEnSshaDCue-JPmjuvGCtbk6lt9pFMuPt783t8,11217
|
43
43
|
fractal_server/app/routes/api/v1/workflowtask.py,sha256=OYYConwJbmNULDw5I3T-UbSJKrbbBiAHbbBeVcpoFKQ,5785
|
44
|
-
fractal_server/app/routes/api/v2/__init__.py,sha256=
|
44
|
+
fractal_server/app/routes/api/v2/__init__.py,sha256=jybEV-vrknPoQvbgKJl0QQvHDPHOJXbDUG5vatHeis4,1963
|
45
45
|
fractal_server/app/routes/api/v2/_aux_functions.py,sha256=mb4R_qqFxeW0LAis2QJIIfVx8Sydv1jTYaRIMsMxnIk,11720
|
46
|
-
fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=
|
46
|
+
fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=3OB5bz2pzI8nDZDf3RZsU37hdZV1se4XEjpDpjbnhQg,10561
|
47
47
|
fractal_server/app/routes/api/v2/dataset.py,sha256=Eilf_BAGjicIhqUiVwI86jlW45ineA5sVzxXW4b2GoQ,8329
|
48
48
|
fractal_server/app/routes/api/v2/images.py,sha256=JR1rR6qEs81nacjriOXAOBQjAbCXF4Ew7M7mkWdxBU0,7920
|
49
49
|
fractal_server/app/routes/api/v2/job.py,sha256=Bga2Kz1OjvDIdxZObWaaXVhNIhC_5JKhKRjEH2_ayEE,5157
|
@@ -54,12 +54,13 @@ fractal_server/app/routes/api/v2/task.py,sha256=R_1bCinQvNrkEh6uAguNNfimduz1uJzg
|
|
54
54
|
fractal_server/app/routes/api/v2/task_collection.py,sha256=gCxOwigT_tfs8lCDNoE7nxl9-9iuRp1gW__3YXqsioc,11478
|
55
55
|
fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=9T0U_4gqrQbJCy6uFDCMSZ-b1sfNIzyz_qm4P41W2Gs,6133
|
56
56
|
fractal_server/app/routes/api/v2/task_group.py,sha256=wOLoqBnoeOIefRN5juhrjm2luGK6E_sF79umKirwWu8,5313
|
57
|
-
fractal_server/app/routes/api/v2/workflow.py,sha256=
|
57
|
+
fractal_server/app/routes/api/v2/workflow.py,sha256=PyvkrUHHzFGUGZE5X0VW5u3DPQA7wtXXNcEpG7-N66I,8687
|
58
|
+
fractal_server/app/routes/api/v2/workflow_import.py,sha256=3qX3iHnLJb62TQFLQKakcC_mYh-tWXkYL-4PuyEkwRs,10895
|
58
59
|
fractal_server/app/routes/api/v2/workflowtask.py,sha256=ciHTwXXFiFnMF7ZpJ3Xs0q6YfuZrFvIjqndlzAEdZpo,6969
|
59
60
|
fractal_server/app/routes/auth/__init__.py,sha256=fao6CS0WiAjHDTvBzgBVV_bSXFpEAeDBF6Z6q7rRkPc,1658
|
60
61
|
fractal_server/app/routes/auth/_aux_auth.py,sha256=ifkNocTYatBSMYGwiR14qohmvR9SfMldceiEj6uJBrU,4783
|
61
62
|
fractal_server/app/routes/auth/current_user.py,sha256=v767HGi8k076ZHoErlU4Vv0_c8HQqYmi8ncjzZZDaDE,4455
|
62
|
-
fractal_server/app/routes/auth/group.py,sha256=
|
63
|
+
fractal_server/app/routes/auth/group.py,sha256=dSS7r8J2cejZ6sKnOWAPSDKynxD9VyBNtqDbFpySzIU,7489
|
63
64
|
fractal_server/app/routes/auth/login.py,sha256=tSu6OBLOieoBtMZB4JkBAdEgH2Y8KqPGSbwy7NIypIo,566
|
64
65
|
fractal_server/app/routes/auth/oauth.py,sha256=AnFHbjqL2AgBX3eksI931xD6RTtmbciHBEuGf9YJLjU,1895
|
65
66
|
fractal_server/app/routes/auth/register.py,sha256=DlHq79iOvGd_gt2v9uwtsqIKeO6i_GKaW59VIkllPqY,587
|
@@ -106,7 +107,7 @@ fractal_server/app/runner/v1/_slurm/_submit_setup.py,sha256=KO9c694d318adoPQh9UG
|
|
106
107
|
fractal_server/app/runner/v1/_slurm/get_slurm_config.py,sha256=6pQNNx997bLIfLp0guF09t_O0ZYRXnbEGLktSAcKnic,5999
|
107
108
|
fractal_server/app/runner/v1/common.py,sha256=_L-vjLnWato80VdlB_BFN4G8P4jSM07u-5cnl1T3S34,3294
|
108
109
|
fractal_server/app/runner/v1/handle_failed_job.py,sha256=bHzScC_aIlU3q-bQxGW6rfWV4xbZ2tho_sktjsAs1no,4684
|
109
|
-
fractal_server/app/runner/v2/__init__.py,sha256=
|
110
|
+
fractal_server/app/runner/v2/__init__.py,sha256=4RTlY34bOqgmzqVHXER0-lpnKaG15boMgDyf1L40JWg,17362
|
110
111
|
fractal_server/app/runner/v2/_local/__init__.py,sha256=KTj14K6jH8fXGUi5P7u5_RqEE1zF4aXtgPxCKzw46iw,5971
|
111
112
|
fractal_server/app/runner/v2/_local/_local_config.py,sha256=9oi209Dlp35ANfxb_DISqmMKKc6DPaMsmYVWbZLseME,3630
|
112
113
|
fractal_server/app/runner/v2/_local/_submit_setup.py,sha256=MucNOo8Er0F5ZIwH7CnTeXgnFMc6d3pKPkv563QNVi0,1630
|
@@ -144,22 +145,22 @@ fractal_server/app/schemas/v1/state.py,sha256=GYeOE_1PtDOgu5W4t_3gw3DBHXH2aCGzIN
|
|
144
145
|
fractal_server/app/schemas/v1/task.py,sha256=7BxOZ_qoRQ8n3YbQpDvB7VMcxB5fSYQmR5RLIWhuJ5U,3704
|
145
146
|
fractal_server/app/schemas/v1/task_collection.py,sha256=uvq9bcMaGD_qHsh7YtcpoSAkVAbw12eY4DocIO3MKOg,3057
|
146
147
|
fractal_server/app/schemas/v1/workflow.py,sha256=tuOs5E5Q_ozA8if7YPZ07cQjzqB_QMkBS4u92qo4Ro0,4618
|
147
|
-
fractal_server/app/schemas/v2/__init__.py,sha256=
|
148
|
+
fractal_server/app/schemas/v2/__init__.py,sha256=G44JgD_i_zCpV7yjXcoS5ygOS3IfsIWoktLVZao6TaE,2323
|
148
149
|
fractal_server/app/schemas/v2/dataset.py,sha256=865ia13E9mWu1DaYyppKW2csNYglaInrScrprdVYX7A,2552
|
149
150
|
fractal_server/app/schemas/v2/dumps.py,sha256=s6dg-pHZFui6t2Ktm0SMxjKDN-v-ZqBHz9iTsBQF3eU,1712
|
150
151
|
fractal_server/app/schemas/v2/job.py,sha256=oYSLYkQ0HL83QyjEGIaggtZ117FndzFlONMKWd9sTXM,3270
|
151
|
-
fractal_server/app/schemas/v2/manifest.py,sha256=
|
152
|
+
fractal_server/app/schemas/v2/manifest.py,sha256=Uqtd7DbyOkf9bxBOKkU7Sv7nToBIFGUcfjY7rd5iO7c,6981
|
152
153
|
fractal_server/app/schemas/v2/project.py,sha256=UXEA0UUUe0bFFOVLLmVtvDFLBO5vmD1JVI7EeTIcwDo,756
|
153
154
|
fractal_server/app/schemas/v2/status.py,sha256=SQaUpQkjFq5c5k5J4rOjNhuQaDOEg8lksPhkKmPU5VU,332
|
154
|
-
fractal_server/app/schemas/v2/task.py,sha256=
|
155
|
+
fractal_server/app/schemas/v2/task.py,sha256=FFAbYwDlqowB8gVMdjFVPVHvAM0T89PYLixUth49xfQ,6870
|
155
156
|
fractal_server/app/schemas/v2/task_collection.py,sha256=Ddw_7QaQ93kdEIwWQvzLQDu03gho_OHdhah3n0ioK3M,6296
|
156
157
|
fractal_server/app/schemas/v2/task_group.py,sha256=F40u64z-wXHNPFjx9RHozzl_SySTHfKFc-sBFyn_e0I,2352
|
157
158
|
fractal_server/app/schemas/v2/workflow.py,sha256=HSNQSrBRdoBzh8Igr76FUWCAWvVzykrqmUv1vGv-8og,2026
|
158
|
-
fractal_server/app/schemas/v2/workflowtask.py,sha256=
|
159
|
+
fractal_server/app/schemas/v2/workflowtask.py,sha256=vDdMktYbHeYBgB5OuWSv6wRPRXWqvetkeqQ7IC5YtfA,5751
|
159
160
|
fractal_server/app/security/__init__.py,sha256=V1NOWlmaFZHMR6SrkMl62jyAuqYONyo8lyGvR6UZesM,12312
|
160
161
|
fractal_server/app/user_settings.py,sha256=aZgQ3i0JkHfgwLGW1ee6Gzr1ae3IioFfJKKSsSS8Svk,1312
|
161
162
|
fractal_server/config.py,sha256=gX0aYwDwbC5y7JNorifON84YMveubb7XTb4sH14N3KM,23667
|
162
|
-
fractal_server/data_migrations/2_7_0.py,sha256=
|
163
|
+
fractal_server/data_migrations/2_7_0.py,sha256=DQQJ_tLYFteH3Jw246ovIh3Dac_9SaAefoy7FLw5Cso,11145
|
163
164
|
fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
|
164
165
|
fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
|
165
166
|
fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
|
@@ -222,8 +223,8 @@ fractal_server/tasks/v2/utils.py,sha256=MnY6MhcxDRo4rPuXo2tQ252eWEPZF3OlCGe-p5Mr
|
|
222
223
|
fractal_server/urls.py,sha256=5o_qq7PzKKbwq12NHSQZDmDitn5RAOeQ4xufu-2v9Zk,448
|
223
224
|
fractal_server/utils.py,sha256=jrlCBPmC7F0ptBVcDac-EbZNsdYTLbHfX9oxkXthS5Q,2193
|
224
225
|
fractal_server/zip_tools.py,sha256=xYpzBshysD2nmxkD5WLYqMzPYUcCRM3kYy-7n9bJL-U,4426
|
225
|
-
fractal_server-2.7.
|
226
|
-
fractal_server-2.7.
|
227
|
-
fractal_server-2.7.
|
228
|
-
fractal_server-2.7.
|
229
|
-
fractal_server-2.7.
|
226
|
+
fractal_server-2.7.0a7.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
|
227
|
+
fractal_server-2.7.0a7.dist-info/METADATA,sha256=6chShCqesJ4MnkCnJPb38-RdhMfwfNZekkVSkejj3dg,4630
|
228
|
+
fractal_server-2.7.0a7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
|
229
|
+
fractal_server-2.7.0a7.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
|
230
|
+
fractal_server-2.7.0a7.dist-info/RECORD,,
|
File without changes
|
File without changes
|
File without changes
|