fractal-server 2.0.0a3__py3-none-any.whl → 2.0.0a5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/routes/admin/__init__.py +0 -0
- fractal_server/app/routes/admin/v2.py +7 -2
- fractal_server/app/routes/api/v1/task_collection.py +10 -0
- fractal_server/app/routes/api/v2/__init__.py +6 -2
- fractal_server/app/routes/api/v2/_aux_functions.py +12 -8
- fractal_server/app/routes/api/v2/submit.py +3 -4
- fractal_server/app/routes/api/v2/task_collection.py +11 -1
- fractal_server/app/routes/api/v2/task_legacy.py +59 -0
- fractal_server/app/runner/__init__.py +0 -0
- fractal_server/app/runner/executors/__init__.py +0 -0
- fractal_server/app/runner/executors/slurm/executor.py +0 -2
- fractal_server/app/runner/v1/_common.py +1 -1
- fractal_server/app/runner/v1/_local/__init__.py +2 -1
- fractal_server/app/runner/v1/_slurm/__init__.py +4 -2
- fractal_server/app/runner/v1/_slurm/_submit_setup.py +2 -2
- fractal_server/app/runner/v2/__init__.py +3 -3
- fractal_server/app/runner/v2/_local/__init__.py +11 -16
- fractal_server/app/runner/v2/_slurm/__init__.py +4 -2
- fractal_server/app/runner/v2/_slurm/_submit_setup.py +2 -3
- fractal_server/app/runner/v2/deduplicate_list.py +2 -1
- fractal_server/app/runner/v2/handle_failed_job.py +9 -7
- fractal_server/app/runner/v2/runner.py +19 -0
- fractal_server/app/runner/v2/task_interface.py +4 -2
- fractal_server/app/schemas/_validators.py +22 -0
- fractal_server/app/schemas/v2/__init__.py +1 -0
- fractal_server/app/schemas/v2/dumps.py +1 -1
- fractal_server/app/schemas/v2/task.py +32 -1
- fractal_server/app/schemas/v2/task_collection.py +4 -0
- fractal_server/app/schemas/v2/workflowtask.py +38 -9
- fractal_server/images/__init__.py +1 -0
- fractal_server/images/models.py +48 -1
- fractal_server/tasks/v1/__init__.py +0 -0
- fractal_server/tasks/v2/__init__.py +0 -0
- {fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a5.dist-info}/METADATA +7 -7
- {fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a5.dist-info}/RECORD +39 -34
- fractal_server/app/schemas/json_schemas/manifest.json +0 -81
- {fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a5.dist-info}/LICENSE +0 -0
- {fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a5.dist-info}/WHEEL +0 -0
- {fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a5.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.0.0a3"
+__VERSION__ = "2.0.0a5"

fractal_server/app/routes/admin/__init__.py
File without changes

fractal_server/app/routes/admin/v2.py
CHANGED
@@ -12,6 +12,7 @@ from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
 from fastapi.responses import StreamingResponse
+from pydantic import BaseModel
 from sqlmodel import select

 from ....config import get_settings
@@ -279,13 +280,17 @@ async def download_job_logs(
 )


+class TaskCompatibility(BaseModel):
+    is_v2_compatible: bool
+
+
 @router_admin_v2.patch(
     "/task-v1/{task_id}/",
     status_code=status.HTTP_200_OK,
 )
 async def flag_task_v1_as_v2_compatible(
     task_id: int,
-    is_v2_compatible: bool,
+    compatibility: TaskCompatibility,
     user: User = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> Response:
@@ -297,7 +302,7 @@ async def flag_task_v1_as_v2_compatible(
             detail=f"Task {task_id} not found",
         )

-    task.is_v2_compatible = is_v2_compatible
+    task.is_v2_compatible = compatibility.is_v2_compatible
     await db.commit()
     await db.close()

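Note: with this change the superuser endpoint reads the flag from a JSON body (parsed into `TaskCompatibility`) rather than from a query parameter. A minimal client sketch, with a placeholder base URL and token, and assuming the admin v2 router is mounted under `/admin/v2`:

    import httpx

    BASE_URL = "http://localhost:8000"  # placeholder
    TOKEN = "..."  # placeholder superuser token

    # The flag now travels in the request body, not in the query string.
    response = httpx.patch(
        f"{BASE_URL}/admin/v2/task-v1/123/",
        json={"is_v2_compatible": True},
        headers={"Authorization": f"Bearer {TOKEN}"},
    )
    response.raise_for_status()
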
fractal_server/app/routes/api/v1/task_collection.py
CHANGED
@@ -140,6 +140,16 @@ async def collect_tasks_pip(
                 f"Original error: {e}"
             ),
         )
+    except ValidationError as e:
+        await db.close()
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                "Cannot collect package. Possible reason: an old version "
+                "of the same package has already been collected. "
+                f"Original error: {e}"
+            ),
+        )
     task_collect_status.info = "Already installed"
     state = State(data=task_collect_status.sanitised_dict())
     response.status_code == status.HTTP_200_OK

fractal_server/app/routes/api/v2/__init__.py
CHANGED
@@ -10,6 +10,7 @@ from .project import router as project_router_v2
 from .submit import router as submit_job_router_v2
 from .task import router as task_router_v2
 from .task_collection import router as task_collection_router_v2
+from .task_legacy import router as task_legacy_router_v2
 from .workflow import router as workflow_router_v2
 from .workflowtask import router as workflowtask_router_v2

@@ -19,10 +20,13 @@ router_api_v2.include_router(dataset_router_v2, tags=["V2 Dataset"])
 router_api_v2.include_router(job_router_v2, tags=["V2 Job"])
 router_api_v2.include_router(images_routes_v2, tags=["V2 Images"])
 router_api_v2.include_router(project_router_v2, tags=["V2 Project"])
-router_api_v2.include_router(submit_job_router_v2, tags=["V2 Job"])
-router_api_v2.include_router(task_router_v2, prefix="/task", tags=["V2 Task"])
+router_api_v2.include_router(submit_job_router_v2, tags=["V2 Job"])
 router_api_v2.include_router(
     task_collection_router_v2, prefix="/task", tags=["V2 Task Collection"]
 )
+router_api_v2.include_router(task_router_v2, prefix="/task", tags=["V2 Task"])
+router_api_v2.include_router(
+    task_legacy_router_v2, prefix="/task-legacy", tags=["V2 Task Legacy"]
+)
 router_api_v2.include_router(workflow_router_v2, tags=["V2 Workflow"])
 router_api_v2.include_router(workflowtask_router_v2, tags=["V2 WorkflowTask"])

fractal_server/app/routes/api/v2/_aux_functions.py
CHANGED
@@ -39,7 +39,6 @@ async def _get_project_check_owner(
         project_id:
         user_id:
         db:
-        version:

     Returns:
         The project object
@@ -382,8 +381,8 @@ async def _get_task_check_owner(
 def _get_submitted_jobs_statement() -> SelectOfScalar:
     """
     Returns:
-        A sqlmodel statement that selects all `
-        `
+        A sqlmodel statement that selects all `Job`s with
+        `Job.status` equal to `submitted`.
     """
     stm = select(JobV2).where(JobV2.status == JobStatusTypeV1.SUBMITTED)
     return stm
@@ -406,11 +405,16 @@ async def _workflow_insert_task(
     Insert a new WorkflowTask into Workflow.task_list

     Args:
-
-
-
-        order:
-
+        workflow_id:
+        task_id:
+        is_legacy_task:
+        order:
+        meta_parallel:
+        meta_non_parallel:
+        args_non_parallel:
+        args_parallel:
+        input_filters:
+        db:
     """
     db_workflow = await db.get(WorkflowV2, workflow_id)
     if db_workflow is None:

fractal_server/app/routes/api/v2/submit.py
CHANGED
@@ -148,7 +148,7 @@ async def apply_workflow(
     if len(user.slurm_accounts) > 0:
         job_create.slurm_account = user.slurm_accounts[0]

-    # Add new
+    # Add new Job object to DB
     job = JobV2(
         project_id=project_id,
         dataset_id=dataset_id,
@@ -192,9 +192,8 @@ async def apply_workflow(
         raise HTTPException(
             status_code=status.HTTP_429_TOO_MANY_REQUESTS,
             detail=(
-                f"The endpoint 'POST /api/v2/project/{project_id}/
-
-                "was called several times within an interval of less "
+                f"The endpoint 'POST /api/v2/project/{project_id}/job/submit/'"
+                " was called several times within an interval of less "
                 f"than {settings.FRACTAL_API_SUBMIT_RATE_LIMIT} seconds, using"
                 " the same foreign keys. If it was intentional, please wait "
                 "and try again."

fractal_server/app/routes/api/v2/task_collection.py
CHANGED
@@ -137,7 +137,17 @@ async def collect_tasks_pip(
             detail=(
                 "Cannot collect package. Possible reason: another "
                 "collection of the same package is in progress. "
-                f"Original
+                f"Original FileNotFoundError: {e}"
+            ),
+        )
+    except ValidationError as e:
+        await db.close()
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                "Cannot collect package. Possible reason: an old version "
+                "of the same package has already been collected. "
+                f"Original ValidationError: {e}"
             ),
         )
     task_collect_status.info = "Already installed"

fractal_server/app/routes/api/v2/task_legacy.py
ADDED
@@ -0,0 +1,59 @@
+from fastapi import APIRouter
+from fastapi import Depends
+from fastapi import HTTPException
+from fastapi import status
+from sqlmodel import select
+
+from .....logger import set_logger
+from ....db import AsyncSession
+from ....db import get_async_db
+from ....models.v1 import Task as TaskV1
+from ....schemas.v2 import TaskLegacyReadV2
+from ....security import current_active_user
+from ....security import User
+
+router = APIRouter()
+
+logger = set_logger(__name__)
+
+
+@router.get("/", response_model=list[TaskLegacyReadV2])
+async def get_list_task_legacy(
+    args_schema: bool = True,
+    only_v2_compatible: bool = False,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> list[TaskLegacyReadV2]:
+    """
+    Get list of available legacy tasks
+    """
+    stm = select(TaskV1)
+    if only_v2_compatible:
+        stm = stm.where(TaskV1.is_v2_compatible)
+    res = await db.execute(stm)
+    task_list = res.scalars().all()
+    await db.close()
+    if args_schema is False:
+        for task in task_list:
+            setattr(task, "args_schema", None)
+
+    return task_list
+
+
+@router.get("/{task_id}/", response_model=TaskLegacyReadV2)
+async def get_task_legacy(
+    task_id: int,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> TaskLegacyReadV2:
+    """
+    Get info on a specific legacy task
+    """
+    task = await db.get(TaskV1, task_id)
+    await db.close()
+    if not task:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"TaskV1[{task_id}] not found",
+        )
+    return task
fractal_server/app/runner/__init__.py
File without changes

fractal_server/app/runner/executors/__init__.py
File without changes

fractal_server/app/runner/v1/_common.py
CHANGED
@@ -207,7 +207,7 @@ def call_single_task(
         with task_files.metadiff.open("r") as f_metadiff:
             diff_metadata = json.load(f_metadiff)
     except FileNotFoundError as e:
-        logger.
+        logger.warning(
            f"Skip collection of updated metadata. Original error: {str(e)}"
        )
        diff_metadata = {}

fractal_server/app/runner/v1/_local/__init__.py
CHANGED
@@ -49,7 +49,8 @@ def _process_workflow(

     Schedules the workflow using a `FractalThreadPoolExecutor`.

-    Cf.
+    Cf.
+    [process_workflow][fractal_server.app.runner.v1._local.process_workflow]
     for the call signature.
     """

fractal_server/app/runner/v1/_slurm/__init__.py
CHANGED
@@ -66,7 +66,8 @@ def _process_workflow(
     workflow working dir and user to impersonate. It then schedules the
     workflow tasks and returns the output dataset metadata.

-    Cf.
+    Cf.
+    [process_workflow][fractal_server.app.runner.v1._local.process_workflow]

     Returns:
         output_dataset_metadata: Metadata of the output dataset
@@ -132,7 +133,8 @@ async def process_workflow(
     """
     Process workflow (SLURM backend public interface)

-    Cf.
+    Cf.
+    [process_workflow][fractal_server.app.runner.v1._local.process_workflow]
     """

     # Set values of first_task_index and last_task_index

fractal_server/app/runner/v1/_slurm/_submit_setup.py
CHANGED
@@ -12,7 +12,7 @@
 """
 Submodule to define _slurm_submit_setup, which is also the reference
 implementation of `submit_setup_call` in
-[fractal_server.app.runner._common][]).
+[fractal_server.app.runner.v1._common][]).
 """
 from pathlib import Path

@@ -38,7 +38,7 @@ def _slurm_submit_setup(

     For now, this is the reference implementation for the argument
     `submit_setup_call` of
-    [fractal_server.app.runner._common.execute_tasks][].
+    [fractal_server.app.runner.v1._common.execute_tasks][].

     Arguments:
         wftask:

fractal_server/app/runner/v2/__init__.py
CHANGED
@@ -248,7 +248,7 @@ async def submit_workflow(
                 job,
                 dataset,
                 workflow,
-
+                logger_name=logger_name,
                 failed_wftask=failed_wftask,
             )
             latest_filters = assemble_filters_failed_job(job)
@@ -283,7 +283,7 @@ async def submit_workflow(
                 job,
                 dataset,
                 workflow,
-
+                logger_name=logger_name,
             )
             latest_filters = assemble_filters_failed_job(job)
             if latest_filters is not None:
@@ -313,7 +313,7 @@ async def submit_workflow(
                 job,
                 dataset,
                 workflow,
-
+                logger_name=logger_name,
             )
             latest_filters = assemble_filters_failed_job(job)
             if latest_filters is not None:

fractal_server/app/runner/v2/_local/__init__.py
CHANGED
@@ -45,7 +45,8 @@ def _process_workflow(

     Schedules the workflow using a `FractalThreadPoolExecutor`.

-    Cf.
+    Cf.
+    [process_workflow][fractal_server.app.runner.v2._local.process_workflow]
     for the call signature.
     """

@@ -91,21 +92,21 @@ async def process_workflow(
     Args:
         workflow:
             The workflow to be run
-
-
-            workflow
-        output_path:
-            The destination path for the last task of the workflow
-        input_metadata:
-            Initial metadata, passed to the first task
-        logger_name:
-            Name of the logger to log information on the run to
+        dataset:
+            Initial dataset.
         workflow_dir:
             Working directory for this run.
         workflow_dir_user:
             Working directory for this run, on the user side. This argument is
             present for compatibility with the standard backend interface, but
             for the `local` backend it cannot be different from `workflow_dir`.
+        first_task_index:
+            Positional index of the first task to execute; if `None`, start
+            from `0`.
+        last_task_index:
+            Positional index of the last task to execute; if `None`, proceed
+            until the last task.
+        logger_name: Logger name
         slurm_user:
             Username to impersonate to run the workflow. This argument is
             present for compatibility with the standard backend interface, but
@@ -123,12 +124,6 @@ async def process_workflow(
         to the backend executor. This argument is present for compatibility
         with the standard backend interface, but is ignored in the `local`
         backend.
-        first_task_index:
-            Positional index of the first task to execute; if `None`, start
-            from `0`.
-        last_task_index:
-            Positional index of the last task to execute; if `None`, proceed
-            until the last task.

     Raises:
         TaskExecutionError: wrapper for errors raised during tasks' execution

fractal_server/app/runner/v2/_slurm/__init__.py
CHANGED
@@ -56,7 +56,8 @@ def _process_workflow(
     workflow working dir and user to impersonate. It then schedules the
     workflow tasks and returns the new dataset attributes

-    Cf.
+    Cf.
+    [process_workflow][fractal_server.app.runner.v2._local.process_workflow]

     Returns:
         new_dataset_attributes:
@@ -112,7 +113,8 @@ async def process_workflow(
     """
     Process workflow (SLURM backend public interface)

-    Cf.
+    Cf.
+    [process_workflow][fractal_server.app.runner.v2._local.process_workflow]
     """

     # Set values of first_task_index and last_task_index

fractal_server/app/runner/v2/_slurm/_submit_setup.py
CHANGED
@@ -11,8 +11,7 @@
 # Zurich.
 """
 Submodule to define _slurm_submit_setup, which is also the reference
-implementation of `submit_setup_call
-[fractal_server.app.runner._common][]).
+implementation of `submit_setup_call`.
 """
 from pathlib import Path
 from typing import Literal
@@ -40,7 +39,7 @@ def _slurm_submit_setup(

     For now, this is the reference implementation for the argument
     `submit_setup_call` of
-    [fractal_server.app.runner.
+    [fractal_server.app.runner.v2.runner][].

     Arguments:
         wftask:

fractal_server/app/runner/v2/deduplicate_list.py
CHANGED
@@ -1,9 +1,10 @@
 from typing import TypeVar

 from ....images import SingleImage
+from ....images import SingleImageTaskOutput
 from .task_interface import InitArgsModel

-T = TypeVar("T", SingleImage, InitArgsModel)
+T = TypeVar("T", SingleImage, SingleImageTaskOutput, InitArgsModel)


 def deduplicate_list(

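Extending the constrained `TypeVar` lets the same helper operate on lists of `SingleImageTaskOutput`. A sketch of the intended call, assuming `deduplicate_list` drops exact duplicates while preserving order:

    from fractal_server.app.runner.v2.deduplicate_list import deduplicate_list
    from fractal_server.images import SingleImageTaskOutput

    images = [
        SingleImageTaskOutput(zarr_url="/tmp/plate.zarr/A/01/0"),
        SingleImageTaskOutput(zarr_url="/tmp/plate.zarr/A/01/0"),  # duplicate
        SingleImageTaskOutput(zarr_url="/tmp/plate.zarr/A/02/0"),
    ]
    unique = deduplicate_list(images)  # expected: two distinct images
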
fractal_server/app/runner/v2/handle_failed_job.py
CHANGED
@@ -32,7 +32,7 @@ def assemble_history_failed_job(
     job: JobV2,
     dataset: DatasetV2,
     workflow: WorkflowV2,
-
+    logger_name: Optional[str] = None,
     failed_wftask: Optional[WorkflowTaskV2] = None,
 ) -> list[dict[str, Any]]:
     """
@@ -40,12 +40,12 @@ def assemble_history_failed_job(

     Args:
         job:
-            The failed `
-
-            The `
+            The failed `JobV2` object.
+        dataset:
+            The `DatasetV2` object associated to `job`.
         workflow:
-            The `
-
+            The `WorkflowV2` object associated to `job`.
+        logger_name: A logger name.
         failed_wftask:
             If set, append it to `history` during step 3; if `None`, infer
             it by comparing the job task list and the one in
@@ -53,9 +53,11 @@ def assemble_history_failed_job(

     Returns:
         The new value of `history`, to be merged into
-        `
+        `dataset.meta`.
     """

+    logger = logging.getLogger(logger_name)
+
     # The final value of the history attribute should include up to three
     # parts, coming from: the database, the temporary file, the failed-task
     # information.

fractal_server/app/runner/v2/runner.py
CHANGED
@@ -157,6 +157,20 @@ def execute_tasks_v2(
             updated_types.update(image["types"])
             updated_types.update(task.output_types)

+            # Unset attributes with None value
+            updated_attributes = {
+                key: value
+                for key, value in updated_attributes.items()
+                if value is not None
+            }
+
+            # Validate new image
+            SingleImage(
+                zarr_url=image["zarr_url"],
+                types=updated_types,
+                attributes=updated_attributes,
+            )
+
             # Update image in the dataset image list
             tmp_images[original_index]["attributes"] = updated_attributes
             tmp_images[original_index]["types"] = updated_types
@@ -182,6 +196,11 @@ def execute_tasks_v2(
             updated_types = copy(original_img["types"])
             # Update image attributes/types with task output and manifest
             updated_attributes.update(image["attributes"])
+            updated_attributes = {
+                key: value
+                for key, value in updated_attributes.items()
+                if value is not None
+            }
             updated_types.update(image["types"])
             updated_types.update(task.output_types)
             new_image = dict(

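The comprehension added in both hunks gives `None` an "unset this attribute" meaning: keys whose value is `None` are removed before the merged image is validated (as a `SingleImage`, which rejects `None` values) and written back. The filtering step in isolation:

    updated_attributes = {"well": "A01", "plate": None}
    updated_attributes = {
        key: value
        for key, value in updated_attributes.items()
        if value is not None
    }
    # -> {"well": "A01"}: the task unset "plate" by returning it as None
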
fractal_server/app/runner/v2/task_interface.py
CHANGED
@@ -3,7 +3,7 @@ from typing import Any
 from pydantic import BaseModel
 from pydantic import Field

-from ....images import
+from ....images import SingleImageTaskOutput
 from fractal_server.images import Filters


@@ -11,7 +11,9 @@ class TaskOutput(BaseModel):
     class Config:
         extra = "forbid"

-    image_list_updates: list[
+    image_list_updates: list[SingleImageTaskOutput] = Field(
+        default_factory=list
+    )
     image_list_removals: list[str] = Field(default_factory=list)
     filters: Filters = Field(default_factory=Filters)

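Since `image_list_updates` now holds `SingleImageTaskOutput` items, a task output may carry `None`-valued attributes, which the runner then strips (see the `runner.py` hunks above). A minimal sketch:

    from fractal_server.app.runner.v2.task_interface import TaskOutput
    from fractal_server.images import SingleImageTaskOutput

    out = TaskOutput(
        image_list_updates=[
            SingleImageTaskOutput(
                zarr_url="/tmp/plate.zarr/A/01/0",
                attributes={"well": "A01", "plate": None},  # None allowed here
            )
        ],
    )
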
fractal_server/app/schemas/_validators.py
CHANGED
@@ -1,6 +1,7 @@
 import os
 from datetime import datetime
 from datetime import timezone
+from typing import Any


 def valstr(attribute: str, accept_none: bool = False):
@@ -27,6 +28,27 @@ def valstr(attribute: str, accept_none: bool = False):
     return val


+def valdictkeys(attribute: str):
+    def val(d: dict[str, Any]):
+        """
+        Apply valstr to every key of the dictionary, and fail if there are
+        identical keys.
+        """
+        if d is not None:
+            old_keys = list(d.keys())
+            new_keys = [valstr(f"{attribute}[{key}]")(key) for key in old_keys]
+            if len(new_keys) != len(set(new_keys)):
+                raise ValueError(
+                    f"Dictionary contains multiple identical keys: {d}."
+                )
+            for old_key, new_key in zip(old_keys, new_keys):
+                if new_key != old_key:
+                    d[new_key] = d.pop(old_key)
+        return d
+
+    return val
+
+
 def valint(attribute: str, min_val: int = 1):
     """
     Check that an integer attribute (e.g. if it is meant to be the ID of a

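`valdictkeys` runs `valstr` on every key, so keys get the same string validation as scalar fields (for instance, surrounding whitespace stripped, assuming `valstr` behaves as in the existing validators), and two keys that collide after normalization raise a `ValueError`. A quick illustration:

    from fractal_server.app.schemas._validators import valdictkeys

    check = valdictkeys("attributes")
    check({"well ": "A01"})          # -> {"well": "A01"} (key normalized)
    check({"well": 1, " well": 2})   # raises ValueError: identical keys
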
fractal_server/app/schemas/v2/__init__.py
CHANGED
@@ -17,6 +17,7 @@ from .project import ProjectUpdateV2  # noqa F401
 from .task import TaskCreateV2  # noqa F401
 from .task import TaskExportV2  # noqa F401
 from .task import TaskImportV2  # noqa F401
+from .task import TaskLegacyReadV2  # noqa F401
 from .task import TaskReadV2  # noqa F401
 from .task import TaskUpdateV2  # noqa F401
 from .task_collection import TaskCollectPipV2  # noqa F401

fractal_server/app/schemas/v2/dumps.py
CHANGED
@@ -5,7 +5,7 @@ Dump models differ from their Read counterpart in that:
 * They may only include a subset of the Read attributes.

 These models are used in at least two situations:
-1. In the "*_dump" attributes of
+1. In the "*_dump" attributes of Job models;
 2. In the `_DatasetHistoryItem.workflowtask` model, to trim its size.
 """
 from typing import Optional

fractal_server/app/schemas/v2/task.py
CHANGED
@@ -9,7 +9,9 @@ from pydantic import HttpUrl
 from pydantic import root_validator
 from pydantic import validator

+from .._validators import valdictkeys
 from .._validators import valstr
+from ..v1.task import TaskReadV1


 class TaskCreateV2(BaseModel, extra=Extra.forbid):
@@ -20,8 +22,8 @@ class TaskCreateV2(BaseModel, extra=Extra.forbid):
     command_parallel: Optional[str]
     source: str

-    meta_parallel: Optional[dict[str, Any]]
     meta_non_parallel: Optional[dict[str, Any]]
+    meta_parallel: Optional[dict[str, Any]]
     version: Optional[str]
     args_schema_non_parallel: Optional[dict[str, Any]]
     args_schema_parallel: Optional[dict[str, Any]]
@@ -53,9 +55,28 @@ class TaskCreateV2(BaseModel, extra=Extra.forbid):
     )
     _source = validator("source", allow_reuse=True)(valstr("source"))
     _version = validator("version", allow_reuse=True)(valstr("version"))
+
+    _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
+        valdictkeys("meta_non_parallel")
+    )
+    _meta_parallel = validator("meta_parallel", allow_reuse=True)(
+        valdictkeys("meta_parallel")
+    )
+    _args_schema_non_parallel = validator(
+        "args_schema_non_parallel", allow_reuse=True
+    )(valdictkeys("args_schema_non_parallel"))
+    _args_schema_parallel = validator(
+        "args_schema_parallel", allow_reuse=True
+    )(valdictkeys("args_schema_parallel"))
     _args_schema_version = validator("args_schema_version", allow_reuse=True)(
         valstr("args_schema_version")
     )
+    _input_types = validator("input_types", allow_reuse=True)(
+        valdictkeys("input_types")
+    )
+    _output_types = validator("output_types", allow_reuse=True)(
+        valdictkeys("output_types")
+    )


 class TaskReadV2(BaseModel):
@@ -80,6 +101,10 @@ class TaskReadV2(BaseModel):
     output_types: dict[str, bool]


+class TaskLegacyReadV2(TaskReadV1):
+    is_v2_compatible: bool
+
+
 class TaskUpdateV2(BaseModel):

     name: Optional[str]
@@ -106,6 +131,12 @@ class TaskUpdateV2(BaseModel):
     _command_non_parallel = validator(
         "command_non_parallel", allow_reuse=True
     )(valstr("command_non_parallel"))
+    _input_types = validator("input_types", allow_reuse=True)(
+        valdictkeys("input_types")
+    )
+    _output_types = validator("output_types", allow_reuse=True)(
+        valdictkeys("output_types")
+    )


 class TaskImportV2(BaseModel):

fractal_server/app/schemas/v2/task_collection.py
CHANGED
@@ -6,6 +6,7 @@ from pydantic import BaseModel
 from pydantic import Field
 from pydantic import validator

+from .._validators import valdictkeys
 from .._validators import valstr
 from .task import TaskReadV2

@@ -43,6 +44,9 @@ class TaskCollectPipV2(BaseModel):
     python_version: Optional[str] = None
     pinned_package_versions: Optional[dict[str, str]] = None

+    _pinned_package_versions = validator(
+        "pinned_package_versions", allow_reuse=True
+    )(valdictkeys("pinned_package_versions"))
     _package_extras = validator("package_extras", allow_reuse=True)(
         valstr("package_extras")
     )

fractal_server/app/schemas/v2/workflowtask.py
CHANGED
@@ -8,6 +8,7 @@ from pydantic import Field
 from pydantic import root_validator
 from pydantic import validator

+from .._validators import valdictkeys
 from .._validators import valint
 from ..v1.task import TaskExportV1
 from ..v1.task import TaskImportV1
@@ -49,7 +50,18 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
     is_legacy_task: bool = False

     # Validators
-
+    _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
+        valdictkeys("meta_non_parallel")
+    )
+    _meta_parallel = validator("meta_parallel", allow_reuse=True)(
+        valdictkeys("meta_parallel")
+    )
+    _args_non_parallel = validator("args_non_parallel", allow_reuse=True)(
+        valdictkeys("args_non_parallel")
+    )
+    _args_parallel = validator("args_parallel", allow_reuse=True)(
+        valdictkeys("args_parallel")
+    )
     _order = validator("order", allow_reuse=True)(valint("order", min_val=0))

     @root_validator
@@ -96,14 +108,18 @@ class WorkflowTaskUpdateV2(BaseModel):
     input_filters: Optional[Filters]

     # Validators
-
-
-
-
-
-
-
-
+    _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
+        valdictkeys("meta_non_parallel")
+    )
+    _meta_parallel = validator("meta_parallel", allow_reuse=True)(
+        valdictkeys("meta_parallel")
+    )
+    _args_non_parallel = validator("args_non_parallel", allow_reuse=True)(
+        valdictkeys("args_non_parallel")
+    )
+    _args_parallel = validator("args_parallel", allow_reuse=True)(
+        valdictkeys("args_parallel")
+    )


 class WorkflowTaskImportV2(BaseModel):
@@ -119,6 +135,19 @@ class WorkflowTaskImportV2(BaseModel):
     task: Optional[TaskImportV2] = None
     task_legacy: Optional[TaskImportV1] = None

+    _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
+        valdictkeys("meta_non_parallel")
+    )
+    _meta_parallel = validator("meta_parallel", allow_reuse=True)(
+        valdictkeys("meta_parallel")
+    )
+    _args_non_parallel = validator("args_non_parallel", allow_reuse=True)(
+        valdictkeys("args_non_parallel")
+    )
+    _args_parallel = validator("args_parallel", allow_reuse=True)(
+        valdictkeys("args_parallel")
+    )
+

 class WorkflowTaskExportV2(BaseModel):

fractal_server/images/models.py
CHANGED
@@ -6,8 +6,19 @@ from pydantic import BaseModel
 from pydantic import Field
 from pydantic import validator

+from fractal_server.app.schemas._validators import valdictkeys

-class SingleImage(BaseModel):
+
+class SingleImageBase(BaseModel):
+    """
+    Base for SingleImage and SingleImageTaskOutput.
+
+    Attributes:
+        zarr_url:
+        origin:
+        attributes:
+        types:
+    """

     zarr_url: str
     origin: Optional[str] = None
@@ -15,6 +26,37 @@ class SingleImage(BaseModel):
     attributes: dict[str, Any] = Field(default_factory=dict)
     types: dict[str, bool] = Field(default_factory=dict)

+    # Validators
+    _attributes = validator("attributes", allow_reuse=True)(
+        valdictkeys("attributes")
+    )
+    _types = validator("types", allow_reuse=True)(valdictkeys("types"))
+
+
+class SingleImageTaskOutput(SingleImageBase):
+    """
+    `SingleImageBase`, with scalar `attributes` values (`None` included).
+    """
+
+    @validator("attributes")
+    def validate_attributes(
+        cls, v: dict[str, Any]
+    ) -> dict[str, Union[int, float, str, bool, None]]:
+        for key, value in v.items():
+            if not isinstance(value, (int, float, str, bool, type(None))):
+                raise ValueError(
+                    f"SingleImageTaskOutput.attributes[{key}] must be a "
+                    "scalar (int, float, str or bool). "
+                    f"Given {value} ({type(value)})"
+                )
+        return v
+
+
+class SingleImage(SingleImageBase):
+    """
+    `SingleImageBase`, with scalar `attributes` values (`None` excluded).
+    """
+
     @validator("attributes")
     def validate_attributes(
         cls, v: dict[str, Any]
@@ -36,6 +78,11 @@ class Filters(BaseModel):
         extra = "forbid"

     # Validators
+    _attributes = validator("attributes", allow_reuse=True)(
+        valdictkeys("attributes")
+    )
+    _types = validator("types", allow_reuse=True)(valdictkeys("types"))
+
     @validator("attributes")
     def validate_attributes(
         cls, v: dict[str, Any]

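The split mirrors the runner behavior above: `SingleImageTaskOutput` accepts `None` attribute values (a task's way of unsetting them), while `SingleImage`, used for images stored in a dataset, rejects them. A sketch:

    from fractal_server.images import SingleImage, SingleImageTaskOutput

    # Accepted: None marks the attribute for removal from the image.
    SingleImageTaskOutput(zarr_url="/tmp/x.zarr", attributes={"plate": None})

    # Rejected: a stored image cannot carry a None attribute value.
    SingleImage(zarr_url="/tmp/x.zarr", attributes={"plate": None})  # validation error
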
fractal_server/tasks/v1/__init__.py
File without changes

fractal_server/tasks/v2/__init__.py
File without changes

{fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a5.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fractal-server
-Version: 2.0.0a3
+Version: 2.0.0a5
 Summary: Server component of the Fractal analytics platform
 Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause
@@ -16,21 +16,21 @@ Classifier: Programming Language :: Python :: 3.12
 Provides-Extra: gunicorn
 Provides-Extra: postgres
 Requires-Dist: aiosqlite (>=0.19.0,<0.20.0)
-Requires-Dist: alembic (>=1.
+Requires-Dist: alembic (>=1.13.1,<2.0.0)
 Requires-Dist: asyncpg (>=0.29.0,<0.30.0) ; extra == "postgres"
 Requires-Dist: bcrypt (==4.0.1)
-Requires-Dist: cloudpickle (>=
+Requires-Dist: cloudpickle (>=3.0.0,<3.1.0)
 Requires-Dist: clusterfutures (>=0.5,<0.6)
-Requires-Dist: fastapi (>=0.
+Requires-Dist: fastapi (>=0.110.0,<0.111.0)
 Requires-Dist: fastapi-users[oauth] (>=12.1.0,<13.0.0)
 Requires-Dist: gunicorn (>=21.2.0,<22.0.0) ; extra == "gunicorn"
 Requires-Dist: packaging (>=23.2,<24.0)
 Requires-Dist: psycopg2 (>=2.9.5,<3.0.0) ; extra == "postgres"
 Requires-Dist: pydantic (>=1.10.8,<2)
-Requires-Dist: python-dotenv (>=0.
+Requires-Dist: python-dotenv (>=1.0.0,<2.0.0)
 Requires-Dist: sqlalchemy[asyncio] (>=2.0.23,<2.1)
-Requires-Dist: sqlmodel (>=0.0.
-Requires-Dist: uvicorn (>=0.
+Requires-Dist: sqlmodel (>=0.0.16,<0.0.17)
+Requires-Dist: uvicorn (>=0.29.0,<0.30.0)
 Project-URL: Changelog, https://github.com/fractal-analytics-platform/fractal-server/blob/main/CHANGELOG.md
 Project-URL: Documentation, https://fractal-analytics-platform.github.io/fractal-server
 Project-URL: Repository, https://github.com/fractal-analytics-platform/fractal-server

{fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a5.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-fractal_server/__init__.py,sha256=
+fractal_server/__init__.py,sha256=oMW8PG9hYE8hZFrzx7V0lWEhIDsTi7x10oIzOXR7EYc,24
 fractal_server/__main__.py,sha256=CocbzZooX1UtGqPi55GcHGNxnrJXFg5tUU5b3wyFCyo,4958
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -21,8 +21,9 @@ fractal_server/app/models/v2/task.py,sha256=9ZPhug3VWyeqgT8wQ9_8ZXQ2crSiiicRipxr
 fractal_server/app/models/v2/workflow.py,sha256=4pSTeZC78OQbgHHC5S0ge6pK1AP6ak7Qew_0ZNM9xuw,1256
 fractal_server/app/models/v2/workflowtask.py,sha256=f2a85MSAyBAdC7oG6SR8mViMNqlomQWaIB08n3ZhT-0,2727
 fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/routes/admin/v1.py,sha256=uY6H1znlAlrM9e1MG2EThTqwciCl87Twew34JM5W6IU,13981
-fractal_server/app/routes/admin/v2.py,sha256=
+fractal_server/app/routes/admin/v2.py,sha256=c2hk8lB9ilXvFDJ6AHv7aPd_nyaXyobH0S7CaaDqFMI,9826
 fractal_server/app/routes/api/__init__.py,sha256=EVyZrEq3I_1643QGTPCC5lgCp4xH_auYbrFfogTm4pc,315
 fractal_server/app/routes/api/v1/__init__.py,sha256=Y2HQdG197J0a7DyQEE2jn53IfxD0EHGhzK1I2JZuEck,958
 fractal_server/app/routes/api/v1/_aux_functions.py,sha256=eC5exnGj9jnJqx0ccecoNaipxDeK2ZsR1ev0syH5x-Y,11955
@@ -30,18 +31,19 @@ fractal_server/app/routes/api/v1/dataset.py,sha256=7z57FGBTCyz_G6Ivr1PeGIXGyd15f
 fractal_server/app/routes/api/v1/job.py,sha256=NwXyhvvzdPDor0ts8Im__9-I0P1H943s4NXIRgaz7PM,5436
 fractal_server/app/routes/api/v1/project.py,sha256=keqA0gYM48lyFP8zJgZ6cv34V6Js8DD-gbzE316H46k,15765
 fractal_server/app/routes/api/v1/task.py,sha256=4zUXMtq5M95XjaZs1t9oibYHiDIwxpM-3sTAxN95aRk,6123
-fractal_server/app/routes/api/v1/task_collection.py,sha256=
+fractal_server/app/routes/api/v1/task_collection.py,sha256=_cY3pPRGchdWPuJ1XudMZMVJ0IC0_XVH0XwLTiAbRGg,8873
 fractal_server/app/routes/api/v1/workflow.py,sha256=ZObifWTPi100oRQ1wEER8Sgsr3Neo8QVdCCFQnWMNZ0,10930
 fractal_server/app/routes/api/v1/workflowtask.py,sha256=ox-DIIqYV4K35hCu86eGa2SHnR5IQml-I00UHEwnmHQ,5579
-fractal_server/app/routes/api/v2/__init__.py,sha256=
-fractal_server/app/routes/api/v2/_aux_functions.py,sha256=
+fractal_server/app/routes/api/v2/__init__.py,sha256=x56HcY1uBNCgq4BRVj-0j6bAj6OsTN97RNDqY8NefJ8,1373
+fractal_server/app/routes/api/v2/_aux_functions.py,sha256=TCHf3aM-KQxaNJen10CGX1Da5IIra00xRF39FUTU698,14301
 fractal_server/app/routes/api/v2/dataset.py,sha256=qQi9jfT9YLu6DrRCPh280J3MoFWs9yMiejkCNaauCyQ,9680
 fractal_server/app/routes/api/v2/images.py,sha256=b1NM9Y0ocuRYRec-3UcVAizB0vFkmzPEHfObaoCnIMY,5956
 fractal_server/app/routes/api/v2/job.py,sha256=9mXaKCX_N3FXM0GIxdE49nWl_hJZ8CBLBIaMMhaCKOM,5334
 fractal_server/app/routes/api/v2/project.py,sha256=i9a19HAqE36N92G60ZYgObIP9nv-hR7Jt5nd9Dkhz1g,6024
-fractal_server/app/routes/api/v2/submit.py,sha256=
+fractal_server/app/routes/api/v2/submit.py,sha256=egu5jE93sU7sRu3x_4Rp9t3uUJFATK4dwj4Pl3iJPN0,7171
 fractal_server/app/routes/api/v2/task.py,sha256=gJ0LruSk-Q1iMw8ZOX8C0wrZ4S4DGlQTr_5SdJJud0Q,7130
-fractal_server/app/routes/api/v2/task_collection.py,sha256=
+fractal_server/app/routes/api/v2/task_collection.py,sha256=iw74UF8qdQa9pJf0DvSjihng6ri2k2HtW2UhMS_a8Zc,8904
+fractal_server/app/routes/api/v2/task_legacy.py,sha256=P_VJv9v0yzFUBuS-DQHhMVSOe20ecGJJcFBqiiFciOM,1628
 fractal_server/app/routes/api/v2/workflow.py,sha256=sw-1phO_rrmDAcWX9Zqb9M8SfrWF78-02AuLB1-D1PU,11845
 fractal_server/app/routes/api/v2/workflowtask.py,sha256=L4hYpb-ihKNfPxM5AnZqPhCdiojI9Eq5TR0wf-0vP_s,8414
 fractal_server/app/routes/auth.py,sha256=Xv80iqdyfY3lyicYs2Y8B6zEDEnyUu_H6_6psYtv3R4,4885
@@ -49,50 +51,51 @@ fractal_server/app/routes/aux/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5N
 fractal_server/app/routes/aux/_job.py,sha256=5gKgvArAruSkMQuPN34Vvzi89WJbwWPsx0oDAa_iXu4,1248
 fractal_server/app/routes/aux/_runner.py,sha256=psW6fsoo_VrAHrD5UQPbqFYikCp0m16VRymC-U1yUTk,675
 fractal_server/app/runner/.gitignore,sha256=ytzN_oyHWXrGU7iFAtoHSTUbM6Rn6kG0Zkddg0xZk6s,16
+fractal_server/app/runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/async_wrap.py,sha256=_O6f8jftKYXG_DozkmlrDBhoiK9QhE9MablOyECq2_M,829
 fractal_server/app/runner/components.py,sha256=ZF8ct_Ky5k8IAcrmpYOZ-bc6OBgdELEighYVqFDEbZg,119
 fractal_server/app/runner/exceptions.py,sha256=_qZ_t8O4umAdJ1ikockiF5rDJuxnEskrGrLjZcnQl7A,4159
+fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
 fractal_server/app/runner/executors/slurm/_batching.py,sha256=1P6CgrAOCK9u_EvNFTumcQ-PcZMpocCaSAyNr0YB1js,8841
 fractal_server/app/runner/executors/slurm/_check_jobs_status.py,sha256=8d29a7DQ2xoWxoFQCnFfTpHER-qBX8mEatl4Dw5HU_o,1908
 fractal_server/app/runner/executors/slurm/_executor_wait_thread.py,sha256=J3tjAx33nBgW4eHAXDte7hDs7Oe9FLEZaElEt8inrbg,4421
 fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=rF37XDImX1QoWx37MC5hSM9AuY_KfHU5gaWwN4vl4Zk,15552
 fractal_server/app/runner/executors/slurm/_subprocess_run_as_user.py,sha256=8CCtxWCuB5UDst3C_WJxBU77xwPrpDyq7iMCZMnodXU,5123
-fractal_server/app/runner/executors/slurm/executor.py,sha256=
+fractal_server/app/runner/executors/slurm/executor.py,sha256=O9h6ZPAKM95BUJrZkHCdFJZrw2zR2XmxeB5fCoGp97w,44451
 fractal_server/app/runner/executors/slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77JRK82g_2hx0iBKTiMghuIo,5852
 fractal_server/app/runner/filenames.py,sha256=9lwu3yB4C67yiijYw8XIKaLFn3mJUt6_TCyVFM_aZUQ,206
 fractal_server/app/runner/set_start_and_last_task_index.py,sha256=-q4zVybAj8ek2XlbENKlfOAJ39hT_zoJoZkqzDqiAMY,1254
 fractal_server/app/runner/task_files.py,sha256=c5mggMy7BIK_yBUvbimFgvKFZPKKDu6RRfWepwinBVk,3219
 fractal_server/app/runner/v1/__init__.py,sha256=meqMG2UejFa_1hm5xlsmkDxsM7Y_hqftsexuteQXOrE,13608
-fractal_server/app/runner/v1/_common.py,sha256=
-fractal_server/app/runner/v1/_local/__init__.py,sha256=
+fractal_server/app/runner/v1/_common.py,sha256=fIt8BVW7u6ReZbHgQ5kV2kDtWoHMQNjPoeuWP5YfWQk,21240
+fractal_server/app/runner/v1/_local/__init__.py,sha256=8PjeyPLvj6KHdZ3HyzWZCdlrubgedA1hZLXGAsLNOKI,6926
 fractal_server/app/runner/v1/_local/_local_config.py,sha256=hM7SPxR07luXPcXdrWXRpEB2uOyjSSRUdqW3QBKJn9c,3147
 fractal_server/app/runner/v1/_local/_submit_setup.py,sha256=kvNPT7ey2mEamORzPMMVThbFHtzZcSr-0A9tYw9uVDA,1493
 fractal_server/app/runner/v1/_local/executor.py,sha256=QrJlD77G6q4WohoJQO7XXbvi2RlCUsNvMnPDEZIoAqA,3620
-fractal_server/app/runner/v1/_slurm/__init__.py,sha256=
-fractal_server/app/runner/v1/_slurm/_submit_setup.py,sha256=
+fractal_server/app/runner/v1/_slurm/__init__.py,sha256=KN98RO8E3EG4MLNFa--D3DilRHjUyHrVicC6pHtu5L0,10853
+fractal_server/app/runner/v1/_slurm/_submit_setup.py,sha256=llTgSOCnCVMvm7Q0SoVpLZshorAOZZUDz927ij0LZEA,2738
 fractal_server/app/runner/v1/_slurm/get_slurm_config.py,sha256=6TLWQon8hSicsD7c3yXK4P9xeId0s_H3HOOeMUVGVss,5977
 fractal_server/app/runner/v1/common.py,sha256=_L-vjLnWato80VdlB_BFN4G8P4jSM07u-5cnl1T3S34,3294
 fractal_server/app/runner/v1/handle_failed_job.py,sha256=bHzScC_aIlU3q-bQxGW6rfWV4xbZ2tho_sktjsAs1no,4684
-fractal_server/app/runner/v2/__init__.py,sha256=
-fractal_server/app/runner/v2/_local/__init__.py,sha256=
+fractal_server/app/runner/v2/__init__.py,sha256=RwIOSLCChMZWHix5QuUNRPtRwgf1UmFDk3YufRCTOoc,12482
+fractal_server/app/runner/v2/_local/__init__.py,sha256=Q1s-DwXleUq6w1ZNv6tlh3tZv6cyBqxB_hMvZlqVYaM,5881
 fractal_server/app/runner/v2/_local/_local_config.py,sha256=lR0Js-l63mQUzN9hK0HkfdLsrTf-W6GHvPvbPC64amY,3630
 fractal_server/app/runner/v2/_local/_submit_setup.py,sha256=deagsLSy6A3ZHKaSDcQqrdvbQVM3i4kgyTcbVc0tC5U,1614
 fractal_server/app/runner/v2/_local/executor.py,sha256=QrJlD77G6q4WohoJQO7XXbvi2RlCUsNvMnPDEZIoAqA,3620
-fractal_server/app/runner/v2/_slurm/__init__.py,sha256=
-fractal_server/app/runner/v2/_slurm/_submit_setup.py,sha256=
+fractal_server/app/runner/v2/_slurm/__init__.py,sha256=srxn5-KdQxqD8cWJmOJlSoctbXYlyCMM249xWGY9bhI,4409
+fractal_server/app/runner/v2/_slurm/_submit_setup.py,sha256=tsZHQdVy3VxENMdsBzHltrVWzugBppq0cFrHtaVzoUA,2793
 fractal_server/app/runner/v2/_slurm/get_slurm_config.py,sha256=sqP-hs58TPt849rx10VRFKWX_DgLDPQcKZJcE0zKBXs,6621
-fractal_server/app/runner/v2/deduplicate_list.py,sha256
-fractal_server/app/runner/v2/handle_failed_job.py,sha256=
+fractal_server/app/runner/v2/deduplicate_list.py,sha256=-imwO7OB7ATADEnqVbTElUwoY0YIJCTf_SbWJNN9OZg,639
+fractal_server/app/runner/v2/handle_failed_job.py,sha256=fipRJT5Y8UY0US4bXUX-4ORTAQ1AetZcCAOVCjDO3_c,5202
 fractal_server/app/runner/v2/merge_outputs.py,sha256=IHuHqbKmk97K35BFvTrKVBs60z3e_--OzXTnsvmA02c,1281
-fractal_server/app/runner/v2/runner.py,sha256=
+fractal_server/app/runner/v2/runner.py,sha256=rBRehRDduGU0TUOkgQN6WbIGhDWZ6GOat4bv7IVB8cA,11784
 fractal_server/app/runner/v2/runner_functions.py,sha256=LfO1-FJF70_Qh78NQTCHJWyzyr011wvvtnzB6nTj5ZM,10087
 fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=Pp3hsj1i1t4ExDMcUBkQ27yEi7kjlvymY6q6eDiC8DM,3845
-fractal_server/app/runner/v2/task_interface.py,sha256=
+fractal_server/app/runner/v2/task_interface.py,sha256=NJZUMHtEs5C3bFdXX42Kv1GMzQ7xPW2v5ZRxGNANOec,1410
 fractal_server/app/runner/v2/v1_compat.py,sha256=6UijuRYbB2ry2mM073u1fW4CSTeelB11lmoj_TOGtm4,511
 fractal_server/app/schemas/__init__.py,sha256=VL55f3CTFngXHYkOsFaLBEEkEEewEWI5ODlcGTI7cqA,157
-fractal_server/app/schemas/_validators.py,sha256=
-fractal_server/app/schemas/json_schemas/manifest.json,sha256=yXYKHbYXPYSkSXMTLfTpfCUGBtmQuPTk1xuSXscdba4,1787
+fractal_server/app/schemas/_validators.py,sha256=Pdff5plJJmoUTf_nZpMA24tZlFJb84EdRSnLwRZDxfE,3264
 fractal_server/app/schemas/state.py,sha256=t4XM04aqxeluh8MfvD7LfEc-8-dOmUVluZHhLsfxxkc,692
 fractal_server/app/schemas/user.py,sha256=rE8WgBz-ceVUs0Sz2ZwcjUrSTZTnS0ys5SBtD2XD9r8,3113
 fractal_server/app/schemas/v1/__init__.py,sha256=gZLfkANl4YtZ7aV3PFoUj5w0m1-riQv9iRomJhZRLZo,2078
@@ -104,21 +107,21 @@ fractal_server/app/schemas/v1/project.py,sha256=TO2TjI4m9FO-A9IB9lUCld7E4Ld0k4Ma
 fractal_server/app/schemas/v1/task.py,sha256=7BxOZ_qoRQ8n3YbQpDvB7VMcxB5fSYQmR5RLIWhuJ5U,3704
 fractal_server/app/schemas/v1/task_collection.py,sha256=uvq9bcMaGD_qHsh7YtcpoSAkVAbw12eY4DocIO3MKOg,3057
 fractal_server/app/schemas/v1/workflow.py,sha256=tuOs5E5Q_ozA8if7YPZ07cQjzqB_QMkBS4u92qo4Ro0,4618
-fractal_server/app/schemas/v2/__init__.py,sha256=
+fractal_server/app/schemas/v2/__init__.py,sha256=zlCYrplCWwnCL9-BYsExRMfVzhBy21IMBfdHPMgJZYk,1752
 fractal_server/app/schemas/v2/dataset.py,sha256=ThUwme1uVhamZhlvlN0873bTDTbhTaoFanQBlgp0F5k,1839
-fractal_server/app/schemas/v2/dumps.py,sha256=
+fractal_server/app/schemas/v2/dumps.py,sha256=CPJ5hS5z6S0sPJ5frAMe7yFvF5Yv76c07jiddqQpRyU,2037
 fractal_server/app/schemas/v2/job.py,sha256=zfF9K3v4jWUJ7M482ta2CkqUJ4tVT4XfVt60p9IRhP0,3250
 fractal_server/app/schemas/v2/manifest.py,sha256=N37IWohcfO3_y2l8rVM0h_1nZq7m4Izxk9iL1vtwBJw,6243
 fractal_server/app/schemas/v2/project.py,sha256=Okm9n4KqUUs8oxFo6yIV3Y_4mJznLeKCI2ccjY0X8Vo,814
-fractal_server/app/schemas/v2/task.py,sha256=
-fractal_server/app/schemas/v2/task_collection.py,sha256=
+fractal_server/app/schemas/v2/task.py,sha256=7IfxiZkaVqlARy7WYE_H8m7j_IEcuQaZORUrs6b5YuY,4672
+fractal_server/app/schemas/v2/task_collection.py,sha256=sY29NQfJrbjiidmVkVjSIH-20wIsmh7G1QOdr05KoDQ,3171
 fractal_server/app/schemas/v2/workflow.py,sha256=KnzsuTQZ8S1wwoRDY3poWTnO3GbogFTLqCoBJNYzIFU,1831
-fractal_server/app/schemas/v2/workflowtask.py,sha256=
+fractal_server/app/schemas/v2/workflowtask.py,sha256=vRyPca8smu6fzwd9gO1eOd3qdPLJ-Zq2AAAbSLCou3I,5051
 fractal_server/app/security/__init__.py,sha256=wxosoHc3mJYPCdPMyWnRD8w_2OgnKYp2aDkdmwrZh5k,11203
 fractal_server/config.py,sha256=CA8ASObADaME5chDiBXawAJZ3MvjTRpCKP0jvdYtSh8,15080
 fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
-fractal_server/images/__init__.py,sha256=
-fractal_server/images/models.py,sha256=
+fractal_server/images/__init__.py,sha256=KWLVMlWqTY85qq1VUpzaJi5Sl2VOYWEn0vIEiD-QZ5k,144
+fractal_server/images/models.py,sha256=hgDQf1-SsMJw504GFUufVETedPPEweCQxUhA2uDfdao,2904
 fractal_server/images/tools.py,sha256=Q7jM60r_jq5bttrt1b4bU29n717RSUMMPbAbAkzWjgw,2234
 fractal_server/logger.py,sha256=95duXY8eSxf1HWg0CVn8SUGNzgJw9ZR0FlapDDF6WAY,3924
 fractal_server/main.py,sha256=7CpwPfCsHxBAo5fWuXPCsYOFCpbBI0F7Z0jsgCQdou8,3001
@@ -147,14 +150,16 @@ fractal_server/tasks/__init__.py,sha256=kadmVUoIghl8s190_Tt-8f-WBqMi8u8oU4Pvw39N
 fractal_server/tasks/endpoint_operations.py,sha256=D1WSJd8dIfIumKezon1NYX5a0QNPqqlbj9uRq-ur9CQ,5379
 fractal_server/tasks/utils.py,sha256=R1_SKfXTwveT7CJJOrvkwi0vNpr9MBIiNh7qv8EK3Wc,3278
 fractal_server/tasks/v1/_TaskCollectPip.py,sha256=16Gn8lVYHBuwNLBHdcdx0X8s9QXXsbfPwSzcCcM6fRg,3775
+fractal_server/tasks/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/tasks/v1/background_operations.py,sha256=T5L-ghgGEJIGcGoZB_r0cjH96UkEfAPkhr2ciTSaQlQ,11725
 fractal_server/tasks/v1/get_collection_data.py,sha256=bi9tuApLgoKZNMIG1kR4GoKI9S6Y040gFfNQapw4ikM,502
 fractal_server/tasks/v2/_TaskCollectPip.py,sha256=QeCqXDgOnMjk3diVlC5bgGEywyQjYFm5637Rke49vJY,3775
+fractal_server/tasks/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/tasks/v2/background_operations.py,sha256=zr6j3uoWmCeW2EA9auxWNZ0sG3SHgSxUVTC1OpQXE3Y,12803
 fractal_server/tasks/v2/get_collection_data.py,sha256=Qhf2T_aaqAfqu9_KpUSlXsS7EJoZQbEPEreHHa2jco8,502
 fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
-fractal_server-2.0.0a3.dist-info/LICENSE,sha256=
-fractal_server-2.0.0a3.dist-info/METADATA,sha256=
-fractal_server-2.0.0a3.dist-info/WHEEL,sha256=
-fractal_server-2.0.0a3.dist-info/entry_points.txt,sha256=
-fractal_server-2.0.0a3.dist-info/RECORD,,
+fractal_server-2.0.0a5.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.0.0a5.dist-info/METADATA,sha256=xM14fc51eG07eC21M_JzXJeENmKObX-mn4ASQ6GkV7I,4204
+fractal_server-2.0.0a5.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+fractal_server-2.0.0a5.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.0.0a5.dist-info/RECORD,,

fractal_server/app/schemas/json_schemas/manifest.json
REMOVED
@@ -1,81 +0,0 @@
-{
-  "title": "ManifestV1",
-  "description": "Manifest schema version 1.\n\nAttributes:\n    task_list:",
-  "type": "object",
-  "properties": {
-    "manifest_version": {
-      "title": "Manifest Version",
-      "type": "string"
-    },
-    "task_list": {
-      "title": "Task List",
-      "type": "array",
-      "items": {
-        "$ref": "#/definitions/TaskManifestV1"
-      }
-    },
-    "has_args_schemas": {
-      "title": "Has Args Schemas",
-      "default": false,
-      "type": "boolean"
-    },
-    "args_schema_version": {
-      "title": "Args Schema Version",
-      "type": "string"
-    }
-  },
-  "required": [
-    "manifest_version",
-    "task_list"
-  ],
-  "definitions": {
-    "TaskManifestV1": {
-      "title": "TaskManifestV1",
-      "description": "Task manifest schema version 1.",
-      "type": "object",
-      "properties": {
-        "name": {
-          "title": "Name",
-          "type": "string"
-        },
-        "executable": {
-          "title": "Executable",
-          "type": "string"
-        },
-        "input_type": {
-          "title": "Input Type",
-          "type": "string"
-        },
-        "output_type": {
-          "title": "Output Type",
-          "type": "string"
-        },
-        "meta": {
-          "title": "Meta",
-          "type": "object"
-        },
-        "args_schema": {
-          "title": "Args Schema",
-          "type": "object"
-        },
-        "docs_info": {
-          "title": "Docs Info",
-          "type": "string"
-        },
-        "docs_link": {
-          "title": "Docs Link",
-          "minLength": 1,
-          "maxLength": 2083,
-          "format": "uri",
-          "type": "string"
-        }
-      },
-      "required": [
-        "name",
-        "executable",
-        "input_type",
-        "output_type"
-      ]
-    }
-  }
-}

{fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a5.dist-info}/LICENSE
File without changes

{fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a5.dist-info}/WHEEL
File without changes

{fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a5.dist-info}/entry_points.txt
File without changes