fractal-server 2.0.0a3__py3-none-any.whl → 2.0.0a4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/routes/admin/__init__.py +0 -0
  3. fractal_server/app/routes/admin/v2.py +7 -2
  4. fractal_server/app/routes/api/v2/__init__.py +1 -1
  5. fractal_server/app/routes/api/v2/_aux_functions.py +12 -8
  6. fractal_server/app/routes/api/v2/submit.py +3 -4
  7. fractal_server/app/runner/__init__.py +0 -0
  8. fractal_server/app/runner/executors/__init__.py +0 -0
  9. fractal_server/app/runner/executors/slurm/executor.py +0 -2
  10. fractal_server/app/runner/v1/_local/__init__.py +2 -1
  11. fractal_server/app/runner/v1/_slurm/__init__.py +4 -2
  12. fractal_server/app/runner/v1/_slurm/_submit_setup.py +2 -2
  13. fractal_server/app/runner/v2/__init__.py +3 -3
  14. fractal_server/app/runner/v2/_local/__init__.py +11 -16
  15. fractal_server/app/runner/v2/_slurm/__init__.py +4 -2
  16. fractal_server/app/runner/v2/_slurm/_submit_setup.py +2 -3
  17. fractal_server/app/runner/v2/handle_failed_job.py +9 -7
  18. fractal_server/app/schemas/_validators.py +22 -0
  19. fractal_server/app/schemas/v1/task.py +1 -0
  20. fractal_server/app/schemas/v2/dumps.py +1 -1
  21. fractal_server/app/schemas/v2/task.py +27 -1
  22. fractal_server/app/schemas/v2/task_collection.py +4 -0
  23. fractal_server/app/schemas/v2/workflowtask.py +38 -9
  24. fractal_server/images/models.py +13 -0
  25. fractal_server/tasks/v1/__init__.py +0 -0
  26. fractal_server/tasks/v2/__init__.py +0 -0
  27. {fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a4.dist-info}/METADATA +7 -7
  28. {fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a4.dist-info}/RECORD +31 -27
  29. fractal_server/app/schemas/json_schemas/manifest.json +0 -81
  30. {fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a4.dist-info}/LICENSE +0 -0
  31. {fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a4.dist-info}/WHEEL +0 -0
  32. {fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a4.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
@@ -1 +1 @@
- __VERSION__ = "2.0.0a3"
+ __VERSION__ = "2.0.0a4"
fractal_server/app/routes/admin/__init__.py: file without changes
fractal_server/app/routes/admin/v2.py
@@ -12,6 +12,7 @@ from fastapi import HTTPException
  from fastapi import Response
  from fastapi import status
  from fastapi.responses import StreamingResponse
+ from pydantic import BaseModel
  from sqlmodel import select

  from ....config import get_settings
@@ -279,13 +280,17 @@ async def download_job_logs(
  )


+ class TaskCompatibility(BaseModel):
+ is_v2_compatible: bool
+
+
  @router_admin_v2.patch(
  "/task-v1/{task_id}/",
  status_code=status.HTTP_200_OK,
  )
  async def flag_task_v1_as_v2_compatible(
  task_id: int,
- is_v2_compatible: bool,
+ compatibility: TaskCompatibility,
  user: User = Depends(current_active_superuser),
  db: AsyncSession = Depends(get_async_db),
  ) -> Response:
@@ -297,7 +302,7 @@ async def flag_task_v1_as_v2_compatible(
  detail=f"Task {task_id} not found",
  )

- task.is_v2_compatible = is_v2_compatible
+ task.is_v2_compatible = compatibility.is_v2_compatible
  await db.commit()
  await db.close()

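This change moves the is_v2_compatible flag from a query parameter into a JSON request body, parsed by the new TaskCompatibility model. A minimal sketch of how a superuser client call changes, assuming the admin v2 router is mounted under /admin/v2 and using httpx purely for illustration (base URL and token are placeholders):

    import httpx

    BASE = "http://localhost:8000/admin/v2"  # assumed mount point
    HEADERS = {"Authorization": "Bearer <superuser-token>"}  # placeholder token

    # 2.0.0a3: the flag was a query parameter
    # httpx.patch(f"{BASE}/task-v1/42/?is_v2_compatible=true", headers=HEADERS)

    # 2.0.0a4: the flag is sent in the request body and validated as TaskCompatibility
    response = httpx.patch(
        f"{BASE}/task-v1/42/",
        json={"is_v2_compatible": True},
        headers=HEADERS,
    )
    response.raise_for_status()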
fractal_server/app/routes/api/v2/__init__.py
@@ -19,7 +19,7 @@ router_api_v2.include_router(dataset_router_v2, tags=["V2 Dataset"])
  router_api_v2.include_router(job_router_v2, tags=["V2 Job"])
  router_api_v2.include_router(images_routes_v2, tags=["V2 Images"])
  router_api_v2.include_router(project_router_v2, tags=["V2 Project"])
- router_api_v2.include_router(submit_job_router_v2, tags=["V2 Submit Job"])
+ router_api_v2.include_router(submit_job_router_v2, tags=["V2 Job"])
  router_api_v2.include_router(task_router_v2, prefix="/task", tags=["V2 Task"])
  router_api_v2.include_router(
  task_collection_router_v2, prefix="/task", tags=["V2 Task Collection"]
fractal_server/app/routes/api/v2/_aux_functions.py
@@ -39,7 +39,6 @@ async def _get_project_check_owner(
  project_id:
  user_id:
  db:
- version:

  Returns:
  The project object
@@ -382,8 +381,8 @@ async def _get_task_check_owner(
  def _get_submitted_jobs_statement() -> SelectOfScalar:
  """
  Returns:
- A sqlmodel statement that selects all `ApplyWorkflow`s with
- `ApplyWorkflow.status` equal to `submitted`.
+ A sqlmodel statement that selects all `Job`s with
+ `Job.status` equal to `submitted`.
  """
  stm = select(JobV2).where(JobV2.status == JobStatusTypeV1.SUBMITTED)
  return stm
@@ -406,11 +405,16 @@ async def _workflow_insert_task(
  Insert a new WorkflowTask into Workflow.task_list

  Args:
- task_id: TBD
- args: TBD
- meta: TBD
- order: TBD
- db: TBD
+ workflow_id:
+ task_id:
+ is_legacy_task:
+ order:
+ meta_parallel:
+ meta_non_parallel:
+ args_non_parallel:
+ args_parallel:
+ input_filters:
+ db:
  """
  db_workflow = await db.get(WorkflowV2, workflow_id)
  if db_workflow is None:
fractal_server/app/routes/api/v2/submit.py
@@ -148,7 +148,7 @@ async def apply_workflow(
  if len(user.slurm_accounts) > 0:
  job_create.slurm_account = user.slurm_accounts[0]

- # Add new ApplyWorkflow object to DB
+ # Add new Job object to DB
  job = JobV2(
  project_id=project_id,
  dataset_id=dataset_id,
@@ -192,9 +192,8 @@ async def apply_workflow(
  raise HTTPException(
  status_code=status.HTTP_429_TOO_MANY_REQUESTS,
  detail=(
- f"The endpoint 'POST /api/v2/project/{project_id}/workflow/"
- f"{workflow_id}/apply/' "
- "was called several times within an interval of less "
+ f"The endpoint 'POST /api/v2/project/{project_id}/job/submit/'"
+ " was called several times within an interval of less "
  f"than {settings.FRACTAL_API_SUBMIT_RATE_LIMIT} seconds, using"
  " the same foreign keys. If it was intentional, please wait "
  "and try again."
fractal_server/app/runner/__init__.py: file without changes
fractal_server/app/runner/executors/__init__.py: file without changes
fractal_server/app/runner/executors/slurm/executor.py
@@ -417,8 +417,6 @@ class FractalSlurmExecutor(SlurmExecutor):
  A `TaskFiles` object; if `None`, use
  `self.get_default_task_files()`.

- Returns:
- An iterator of results.
  """

  def _result_or_cancel(fut):
fractal_server/app/runner/v1/_local/__init__.py
@@ -49,7 +49,8 @@ def _process_workflow(

  Schedules the workflow using a `FractalThreadPoolExecutor`.

- Cf. [process_workflow][fractal_server.app.runner._local.process_workflow]
+ Cf.
+ [process_workflow][fractal_server.app.runner.v1._local.process_workflow]
  for the call signature.
  """

fractal_server/app/runner/v1/_slurm/__init__.py
@@ -66,7 +66,8 @@ def _process_workflow(
  workflow working dir and user to impersonate. It then schedules the
  workflow tasks and returns the output dataset metadata.

- Cf. [process_workflow][fractal_server.app.runner._local.process_workflow]
+ Cf.
+ [process_workflow][fractal_server.app.runner.v1._local.process_workflow]

  Returns:
  output_dataset_metadata: Metadata of the output dataset
@@ -132,7 +133,8 @@ async def process_workflow(
  """
  Process workflow (SLURM backend public interface)

- Cf. [process_workflow][fractal_server.app.runner._local.process_workflow]
+ Cf.
+ [process_workflow][fractal_server.app.runner.v1._local.process_workflow]
  """

  # Set values of first_task_index and last_task_index
fractal_server/app/runner/v1/_slurm/_submit_setup.py
@@ -12,7 +12,7 @@
  """
  Submodule to define _slurm_submit_setup, which is also the reference
  implementation of `submit_setup_call` in
- [fractal_server.app.runner._common][]).
+ [fractal_server.app.runner.v1._common][]).
  """
  from pathlib import Path

@@ -38,7 +38,7 @@ def _slurm_submit_setup(

  For now, this is the reference implementation for the argument
  `submit_setup_call` of
- [fractal_server.app.runner._common.execute_tasks][].
+ [fractal_server.app.runner.v1._common.execute_tasks][].

  Arguments:
  wftask:
fractal_server/app/runner/v2/__init__.py
@@ -248,7 +248,7 @@ async def submit_workflow(
  job,
  dataset,
  workflow,
- logger,
+ logger_name=logger_name,
  failed_wftask=failed_wftask,
  )
  latest_filters = assemble_filters_failed_job(job)
@@ -283,7 +283,7 @@ async def submit_workflow(
  job,
  dataset,
  workflow,
- logger,
+ logger_name=logger_name,
  )
  latest_filters = assemble_filters_failed_job(job)
  if latest_filters is not None:
@@ -313,7 +313,7 @@ async def submit_workflow(
  job,
  dataset,
  workflow,
- logger,
+ logger_name=logger_name,
  )
  latest_filters = assemble_filters_failed_job(job)
  if latest_filters is not None:
fractal_server/app/runner/v2/_local/__init__.py
@@ -45,7 +45,8 @@ def _process_workflow(

  Schedules the workflow using a `FractalThreadPoolExecutor`.

- Cf. [process_workflow][fractal_server.app.runner._local.process_workflow]
+ Cf.
+ [process_workflow][fractal_server.app.runner.v2._local.process_workflow]
  for the call signature.
  """

@@ -91,21 +92,21 @@ async def process_workflow(
  Args:
  workflow:
  The workflow to be run
- input_paths:
- The paths to the input files to pass to the first task of the
- workflow
- output_path:
- The destination path for the last task of the workflow
- input_metadata:
- Initial metadata, passed to the first task
- logger_name:
- Name of the logger to log information on the run to
+ dataset:
+ Initial dataset.
  workflow_dir:
  Working directory for this run.
  workflow_dir_user:
  Working directory for this run, on the user side. This argument is
  present for compatibility with the standard backend interface, but
  for the `local` backend it cannot be different from `workflow_dir`.
+ first_task_index:
+ Positional index of the first task to execute; if `None`, start
+ from `0`.
+ last_task_index:
+ Positional index of the last task to execute; if `None`, proceed
+ until the last task.
+ logger_name: Logger name
  slurm_user:
  Username to impersonate to run the workflow. This argument is
  present for compatibility with the standard backend interface, but
@@ -123,12 +124,6 @@ async def process_workflow(
  to the backend executor. This argument is present for compatibility
  with the standard backend interface, but is ignored in the `local`
  backend.
- first_task_index:
- Positional index of the first task to execute; if `None`, start
- from `0`.
- last_task_index:
- Positional index of the last task to execute; if `None`, proceed
- until the last task.

  Raises:
  TaskExecutionError: wrapper for errors raised during tasks' execution
fractal_server/app/runner/v2/_slurm/__init__.py
@@ -56,7 +56,8 @@ def _process_workflow(
  workflow working dir and user to impersonate. It then schedules the
  workflow tasks and returns the new dataset attributes

- Cf. [process_workflow][fractal_server.app.runner._local.process_workflow]
+ Cf.
+ [process_workflow][fractal_server.app.runner.v2._local.process_workflow]

  Returns:
  new_dataset_attributes:
@@ -112,7 +113,8 @@ async def process_workflow(
  """
  Process workflow (SLURM backend public interface)

- Cf. [process_workflow][fractal_server.app.runner._local.process_workflow]
+ Cf.
+ [process_workflow][fractal_server.app.runner.v2._local.process_workflow]
  """

  # Set values of first_task_index and last_task_index
fractal_server/app/runner/v2/_slurm/_submit_setup.py
@@ -11,8 +11,7 @@
  # Zurich.
  """
  Submodule to define _slurm_submit_setup, which is also the reference
- implementation of `submit_setup_call` in
- [fractal_server.app.runner._common][]).
+ implementation of `submit_setup_call`.
  """
  from pathlib import Path
  from typing import Literal
@@ -40,7 +39,7 @@ def _slurm_submit_setup(

  For now, this is the reference implementation for the argument
  `submit_setup_call` of
- [fractal_server.app.runner._common.execute_tasks][].
+ [fractal_server.app.runner.v2.runner][].

  Arguments:
  wftask:
fractal_server/app/runner/v2/handle_failed_job.py
@@ -32,7 +32,7 @@ def assemble_history_failed_job(
  job: JobV2,
  dataset: DatasetV2,
  workflow: WorkflowV2,
- logger: logging.Logger,
+ logger_name: Optional[str] = None,
  failed_wftask: Optional[WorkflowTaskV2] = None,
  ) -> list[dict[str, Any]]:
  """
@@ -40,12 +40,12 @@ def assemble_history_failed_job(

  Args:
  job:
- The failed `ApplyWorkflow` object.
- output_dataset:
- The `dataset` associated to `job`.
+ The failed `JobV2` object.
+ dataset:
+ The `DatasetV2` object associated to `job`.
  workflow:
- The `workflow` associated to `job`.
- logger: A logger instance.
+ The `WorkflowV2` object associated to `job`.
+ logger_name: A logger name.
  failed_wftask:
  If set, append it to `history` during step 3; if `None`, infer
  it by comparing the job task list and the one in
@@ -53,9 +53,11 @@ def assemble_history_failed_job(

  Returns:
  The new value of `history`, to be merged into
- `output_dataset.meta`.
+ `dataset.meta`.
  """

+ logger = logging.getLogger(logger_name)
+
  # The final value of the history attribute should include up to three
  # parts, coming from: the database, the temporary file, the failed-task
  # information.
fractal_server/app/schemas/_validators.py
@@ -1,6 +1,7 @@
  import os
  from datetime import datetime
  from datetime import timezone
+ from typing import Any


  def valstr(attribute: str, accept_none: bool = False):
@@ -27,6 +28,27 @@ def valstr(attribute: str, accept_none: bool = False):
  return val


+ def valdictkeys(attribute: str):
+ def val(d: dict[str, Any]):
+ """
+ Apply valstr to every key of the dictionary, and fail if there are
+ identical keys.
+ """
+ if d is not None:
+ old_keys = list(d.keys())
+ new_keys = [valstr(f"{attribute}[{key}]")(key) for key in old_keys]
+ if len(new_keys) != len(set(new_keys)):
+ raise ValueError(
+ f"Dictionary contains multiple identical keys: {d}."
+ )
+ for old_key, new_key in zip(old_keys, new_keys):
+ if new_key != old_key:
+ d[new_key] = d.pop(old_key)
+ return d
+
+ return val
+
+
  def valint(attribute: str, min_val: int = 1):
  """
  Check that an integer attribute (e.g. if it is meant to be the ID of a
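For context, valdictkeys runs valstr over every key of the dictionary and rejects dictionaries whose keys collide after validation; assuming valstr strips leading and trailing whitespace (as elsewhere in this module), keys are also normalized in place. A minimal sketch of the intended effect, using a throwaway pydantic model (ExampleModel is made up for illustration):

    from typing import Any, Optional
    from pydantic import BaseModel, validator
    from fractal_server.app.schemas._validators import valdictkeys

    class ExampleModel(BaseModel):
        meta: Optional[dict[str, Any]] = None
        # Same pattern as the validators added to the v2 schemas in this release
        _meta = validator("meta", allow_reuse=True)(valdictkeys("meta"))

    # Accepted; the key is normalized to "key" (assuming valstr strips whitespace)
    ExampleModel(meta={"key ": 1})

    # Raises a ValidationError: the two keys become identical after validation
    ExampleModel(meta={"key": 1, " key": 2})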
fractal_server/app/schemas/v1/task.py
@@ -122,6 +122,7 @@ class TaskReadV1(_TaskBaseV1):
  args_schema_version: Optional[str]
  docs_info: Optional[str]
  docs_link: Optional[HttpUrl]
+ is_v2_compatible: bool


  class TaskCreateV1(_TaskBaseV1):
fractal_server/app/schemas/v2/dumps.py
@@ -5,7 +5,7 @@ Dump models differ from their Read counterpart in that:
  * They may only include a subset of the Read attributes.

  These models are used in at least two situations:
- 1. In the "*_dump" attributes of ApplyWorkflow models;
+ 1. In the "*_dump" attributes of Job models;
  2. In the `_DatasetHistoryItem.workflowtask` model, to trim its size.
  """
  from typing import Optional
fractal_server/app/schemas/v2/task.py
@@ -9,6 +9,7 @@ from pydantic import HttpUrl
  from pydantic import root_validator
  from pydantic import validator

+ from .._validators import valdictkeys
  from .._validators import valstr


@@ -20,8 +21,8 @@ class TaskCreateV2(BaseModel, extra=Extra.forbid):
  command_parallel: Optional[str]
  source: str

- meta_parallel: Optional[dict[str, Any]]
  meta_non_parallel: Optional[dict[str, Any]]
+ meta_parallel: Optional[dict[str, Any]]
  version: Optional[str]
  args_schema_non_parallel: Optional[dict[str, Any]]
  args_schema_parallel: Optional[dict[str, Any]]
@@ -53,9 +54,28 @@ class TaskCreateV2(BaseModel, extra=Extra.forbid):
  )
  _source = validator("source", allow_reuse=True)(valstr("source"))
  _version = validator("version", allow_reuse=True)(valstr("version"))
+
+ _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
+ valdictkeys("meta_non_parallel")
+ )
+ _meta_parallel = validator("meta_parallel", allow_reuse=True)(
+ valdictkeys("meta_parallel")
+ )
+ _args_schema_non_parallel = validator(
+ "args_schema_non_parallel", allow_reuse=True
+ )(valdictkeys("args_schema_non_parallel"))
+ _args_schema_parallel = validator(
+ "args_schema_parallel", allow_reuse=True
+ )(valdictkeys("args_schema_parallel"))
  _args_schema_version = validator("args_schema_version", allow_reuse=True)(
  valstr("args_schema_version")
  )
+ _input_types = validator("input_types", allow_reuse=True)(
+ valdictkeys("input_types")
+ )
+ _output_types = validator("output_types", allow_reuse=True)(
+ valdictkeys("output_types")
+ )


  class TaskReadV2(BaseModel):
@@ -106,6 +126,12 @@ class TaskUpdateV2(BaseModel):
  _command_non_parallel = validator(
  "command_non_parallel", allow_reuse=True
  )(valstr("command_non_parallel"))
+ _input_types = validator("input_types", allow_reuse=True)(
+ valdictkeys("input_types")
+ )
+ _output_types = validator("output_types", allow_reuse=True)(
+ valdictkeys("output_types")
+ )


  class TaskImportV2(BaseModel):
fractal_server/app/schemas/v2/task_collection.py
@@ -6,6 +6,7 @@ from pydantic import BaseModel
  from pydantic import Field
  from pydantic import validator

+ from .._validators import valdictkeys
  from .._validators import valstr
  from .task import TaskReadV2

@@ -43,6 +44,9 @@ class TaskCollectPipV2(BaseModel):
  python_version: Optional[str] = None
  pinned_package_versions: Optional[dict[str, str]] = None

+ _pinned_package_versions = validator(
+ "pinned_package_versions", allow_reuse=True
+ )(valdictkeys("pinned_package_versions"))
  _package_extras = validator("package_extras", allow_reuse=True)(
  valstr("package_extras")
  )
fractal_server/app/schemas/v2/workflowtask.py
@@ -8,6 +8,7 @@ from pydantic import Field
  from pydantic import root_validator
  from pydantic import validator

+ from .._validators import valdictkeys
  from .._validators import valint
  from ..v1.task import TaskExportV1
  from ..v1.task import TaskImportV1
@@ -49,7 +50,18 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
  is_legacy_task: bool = False

  # Validators
-
+ _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
+ valdictkeys("meta_non_parallel")
+ )
+ _meta_parallel = validator("meta_parallel", allow_reuse=True)(
+ valdictkeys("meta_parallel")
+ )
+ _args_non_parallel = validator("args_non_parallel", allow_reuse=True)(
+ valdictkeys("args_non_parallel")
+ )
+ _args_parallel = validator("args_parallel", allow_reuse=True)(
+ valdictkeys("args_parallel")
+ )
  _order = validator("order", allow_reuse=True)(valint("order", min_val=0))

  @root_validator
@@ -96,14 +108,18 @@ class WorkflowTaskUpdateV2(BaseModel):
  input_filters: Optional[Filters]

  # Validators
-
- @validator("meta_parallel", "meta_non_parallel")
- def check_no_parallelisation_level(cls, m):
- if "parallelization_level" in m:
- raise ValueError(
- "Overriding task parallelization level currently not allowed"
- )
- return m
+ _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
+ valdictkeys("meta_non_parallel")
+ )
+ _meta_parallel = validator("meta_parallel", allow_reuse=True)(
+ valdictkeys("meta_parallel")
+ )
+ _args_non_parallel = validator("args_non_parallel", allow_reuse=True)(
+ valdictkeys("args_non_parallel")
+ )
+ _args_parallel = validator("args_parallel", allow_reuse=True)(
+ valdictkeys("args_parallel")
+ )


  class WorkflowTaskImportV2(BaseModel):
@@ -119,6 +135,19 @@ class WorkflowTaskImportV2(BaseModel):
  task: Optional[TaskImportV2] = None
  task_legacy: Optional[TaskImportV1] = None

+ _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
+ valdictkeys("meta_non_parallel")
+ )
+ _meta_parallel = validator("meta_parallel", allow_reuse=True)(
+ valdictkeys("meta_parallel")
+ )
+ _args_non_parallel = validator("args_non_parallel", allow_reuse=True)(
+ valdictkeys("args_non_parallel")
+ )
+ _args_parallel = validator("args_parallel", allow_reuse=True)(
+ valdictkeys("args_parallel")
+ )
+

  class WorkflowTaskExportV2(BaseModel):

fractal_server/images/models.py
@@ -6,6 +6,8 @@ from pydantic import BaseModel
  from pydantic import Field
  from pydantic import validator

+ from fractal_server.app.schemas._validators import valdictkeys
+

  class SingleImage(BaseModel):

@@ -15,6 +17,12 @@ class SingleImage(BaseModel):
  attributes: dict[str, Any] = Field(default_factory=dict)
  types: dict[str, bool] = Field(default_factory=dict)

+ # Validators
+ _attributes = validator("attributes", allow_reuse=True)(
+ valdictkeys("attributes")
+ )
+ _types = validator("types", allow_reuse=True)(valdictkeys("types"))
+
  @validator("attributes")
  def validate_attributes(
  cls, v: dict[str, Any]
@@ -36,6 +44,11 @@ class Filters(BaseModel):
  extra = "forbid"

  # Validators
+ _attributes = validator("attributes", allow_reuse=True)(
+ valdictkeys("attributes")
+ )
+ _types = validator("types", allow_reuse=True)(valdictkeys("types"))
+
  @validator("attributes")
  def validate_attributes(
  cls, v: dict[str, Any]
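The same pattern is applied here, so the attributes and types keys of SingleImage and Filters now pass through valdictkeys as well. A small sketch of the effect on Filters, assuming that (like SingleImage above) it declares both fields with empty-dict defaults:

    from fractal_server.images.models import Filters

    # Accepted: plain, unique string keys
    Filters(attributes={"plate": "plate_1"}, types={"is_3D": True})

    # Rejected: empty keys, or keys that become identical after validation
    # (e.g. "plate" and "plate " in the same dict), per valdictkeys above.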
fractal_server/tasks/v1/__init__.py: file without changes
fractal_server/tasks/v2/__init__.py: file without changes
{fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a4.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fractal-server
- Version: 2.0.0a3
+ Version: 2.0.0a4
  Summary: Server component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause
@@ -16,21 +16,21 @@ Classifier: Programming Language :: Python :: 3.12
  Provides-Extra: gunicorn
  Provides-Extra: postgres
  Requires-Dist: aiosqlite (>=0.19.0,<0.20.0)
- Requires-Dist: alembic (>=1.9.1,<2.0.0)
+ Requires-Dist: alembic (>=1.13.1,<2.0.0)
  Requires-Dist: asyncpg (>=0.29.0,<0.30.0) ; extra == "postgres"
  Requires-Dist: bcrypt (==4.0.1)
- Requires-Dist: cloudpickle (>=2.2.1,<2.3.0)
+ Requires-Dist: cloudpickle (>=3.0.0,<3.1.0)
  Requires-Dist: clusterfutures (>=0.5,<0.6)
- Requires-Dist: fastapi (>=0.109.0,<0.110.0)
+ Requires-Dist: fastapi (>=0.110.0,<0.111.0)
  Requires-Dist: fastapi-users[oauth] (>=12.1.0,<13.0.0)
  Requires-Dist: gunicorn (>=21.2.0,<22.0.0) ; extra == "gunicorn"
  Requires-Dist: packaging (>=23.2,<24.0)
  Requires-Dist: psycopg2 (>=2.9.5,<3.0.0) ; extra == "postgres"
  Requires-Dist: pydantic (>=1.10.8,<2)
- Requires-Dist: python-dotenv (>=0.21.0,<0.22.0)
+ Requires-Dist: python-dotenv (>=1.0.0,<2.0.0)
  Requires-Dist: sqlalchemy[asyncio] (>=2.0.23,<2.1)
- Requires-Dist: sqlmodel (>=0.0.14,<0.0.15)
- Requires-Dist: uvicorn (>=0.27.0,<0.28.0)
+ Requires-Dist: sqlmodel (>=0.0.16,<0.0.17)
+ Requires-Dist: uvicorn (>=0.29.0,<0.30.0)
  Project-URL: Changelog, https://github.com/fractal-analytics-platform/fractal-server/blob/main/CHANGELOG.md
  Project-URL: Documentation, https://fractal-analytics-platform.github.io/fractal-server
  Project-URL: Repository, https://github.com/fractal-analytics-platform/fractal-server
{fractal_server-2.0.0a3.dist-info → fractal_server-2.0.0a4.dist-info}/RECORD
@@ -1,4 +1,4 @@
- fractal_server/__init__.py,sha256=HWERJ6DN1N5nAIka5TF2b6cTQfjSmZGWOqkcnNTIxWs,24
+ fractal_server/__init__.py,sha256=uHs4bTZmEbcmtx5XxScG5VXDbybTf8PipF9cI0iR8qk,24
  fractal_server/__main__.py,sha256=CocbzZooX1UtGqPi55GcHGNxnrJXFg5tUU5b3wyFCyo,4958
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -21,8 +21,9 @@ fractal_server/app/models/v2/task.py,sha256=9ZPhug3VWyeqgT8wQ9_8ZXQ2crSiiicRipxr
  fractal_server/app/models/v2/workflow.py,sha256=4pSTeZC78OQbgHHC5S0ge6pK1AP6ak7Qew_0ZNM9xuw,1256
  fractal_server/app/models/v2/workflowtask.py,sha256=f2a85MSAyBAdC7oG6SR8mViMNqlomQWaIB08n3ZhT-0,2727
  fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/admin/v1.py,sha256=uY6H1znlAlrM9e1MG2EThTqwciCl87Twew34JM5W6IU,13981
- fractal_server/app/routes/admin/v2.py,sha256=RswcZ2DxRr_OPo3JJSLDFG4j2Ac6z1g5H-uiBiWXF2w,9706
+ fractal_server/app/routes/admin/v2.py,sha256=c2hk8lB9ilXvFDJ6AHv7aPd_nyaXyobH0S7CaaDqFMI,9826
  fractal_server/app/routes/api/__init__.py,sha256=EVyZrEq3I_1643QGTPCC5lgCp4xH_auYbrFfogTm4pc,315
  fractal_server/app/routes/api/v1/__init__.py,sha256=Y2HQdG197J0a7DyQEE2jn53IfxD0EHGhzK1I2JZuEck,958
  fractal_server/app/routes/api/v1/_aux_functions.py,sha256=eC5exnGj9jnJqx0ccecoNaipxDeK2ZsR1ev0syH5x-Y,11955
@@ -33,13 +34,13 @@ fractal_server/app/routes/api/v1/task.py,sha256=4zUXMtq5M95XjaZs1t9oibYHiDIwxpM-
  fractal_server/app/routes/api/v1/task_collection.py,sha256=LtOakYF30XiKo4ei7i09WSZ7u4D9pPJhhQBxHaSLr9M,8457
  fractal_server/app/routes/api/v1/workflow.py,sha256=ZObifWTPi100oRQ1wEER8Sgsr3Neo8QVdCCFQnWMNZ0,10930
  fractal_server/app/routes/api/v1/workflowtask.py,sha256=ox-DIIqYV4K35hCu86eGa2SHnR5IQml-I00UHEwnmHQ,5579
- fractal_server/app/routes/api/v2/__init__.py,sha256=wCd4eBUnZlP43uoFDKtrFMZBwDQz6pX8owGs3pdtixk,1217
- fractal_server/app/routes/api/v2/_aux_functions.py,sha256=reX1N0_jP1P86FVxkAuuDFrl0zBQRT8ozS-BuCeLv4Q,14218
+ fractal_server/app/routes/api/v2/__init__.py,sha256=0E6_ju5g8wp3TVW5frzJB2lqXEWzcRkwDkC3-WkPJYI,1210
+ fractal_server/app/routes/api/v2/_aux_functions.py,sha256=TCHf3aM-KQxaNJen10CGX1Da5IIra00xRF39FUTU698,14301
  fractal_server/app/routes/api/v2/dataset.py,sha256=qQi9jfT9YLu6DrRCPh280J3MoFWs9yMiejkCNaauCyQ,9680
  fractal_server/app/routes/api/v2/images.py,sha256=b1NM9Y0ocuRYRec-3UcVAizB0vFkmzPEHfObaoCnIMY,5956
  fractal_server/app/routes/api/v2/job.py,sha256=9mXaKCX_N3FXM0GIxdE49nWl_hJZ8CBLBIaMMhaCKOM,5334
  fractal_server/app/routes/api/v2/project.py,sha256=i9a19HAqE36N92G60ZYgObIP9nv-hR7Jt5nd9Dkhz1g,6024
- fractal_server/app/routes/api/v2/submit.py,sha256=I8asPxY3KUogLbeDi0uPNbVLQBunOwMHCp1fbTYmdyg,7219
+ fractal_server/app/routes/api/v2/submit.py,sha256=egu5jE93sU7sRu3x_4Rp9t3uUJFATK4dwj4Pl3iJPN0,7171
  fractal_server/app/routes/api/v2/task.py,sha256=gJ0LruSk-Q1iMw8ZOX8C0wrZ4S4DGlQTr_5SdJJud0Q,7130
  fractal_server/app/routes/api/v2/task_collection.py,sha256=kxSOOSsTFq2w1SeDwMeX6mSDPYbH5Uds18xpdLU5kTo,8466
  fractal_server/app/routes/api/v2/workflow.py,sha256=sw-1phO_rrmDAcWX9Zqb9M8SfrWF78-02AuLB1-D1PU,11845
@@ -49,41 +50,43 @@ fractal_server/app/routes/aux/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5N
  fractal_server/app/routes/aux/_job.py,sha256=5gKgvArAruSkMQuPN34Vvzi89WJbwWPsx0oDAa_iXu4,1248
  fractal_server/app/routes/aux/_runner.py,sha256=psW6fsoo_VrAHrD5UQPbqFYikCp0m16VRymC-U1yUTk,675
  fractal_server/app/runner/.gitignore,sha256=ytzN_oyHWXrGU7iFAtoHSTUbM6Rn6kG0Zkddg0xZk6s,16
+ fractal_server/app/runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/runner/async_wrap.py,sha256=_O6f8jftKYXG_DozkmlrDBhoiK9QhE9MablOyECq2_M,829
  fractal_server/app/runner/components.py,sha256=ZF8ct_Ky5k8IAcrmpYOZ-bc6OBgdELEighYVqFDEbZg,119
  fractal_server/app/runner/exceptions.py,sha256=_qZ_t8O4umAdJ1ikockiF5rDJuxnEskrGrLjZcnQl7A,4159
+ fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/runner/executors/slurm/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
  fractal_server/app/runner/executors/slurm/_batching.py,sha256=1P6CgrAOCK9u_EvNFTumcQ-PcZMpocCaSAyNr0YB1js,8841
  fractal_server/app/runner/executors/slurm/_check_jobs_status.py,sha256=8d29a7DQ2xoWxoFQCnFfTpHER-qBX8mEatl4Dw5HU_o,1908
  fractal_server/app/runner/executors/slurm/_executor_wait_thread.py,sha256=J3tjAx33nBgW4eHAXDte7hDs7Oe9FLEZaElEt8inrbg,4421
  fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=rF37XDImX1QoWx37MC5hSM9AuY_KfHU5gaWwN4vl4Zk,15552
  fractal_server/app/runner/executors/slurm/_subprocess_run_as_user.py,sha256=8CCtxWCuB5UDst3C_WJxBU77xwPrpDyq7iMCZMnodXU,5123
- fractal_server/app/runner/executors/slurm/executor.py,sha256=iqp73KdTPCnVtMA-FeEGXxtnYuzz-UB_7UnhNYWqIaU,44504
+ fractal_server/app/runner/executors/slurm/executor.py,sha256=O9h6ZPAKM95BUJrZkHCdFJZrw2zR2XmxeB5fCoGp97w,44451
  fractal_server/app/runner/executors/slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77JRK82g_2hx0iBKTiMghuIo,5852
  fractal_server/app/runner/filenames.py,sha256=9lwu3yB4C67yiijYw8XIKaLFn3mJUt6_TCyVFM_aZUQ,206
  fractal_server/app/runner/set_start_and_last_task_index.py,sha256=-q4zVybAj8ek2XlbENKlfOAJ39hT_zoJoZkqzDqiAMY,1254
  fractal_server/app/runner/task_files.py,sha256=c5mggMy7BIK_yBUvbimFgvKFZPKKDu6RRfWepwinBVk,3219
  fractal_server/app/runner/v1/__init__.py,sha256=meqMG2UejFa_1hm5xlsmkDxsM7Y_hqftsexuteQXOrE,13608
  fractal_server/app/runner/v1/_common.py,sha256=hlSh-lUWbDCWP2k4isswoU9hh7huoT2Zy7cEwjXwnzk,21238
- fractal_server/app/runner/v1/_local/__init__.py,sha256=CMKYo01skbGKCc2UHp7HDe4-uu7EKS50a6tSllwCxNk,6919
+ fractal_server/app/runner/v1/_local/__init__.py,sha256=8PjeyPLvj6KHdZ3HyzWZCdlrubgedA1hZLXGAsLNOKI,6926
  fractal_server/app/runner/v1/_local/_local_config.py,sha256=hM7SPxR07luXPcXdrWXRpEB2uOyjSSRUdqW3QBKJn9c,3147
  fractal_server/app/runner/v1/_local/_submit_setup.py,sha256=kvNPT7ey2mEamORzPMMVThbFHtzZcSr-0A9tYw9uVDA,1493
  fractal_server/app/runner/v1/_local/executor.py,sha256=QrJlD77G6q4WohoJQO7XXbvi2RlCUsNvMnPDEZIoAqA,3620
- fractal_server/app/runner/v1/_slurm/__init__.py,sha256=ohvDMhzOOzk1Qe1W3wY1MGxCQGRKmCleQV45EO1rVZc,10839
- fractal_server/app/runner/v1/_slurm/_submit_setup.py,sha256=UoPzhxN86FeIRXJlWouulBKoguNFaOv2j_s3-9MwXCs,2732
+ fractal_server/app/runner/v1/_slurm/__init__.py,sha256=KN98RO8E3EG4MLNFa--D3DilRHjUyHrVicC6pHtu5L0,10853
+ fractal_server/app/runner/v1/_slurm/_submit_setup.py,sha256=llTgSOCnCVMvm7Q0SoVpLZshorAOZZUDz927ij0LZEA,2738
  fractal_server/app/runner/v1/_slurm/get_slurm_config.py,sha256=6TLWQon8hSicsD7c3yXK4P9xeId0s_H3HOOeMUVGVss,5977
  fractal_server/app/runner/v1/common.py,sha256=_L-vjLnWato80VdlB_BFN4G8P4jSM07u-5cnl1T3S34,3294
  fractal_server/app/runner/v1/handle_failed_job.py,sha256=bHzScC_aIlU3q-bQxGW6rfWV4xbZ2tho_sktjsAs1no,4684
- fractal_server/app/runner/v2/__init__.py,sha256=xHSI2eoalyEtjDcFIFJdYMZywOM0b9Tj-lkL40H77u0,12431
- fractal_server/app/runner/v2/_local/__init__.py,sha256=wTXCiNRG6WkWPw79tvguOCOVvMcXt5vnVwUCrPspVss,6163
+ fractal_server/app/runner/v2/__init__.py,sha256=RwIOSLCChMZWHix5QuUNRPtRwgf1UmFDk3YufRCTOoc,12482
+ fractal_server/app/runner/v2/_local/__init__.py,sha256=Q1s-DwXleUq6w1ZNv6tlh3tZv6cyBqxB_hMvZlqVYaM,5881
  fractal_server/app/runner/v2/_local/_local_config.py,sha256=lR0Js-l63mQUzN9hK0HkfdLsrTf-W6GHvPvbPC64amY,3630
  fractal_server/app/runner/v2/_local/_submit_setup.py,sha256=deagsLSy6A3ZHKaSDcQqrdvbQVM3i4kgyTcbVc0tC5U,1614
  fractal_server/app/runner/v2/_local/executor.py,sha256=QrJlD77G6q4WohoJQO7XXbvi2RlCUsNvMnPDEZIoAqA,3620
- fractal_server/app/runner/v2/_slurm/__init__.py,sha256=CJgbawVty4gvBitIjcl2JgfE4FCnDHWCJJfNF4YjH8s,4395
- fractal_server/app/runner/v2/_slurm/_submit_setup.py,sha256=3zK_GYN5ou_HFLP9N2--Nf-XUULqSWMRk1MIr2tL9-A,2847
+ fractal_server/app/runner/v2/_slurm/__init__.py,sha256=srxn5-KdQxqD8cWJmOJlSoctbXYlyCMM249xWGY9bhI,4409
+ fractal_server/app/runner/v2/_slurm/_submit_setup.py,sha256=tsZHQdVy3VxENMdsBzHltrVWzugBppq0cFrHtaVzoUA,2793
  fractal_server/app/runner/v2/_slurm/get_slurm_config.py,sha256=sqP-hs58TPt849rx10VRFKWX_DgLDPQcKZJcE0zKBXs,6621
  fractal_server/app/runner/v2/deduplicate_list.py,sha256=UShgbFy8d8elUE5sa1_jLDqQWip4Bi21VDhcFFM0fpU,571
- fractal_server/app/runner/v2/handle_failed_job.py,sha256=t4MjRH_7OhDMqZHP5UeZJ9_RlIJVj-F5VYtl34JBXO8,5149
+ fractal_server/app/runner/v2/handle_failed_job.py,sha256=fipRJT5Y8UY0US4bXUX-4ORTAQ1AetZcCAOVCjDO3_c,5202
  fractal_server/app/runner/v2/merge_outputs.py,sha256=IHuHqbKmk97K35BFvTrKVBs60z3e_--OzXTnsvmA02c,1281
  fractal_server/app/runner/v2/runner.py,sha256=hLLGE6wD8nVSFWui0LlNTqn63WYYCPFeRvIAn4sBLlU,11119
  fractal_server/app/runner/v2/runner_functions.py,sha256=LfO1-FJF70_Qh78NQTCHJWyzyr011wvvtnzB6nTj5ZM,10087
@@ -91,8 +94,7 @@ fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=Pp3hsj1i1t4ExD
  fractal_server/app/runner/v2/task_interface.py,sha256=3M0xDaARCVCD3yX2-N6YaFsYwZwsPpCKqAFMp0YqRDA,1376
  fractal_server/app/runner/v2/v1_compat.py,sha256=6UijuRYbB2ry2mM073u1fW4CSTeelB11lmoj_TOGtm4,511
  fractal_server/app/schemas/__init__.py,sha256=VL55f3CTFngXHYkOsFaLBEEkEEewEWI5ODlcGTI7cqA,157
- fractal_server/app/schemas/_validators.py,sha256=s9a6AX4-3Vfoy1Y_HMQA3lXm4FLdmnODYUD4lfsJr6w,2549
- fractal_server/app/schemas/json_schemas/manifest.json,sha256=yXYKHbYXPYSkSXMTLfTpfCUGBtmQuPTk1xuSXscdba4,1787
+ fractal_server/app/schemas/_validators.py,sha256=Pdff5plJJmoUTf_nZpMA24tZlFJb84EdRSnLwRZDxfE,3264
  fractal_server/app/schemas/state.py,sha256=t4XM04aqxeluh8MfvD7LfEc-8-dOmUVluZHhLsfxxkc,692
  fractal_server/app/schemas/user.py,sha256=rE8WgBz-ceVUs0Sz2ZwcjUrSTZTnS0ys5SBtD2XD9r8,3113
  fractal_server/app/schemas/v1/__init__.py,sha256=gZLfkANl4YtZ7aV3PFoUj5w0m1-riQv9iRomJhZRLZo,2078
@@ -101,24 +103,24 @@ fractal_server/app/schemas/v1/dataset.py,sha256=n71lNUO3JLy2K3IM9BZM2Fk1EnKQOTU7
  fractal_server/app/schemas/v1/dumps.py,sha256=67VXnyLh_0Ufo7rPM2jZ9P9rk0CnYcVAkilx_cLX6sg,1274
  fractal_server/app/schemas/v1/manifest.py,sha256=Yht7guhs0Pcl2U0RMOCbI_UHBZ9YO_YU0H8hxACx3TY,3829
  fractal_server/app/schemas/v1/project.py,sha256=TO2TjI4m9FO-A9IB9lUCld7E4Ld0k4MacLcyA9j6Qi4,1218
- fractal_server/app/schemas/v1/task.py,sha256=7BxOZ_qoRQ8n3YbQpDvB7VMcxB5fSYQmR5RLIWhuJ5U,3704
+ fractal_server/app/schemas/v1/task.py,sha256=8L9CBW5kPO4l9EVYldgm1tWDokONyTGcLq53RmVmabM,3731
  fractal_server/app/schemas/v1/task_collection.py,sha256=uvq9bcMaGD_qHsh7YtcpoSAkVAbw12eY4DocIO3MKOg,3057
  fractal_server/app/schemas/v1/workflow.py,sha256=tuOs5E5Q_ozA8if7YPZ07cQjzqB_QMkBS4u92qo4Ro0,4618
  fractal_server/app/schemas/v2/__init__.py,sha256=U3WXzQ1o26dSq3jR8n0rA-Zsq6uIpoN3oCKPOABytvA,1704
  fractal_server/app/schemas/v2/dataset.py,sha256=ThUwme1uVhamZhlvlN0873bTDTbhTaoFanQBlgp0F5k,1839
- fractal_server/app/schemas/v2/dumps.py,sha256=bEhfJrxBon1NzqS3bha16pj-59qPY_VWk_vOjsXsvAo,2047
+ fractal_server/app/schemas/v2/dumps.py,sha256=CPJ5hS5z6S0sPJ5frAMe7yFvF5Yv76c07jiddqQpRyU,2037
  fractal_server/app/schemas/v2/job.py,sha256=zfF9K3v4jWUJ7M482ta2CkqUJ4tVT4XfVt60p9IRhP0,3250
  fractal_server/app/schemas/v2/manifest.py,sha256=N37IWohcfO3_y2l8rVM0h_1nZq7m4Izxk9iL1vtwBJw,6243
  fractal_server/app/schemas/v2/project.py,sha256=Okm9n4KqUUs8oxFo6yIV3Y_4mJznLeKCI2ccjY0X8Vo,814
- fractal_server/app/schemas/v2/task.py,sha256=vZPIsqBVM9RJDkk81EvJQQhQa-LNSh5YGdP-KM9AKgs,3607
- fractal_server/app/schemas/v2/task_collection.py,sha256=Jk-r3f2RIHRAXbej9xnz_WsPrIrod1P_FIWK1iEVkes,2993
+ fractal_server/app/schemas/v2/task.py,sha256=xQfQxL2h-Vw0YL3yEiYvVIXTybE1lyRE0pPUu59nZes,4574
+ fractal_server/app/schemas/v2/task_collection.py,sha256=sY29NQfJrbjiidmVkVjSIH-20wIsmh7G1QOdr05KoDQ,3171
  fractal_server/app/schemas/v2/workflow.py,sha256=KnzsuTQZ8S1wwoRDY3poWTnO3GbogFTLqCoBJNYzIFU,1831
- fractal_server/app/schemas/v2/workflowtask.py,sha256=Cg9h2RlkMGISeHXNtW0X_SUJFO0azLBX3_NNrm5WDOQ,3903
+ fractal_server/app/schemas/v2/workflowtask.py,sha256=vRyPca8smu6fzwd9gO1eOd3qdPLJ-Zq2AAAbSLCou3I,5051
  fractal_server/app/security/__init__.py,sha256=wxosoHc3mJYPCdPMyWnRD8w_2OgnKYp2aDkdmwrZh5k,11203
  fractal_server/config.py,sha256=CA8ASObADaME5chDiBXawAJZ3MvjTRpCKP0jvdYtSh8,15080
  fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
  fractal_server/images/__init__.py,sha256=JnTf7TflCdTbhcMHi12s3CJhEtuAXNulwauUU1wDpp0,88
- fractal_server/images/models.py,sha256=Aj_U-IGjVFSbubMxo7pwlwa5IuHyn_HdtBiEEvddCss,1540
+ fractal_server/images/models.py,sha256=6V4saVpxLKC6bMVwkxMbtxyOFWOFJBylbyE-kHCv7KM,1969
  fractal_server/images/tools.py,sha256=Q7jM60r_jq5bttrt1b4bU29n717RSUMMPbAbAkzWjgw,2234
  fractal_server/logger.py,sha256=95duXY8eSxf1HWg0CVn8SUGNzgJw9ZR0FlapDDF6WAY,3924
  fractal_server/main.py,sha256=7CpwPfCsHxBAo5fWuXPCsYOFCpbBI0F7Z0jsgCQdou8,3001
@@ -147,14 +149,16 @@ fractal_server/tasks/__init__.py,sha256=kadmVUoIghl8s190_Tt-8f-WBqMi8u8oU4Pvw39N
  fractal_server/tasks/endpoint_operations.py,sha256=D1WSJd8dIfIumKezon1NYX5a0QNPqqlbj9uRq-ur9CQ,5379
  fractal_server/tasks/utils.py,sha256=R1_SKfXTwveT7CJJOrvkwi0vNpr9MBIiNh7qv8EK3Wc,3278
  fractal_server/tasks/v1/_TaskCollectPip.py,sha256=16Gn8lVYHBuwNLBHdcdx0X8s9QXXsbfPwSzcCcM6fRg,3775
+ fractal_server/tasks/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/tasks/v1/background_operations.py,sha256=T5L-ghgGEJIGcGoZB_r0cjH96UkEfAPkhr2ciTSaQlQ,11725
  fractal_server/tasks/v1/get_collection_data.py,sha256=bi9tuApLgoKZNMIG1kR4GoKI9S6Y040gFfNQapw4ikM,502
  fractal_server/tasks/v2/_TaskCollectPip.py,sha256=QeCqXDgOnMjk3diVlC5bgGEywyQjYFm5637Rke49vJY,3775
+ fractal_server/tasks/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/tasks/v2/background_operations.py,sha256=zr6j3uoWmCeW2EA9auxWNZ0sG3SHgSxUVTC1OpQXE3Y,12803
  fractal_server/tasks/v2/get_collection_data.py,sha256=Qhf2T_aaqAfqu9_KpUSlXsS7EJoZQbEPEreHHa2jco8,502
  fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
- fractal_server-2.0.0a3.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.0.0a3.dist-info/METADATA,sha256=5eETQf0NY1w6uZqbwgmgaLfqUzKyP74Y9vTI9VwUgOY,4205
- fractal_server-2.0.0a3.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
- fractal_server-2.0.0a3.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-2.0.0a3.dist-info/RECORD,,
+ fractal_server-2.0.0a4.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.0.0a4.dist-info/METADATA,sha256=IVuqu0v4f5PnP2gq8m5Bk1deyKcHqQqIdCdhM8BRkc0,4204
+ fractal_server-2.0.0a4.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+ fractal_server-2.0.0a4.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-2.0.0a4.dist-info/RECORD,,
fractal_server/app/schemas/json_schemas/manifest.json
@@ -1,81 +0,0 @@
- {
- "title": "ManifestV1",
- "description": "Manifest schema version 1.\n\nAttributes:\n task_list:",
- "type": "object",
- "properties": {
- "manifest_version": {
- "title": "Manifest Version",
- "type": "string"
- },
- "task_list": {
- "title": "Task List",
- "type": "array",
- "items": {
- "$ref": "#/definitions/TaskManifestV1"
- }
- },
- "has_args_schemas": {
- "title": "Has Args Schemas",
- "default": false,
- "type": "boolean"
- },
- "args_schema_version": {
- "title": "Args Schema Version",
- "type": "string"
- }
- },
- "required": [
- "manifest_version",
- "task_list"
- ],
- "definitions": {
- "TaskManifestV1": {
- "title": "TaskManifestV1",
- "description": "Task manifest schema version 1.",
- "type": "object",
- "properties": {
- "name": {
- "title": "Name",
- "type": "string"
- },
- "executable": {
- "title": "Executable",
- "type": "string"
- },
- "input_type": {
- "title": "Input Type",
- "type": "string"
- },
- "output_type": {
- "title": "Output Type",
- "type": "string"
- },
- "meta": {
- "title": "Meta",
- "type": "object"
- },
- "args_schema": {
- "title": "Args Schema",
- "type": "object"
- },
- "docs_info": {
- "title": "Docs Info",
- "type": "string"
- },
- "docs_link": {
- "title": "Docs Link",
- "minLength": 1,
- "maxLength": 2083,
- "format": "uri",
- "type": "string"
- }
- },
- "required": [
- "name",
- "executable",
- "input_type",
- "output_type"
- ]
- }
- }
- }