fractal-server 2.0.0a9__py3-none-any.whl → 2.0.0a11__py3-none-any.whl

This diff compares two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
--- a/fractal_server/__init__.py
+++ b/fractal_server/__init__.py
@@ -1 +1 @@
-__VERSION__ = "2.0.0a9"
+__VERSION__ = "2.0.0a11"
--- a/fractal_server/app/models/__init__.py
+++ b/fractal_server/app/models/__init__.py
@@ -2,6 +2,5 @@
 `models` module
 """
 from .security import *  # noqa: F401, F403
-from .state import State  # noqa: F401
 from .v1 import *  # noqa: F401, F403
 from .v2 import *  # noqa: F401, F403
--- a/fractal_server/app/models/v1/__init__.py
+++ b/fractal_server/app/models/v1/__init__.py
@@ -6,6 +6,7 @@ from .dataset import Resource  # noqa: F401
 from .job import ApplyWorkflow  # noqa: F403, F401
 from .job import JobStatusTypeV1  # noqa: F401, F403
 from .project import Project  # noqa: F403, F401
+from .state import State  # noqa: F403, F401
 from .task import Task  # noqa: F403, F401
 from .workflow import Workflow  # noqa: F401, F403
 from .workflow import WorkflowTask  # noqa: F401, F403
--- a/fractal_server/app/models/state.py
+++ b/fractal_server/app/models/v1/state.py
@@ -8,8 +8,8 @@ from sqlalchemy.types import JSON
 from sqlmodel import Field
 from sqlmodel import SQLModel
 
-from ...utils import get_timestamp
-from ..schemas.v1 import _StateBase
+from ....utils import get_timestamp
+from ...schemas.v1 import _StateBase
 
 
 class State(_StateBase, SQLModel, table=True):
--- a/fractal_server/app/models/v2/__init__.py
+++ b/fractal_server/app/models/v2/__init__.py
@@ -2,6 +2,7 @@
 v2 `models` module
 """
 from ..linkuserproject import LinkUserProjectV2
+from .collection_state import CollectionStateV2
 from .dataset import DatasetV2
 from .job import JobV2
 from .project import ProjectV2
@@ -14,6 +15,7 @@ __all__ = [
     "DatasetV2",
     "JobV2",
     "ProjectV2",
+    "CollectionStateV2",
     "TaskV2",
     "WorkflowTaskV2",
     "WorkflowV2",
--- /dev/null
+++ b/fractal_server/app/models/v2/collection_state.py
@@ -0,0 +1,21 @@
+from datetime import datetime
+from typing import Any
+from typing import Optional
+
+from sqlalchemy import Column
+from sqlalchemy.types import DateTime
+from sqlalchemy.types import JSON
+from sqlmodel import Field
+from sqlmodel import SQLModel
+
+from ....utils import get_timestamp
+
+
+class CollectionStateV2(SQLModel, table=True):
+
+    id: Optional[int] = Field(default=None, primary_key=True)
+    data: dict[str, Any] = Field(sa_column=Column(JSON), default={})
+    timestamp: datetime = Field(
+        default_factory=get_timestamp,
+        sa_column=Column(DateTime(timezone=True)),
+    )
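The new CollectionStateV2 table replaces the shared V1 `State` table as the store for V2 task-collection status. A minimal usage sketch of the model (hedged: the in-memory SQLite engine below is illustrative only, not how fractal-server configures its database):

from sqlmodel import Session, SQLModel, create_engine

from fractal_server.app.models.v2 import CollectionStateV2

# Illustrative engine; fractal-server manages its own database setup.
engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

with Session(engine) as session:
    # `data` holds the JSON-serializable collection status, while
    # `timestamp` is filled in by the `get_timestamp` default factory.
    state = CollectionStateV2(data={"status": "pending"})
    session.add(state)
    session.commit()
    session.refresh(state)
    print(state.id, state.timestamp)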
--- a/fractal_server/app/models/v2/job.py
+++ b/fractal_server/app/models/v2/job.py
@@ -9,7 +9,7 @@ from sqlmodel import Field
 from sqlmodel import SQLModel
 
 from ....utils import get_timestamp
-from ...schemas.v1 import JobStatusTypeV1
+from ...schemas.v2 import JobStatusTypeV2
 
 
 class JobV2(SQLModel, table=True):
@@ -47,5 +47,5 @@ class JobV2(SQLModel, table=True):
     end_timestamp: Optional[datetime] = Field(
         default=None, sa_column=Column(DateTime(timezone=True))
     )
-    status: str = JobStatusTypeV1.SUBMITTED
+    status: str = JobStatusTypeV2.SUBMITTED
     log: Optional[str] = None
--- a/fractal_server/app/routes/admin/v2.py
+++ b/fractal_server/app/routes/admin/v2.py
@@ -20,13 +20,13 @@ from ....syringe import Inject
 from ....utils import get_timestamp
 from ...db import AsyncSession
 from ...db import get_async_db
-from ...models import JobStatusTypeV1
 from ...models.security import UserOAuth as User
 from ...models.v1 import Task
 from ...models.v2 import JobV2
 from ...models.v2 import ProjectV2
 from ...runner.filenames import WORKFLOW_LOG_FILENAME
 from ...schemas.v2 import JobReadV2
+from ...schemas.v2 import JobStatusTypeV2
 from ...schemas.v2 import JobUpdateV2
 from ...schemas.v2 import ProjectReadV2
 from ...security import current_active_superuser
@@ -90,7 +90,7 @@ async def view_job(
     project_id: Optional[int] = None,
     dataset_id: Optional[int] = None,
     workflow_id: Optional[int] = None,
-    status: Optional[JobStatusTypeV1] = None,
+    status: Optional[JobStatusTypeV2] = None,
     start_timestamp_min: Optional[datetime] = None,
     start_timestamp_max: Optional[datetime] = None,
    end_timestamp_min: Optional[datetime] = None,
@@ -175,7 +175,7 @@ async def view_single_job(
     )
     await db.close()
 
-    if show_tmp_logs and (job.status == JobStatusTypeV1.SUBMITTED):
+    if show_tmp_logs and (job.status == JobStatusTypeV2.SUBMITTED):
         try:
             with open(f"{job.working_dir}/{WORKFLOW_LOG_FILENAME}", "r") as f:
                 job.log = f.read()
@@ -208,7 +208,7 @@ async def update_job(
             detail=f"Job {job_id} not found",
         )
 
-    if job_update.status != JobStatusTypeV1.FAILED:
+    if job_update.status != JobStatusTypeV2.FAILED:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
             detail=f"Cannot set job status to {job_update.status}",
--- a/fractal_server/app/routes/api/v2/__init__.py
+++ b/fractal_server/app/routes/api/v2/__init__.py
@@ -7,6 +7,7 @@ from .dataset import router as dataset_router_v2
 from .images import router as images_routes_v2
 from .job import router as job_router_v2
 from .project import router as project_router_v2
+from .status import router as status_router_v2
 from .submit import router as submit_job_router_v2
 from .task import router as task_router_v2
 from .task_collection import router as task_collection_router_v2
@@ -30,3 +31,4 @@ router_api_v2.include_router(
 )
 router_api_v2.include_router(workflow_router_v2, tags=["V2 Workflow"])
 router_api_v2.include_router(workflowtask_router_v2, tags=["V2 WorkflowTask"])
+router_api_v2.include_router(status_router_v2, tags=["V2 Status"])
--- a/fractal_server/app/routes/api/v2/_aux_functions.py
+++ b/fractal_server/app/routes/api/v2/_aux_functions.py
@@ -21,7 +21,7 @@ from ....models.v2 import ProjectV2
 from ....models.v2 import TaskV2
 from ....models.v2 import WorkflowTaskV2
 from ....models.v2 import WorkflowV2
-from ....schemas.v1 import JobStatusTypeV1
+from ....schemas.v2 import JobStatusTypeV2
 from ....security import User
 from fractal_server.images import Filters
 
@@ -384,7 +384,7 @@ def _get_submitted_jobs_statement() -> SelectOfScalar:
         A sqlmodel statement that selects all `Job`s with
         `Job.status` equal to `submitted`.
     """
-    stm = select(JobV2).where(JobV2.status == JobStatusTypeV1.SUBMITTED)
+    stm = select(JobV2).where(JobV2.status == JobStatusTypeV2.SUBMITTED)
     return stm
 
 
--- a/fractal_server/app/routes/api/v2/dataset.py
+++ b/fractal_server/app/routes/api/v2/dataset.py
@@ -1,5 +1,3 @@
-import json
-from pathlib import Path
 from typing import Optional
 
 from fastapi import APIRouter
@@ -19,15 +17,11 @@ from ....schemas.v2 import DatasetReadV2
 from ....schemas.v2 import DatasetUpdateV2
 from ....schemas.v2.dataset import DatasetExportV2
 from ....schemas.v2.dataset import DatasetImportV2
-from ....schemas.v2.dataset import DatasetStatusReadV2
-from ....schemas.v2.dataset import WorkflowTaskStatusTypeV2
 from ....security import current_active_user
 from ....security import User
 from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_project_check_owner
 from ._aux_functions import _get_submitted_jobs_statement
-from ._aux_functions import _get_workflow_check_owner
-from fractal_server.app.runner.filenames import HISTORY_FILENAME
 
 router = APIRouter()
 
@@ -228,100 +222,6 @@ async def get_user_datasets(
     return dataset_list
 
 
-@router.get(
-    "/project/{project_id}/dataset/{dataset_id}/status/",
-    response_model=DatasetStatusReadV2,
-)
-async def get_workflowtask_status(
-    project_id: int,
-    dataset_id: int,
-    user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_async_db),
-) -> Optional[DatasetStatusReadV2]:
-    """
-    Extract the status of all `WorkflowTask`s that ran on a given `DatasetV2`.
-    """
-    # Get the dataset DB entry
-    output = await _get_dataset_check_owner(
-        project_id=project_id,
-        dataset_id=dataset_id,
-        user_id=user.id,
-        db=db,
-    )
-    dataset = output["dataset"]
-
-    # Check whether there exists a job such that
-    # 1. `job.dataset_id == dataset_id`, and
-    # 2. `job.status` is submitted
-    # If one such job exists, it will be used later. If there are multiple
-    # jobs, raise an error.
-    stm = _get_submitted_jobs_statement().where(JobV2.dataset_id == dataset_id)
-    res = await db.execute(stm)
-    running_jobs = res.scalars().all()
-    if len(running_jobs) == 0:
-        running_job = None
-    elif len(running_jobs) == 1:
-        running_job = running_jobs[0]
-    else:
-        string_ids = str([job.id for job in running_jobs])[1:-1]
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(
-                f"Cannot get WorkflowTaskV2 statuses as DatasetV2 {dataset.id}"
-                f" is linked to multiple active jobs: {string_ids}."
-            ),
-        )
-
-    # Initialize empty dictionary for WorkflowTaskV2 status
-    workflow_tasks_status_dict: dict = {}
-
-    # Lowest priority: read status from DB, which corresponds to jobs that are
-    # not running
-    history = dataset.history
-    for history_item in history:
-        wftask_id = history_item["workflowtask"]["id"]
-        wftask_status = history_item["status"]
-        workflow_tasks_status_dict[wftask_id] = wftask_status
-
-    # If a job is running, then gather more up-to-date information
-    if running_job is not None:
-        # Get the workflow DB entry
-        running_workflow = await _get_workflow_check_owner(
-            project_id=project_id,
-            workflow_id=running_job.workflow_id,
-            user_id=user.id,
-            db=db,
-        )
-        # Mid priority: Set all WorkflowTask's that are part of the running job
-        # as "submitted"
-        start = running_job.first_task_index
-        end = running_job.last_task_index + 1
-        for wftask in running_workflow.task_list[start:end]:
-            workflow_tasks_status_dict[
-                wftask.id
-            ] = WorkflowTaskStatusTypeV2.SUBMITTED
-
-        # Highest priority: Read status updates coming from the running-job
-        # temporary file. Note: this file only contains information on
-        # # WorkflowTask's that ran through successfully.
-        tmp_file = Path(running_job.working_dir) / HISTORY_FILENAME
-        try:
-            with tmp_file.open("r") as f:
-                history = json.load(f)
-        except FileNotFoundError:
-            history = []
-        for history_item in history:
-            wftask_id = history_item["workflowtask"]["id"]
-            wftask_status = history_item["status"]
-            workflow_tasks_status_dict[wftask_id] = wftask_status
-
-    response_body = DatasetStatusReadV2(status=workflow_tasks_status_dict)
-    return response_body
-
-
-# /api/v2/project/{project_id}/dataset/{dataset_id}/export/
-
-
 @router.get(
     "/project/{project_id}/dataset/{dataset_id}/export/",
     response_model=DatasetExportV2,
--- /dev/null
+++ b/fractal_server/app/routes/api/v2/status.py
@@ -0,0 +1,150 @@
+import json
+from pathlib import Path
+from typing import Optional
+
+from fastapi import APIRouter
+from fastapi import Depends
+from fastapi import HTTPException
+from fastapi import status
+
+from ....db import AsyncSession
+from ....db import get_async_db
+from ....models.v2 import JobV2
+from ....schemas.v2.dataset import WorkflowTaskStatusTypeV2
+from ....schemas.v2.status import StatusReadV2
+from ....security import current_active_user
+from ....security import User
+from ._aux_functions import _get_dataset_check_owner
+from ._aux_functions import _get_submitted_jobs_statement
+from ._aux_functions import _get_workflow_check_owner
+from fractal_server.app.runner.filenames import HISTORY_FILENAME
+
+router = APIRouter()
+
+
+@router.get(
+    "/project/{project_id}/status/",
+    response_model=StatusReadV2,
+)
+async def get_workflowtask_status(
+    project_id: int,
+    dataset_id: int,
+    workflow_id: int,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> Optional[StatusReadV2]:
+    """
+    Extract the status of all `WorkflowTaskV2` of a given `WorkflowV2` that ran
+    on a given `DatasetV2`.
+
+    *NOTE*: the current endpoint is not guaranteed to provide consistent
+    results if the workflow task list is modified in a non-trivial way
+    (that is, by adding intermediate tasks, removing tasks, or changing their
+    order). See fractal-server GitHub issues: 793, 1083.
+    """
+    # Get the dataset DB entry
+    output = await _get_dataset_check_owner(
+        project_id=project_id,
+        dataset_id=dataset_id,
+        user_id=user.id,
+        db=db,
+    )
+    dataset = output["dataset"]
+
+    # Get the workflow DB entry
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    # Check whether there exists a submitted job associated to this
+    # workflow/dataset pair. If it does exist, it will be used later.
+    # If there are multiple jobs, raise an error.
+    stm = _get_submitted_jobs_statement()
+    stm = stm.where(JobV2.dataset_id == dataset_id)
+    stm = stm.where(JobV2.workflow_id == workflow_id)
+    res = await db.execute(stm)
+    running_jobs = res.scalars().all()
+    if len(running_jobs) == 0:
+        running_job = None
+    elif len(running_jobs) == 1:
+        running_job = running_jobs[0]
+    else:
+        string_ids = str([job.id for job in running_jobs])[1:-1]
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                f"Cannot get WorkflowTaskV2 statuses as DatasetV2 {dataset.id}"
+                f" is linked to multiple active jobs: {string_ids}."
+            ),
+        )
+
+    # Initialize empty dictionary for WorkflowTaskV2 status
+    workflow_tasks_status_dict: dict = {}
+
+    # Lowest priority: read status from DB, which corresponds to jobs that are
+    # not running
+    history = dataset.history
+    for history_item in history:
+        wftask_id = history_item["workflowtask"]["id"]
+        wftask_status = history_item["status"]
+        workflow_tasks_status_dict[wftask_id] = wftask_status
+
+    if running_job is None:
+        # If no job is running, the chronological-last history item is also the
+        # positional-last workflow task to be included in the response.
+        if len(dataset.history) > 0:
+            last_valid_wftask_id = dataset.history[-1]["workflowtask"]["id"]
+        else:
+            last_valid_wftask_id = None
+    else:
+        # If a job is running, then gather more up-to-date information
+
+        # Mid priority: Set all WorkflowTask's that are part of the running job
+        # as "submitted"
+        start = running_job.first_task_index
+        end = running_job.last_task_index + 1
+        for wftask in workflow.task_list[start:end]:
+            workflow_tasks_status_dict[
+                wftask.id
+            ] = WorkflowTaskStatusTypeV2.SUBMITTED
+
+        # The last workflow task that is included in the submitted job is also
+        # the positional-last workflow task to be included in the response.
+        last_valid_wftask_id = workflow.task_list[end - 1]
+
+        # Highest priority: Read status updates coming from the running-job
+        # temporary file. Note: this file only contains information on
+        # WorkflowTask's that ran through successfully.
+        tmp_file = Path(running_job.working_dir) / HISTORY_FILENAME
+        try:
+            with tmp_file.open("r") as f:
+                history = json.load(f)
+        except FileNotFoundError:
+            history = []
+        for history_item in history:
+            wftask_id = history_item["workflowtask"]["id"]
+            wftask_status = history_item["status"]
+            workflow_tasks_status_dict[wftask_id] = wftask_status
+
+    # Based on previously-gathered information, clean up the response body
+    clean_workflow_tasks_status_dict = {}
+    for wf_task in workflow.task_list:
+        wf_task_status = workflow_tasks_status_dict.get(wf_task.id)
+        if wf_task_status is None:
+            # If a wftask ID was not found, ignore it and continue
+            continue
+        clean_workflow_tasks_status_dict[wf_task.id] = wf_task_status
+        if wf_task_status == WorkflowTaskStatusTypeV2.FAILED:
+            # Starting from the beginning of `workflow.task_list`, stop the
+            # first time that you hit a failed job
+            break
+        if wf_task.id == last_valid_wftask_id:
+            # Starting from the beginning of `workflow.task_list`, stop the
+            # first time that you hit `last_valid_wftask_id``
+            break
+
+    response_body = StatusReadV2(status=clean_workflow_tasks_status_dict)
+    return response_body
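With this move, the status endpoint changes from /project/{project_id}/dataset/{dataset_id}/status/ to /project/{project_id}/status/, and both dataset_id and a now-required workflow_id become query parameters. A hedged client-side sketch (base URL, IDs, and token are placeholders; httpx is just one possible HTTP client):

import httpx

BASE_URL = "http://localhost:8000"  # placeholder fractal-server instance
TOKEN = "<token>"  # placeholder bearer token for an authenticated user

response = httpx.get(
    f"{BASE_URL}/api/v2/project/1/status/",
    # dataset_id and workflow_id moved from the path into query parameters
    params={"dataset_id": 1, "workflow_id": 1},
    headers={"Authorization": f"Bearer {TOKEN}"},
)
response.raise_for_status()
# The body maps WorkflowTaskV2 IDs (as strings) to their statuses,
# e.g. {"status": {"7": "done", "8": "submitted"}}
print(response.json()["status"])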
--- a/fractal_server/app/routes/api/v2/task_collection.py
+++ b/fractal_server/app/routes/api/v2/task_collection.py
@@ -17,7 +17,7 @@ from .....logger import set_logger
 from .....syringe import Inject
 from ....db import AsyncSession
 from ....db import get_async_db
-from ....models import State
+from ....models import CollectionStateV2
 from ....models.v2 import TaskV2
 from ....schemas import StateRead
 from ....schemas.v2 import TaskCollectPipV2
@@ -151,7 +151,7 @@ async def collect_tasks_pip(
                 ),
             )
         task_collect_status.info = "Already installed"
-        state = State(data=task_collect_status.sanitised_dict())
+        state = CollectionStateV2(data=task_collect_status.sanitised_dict())
         response.status_code == status.HTTP_200_OK
         await db.close()
         return state
@@ -181,7 +181,7 @@ async def collect_tasks_pip(
     # Create State object (after casting venv_path to string)
     collection_status_dict = collection_status.dict()
     collection_status_dict["venv_path"] = str(collection_status.venv_path)
-    state = State(data=collection_status_dict)
+    state = CollectionStateV2(data=collection_status_dict)
     db.add(state)
     await db.commit()
     await db.refresh(state)
@@ -220,7 +220,7 @@ async def check_collection_status(
     """
     logger = set_logger(logger_name="check_collection_status")
     logger.debug(f"Querying state for state.id={state_id}")
-    state = await db.get(State, state_id)
+    state = await db.get(CollectionStateV2, state_id)
     if not state:
         await db.close()
         raise HTTPException(
--- a/fractal_server/app/routes/api/v2/workflowtask.py
+++ b/fractal_server/app/routes/api/v2/workflowtask.py
@@ -46,6 +46,11 @@ async def create_workflowtask(
 
     if new_task.is_legacy_task is True:
         task = await db.get(Task, task_id)
+        if not task:
+            raise HTTPException(
+                status_code=status.HTTP_404_NOT_FOUND,
+                detail=f"Task {task_id} not found.",
+            )
         if not task.is_v2_compatible:
             raise HTTPException(
                 status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
@@ -53,16 +58,11 @@
             )
     else:
         task = await db.get(TaskV2, task_id)
-
-    if not task:
-        if new_task.is_legacy_task:
-            error = f"Task {task_id} not found."
-        else:
-            error = f"TaskV2 {task_id} not found."
-
-        raise HTTPException(
-            status_code=status.HTTP_404_NOT_FOUND, detail=error
-        )
+        if not task:
+            raise HTTPException(
+                status_code=status.HTTP_404_NOT_FOUND,
+                detail=f"TaskV2 {task_id} not found.",
+            )
 
     if new_task.is_legacy_task is True or task.type == "parallel":
         if (
--- a/fractal_server/app/runner/v2/runner_functions.py
+++ b/fractal_server/app/runner/v2/runner_functions.py
@@ -146,6 +146,9 @@ def run_v2_task_parallel(
     submit_setup_call: Callable = no_op_submit_setup_call,
 ) -> TaskOutput:
 
+    if len(images) == 0:
+        return TaskOutput()
+
     _check_parallelization_list_size(images)
 
     executor_options = _get_executor_options(
@@ -249,6 +252,9 @@ def run_v2_task_compound(
     # 3/B: parallel part of a compound task
     _check_parallelization_list_size(parallelization_list)
 
+    if len(parallelization_list) == 0:
+        return TaskOutput()
+
     list_function_kwargs = []
     for ind, parallelization_item in enumerate(parallelization_list):
         list_function_kwargs.append(
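Both V2 runner entry points now short-circuit on an empty parallelization list instead of submitting zero-unit jobs (note that in the compound case the guard sits after _check_parallelization_list_size, whereas in the parallel case it comes before it). A standalone sketch of the pattern, using simplified stand-in types rather than fractal-server's own:

from dataclasses import dataclass, field


@dataclass
class TaskOutput:
    # Simplified stand-in for fractal-server's TaskOutput model
    image_list_updates: list = field(default_factory=list)


def run_task_parallel(images: list) -> TaskOutput:
    # As of 2.0.0a11: return an empty output instead of dispatching a
    # parallel task over zero images.
    if len(images) == 0:
        return TaskOutput()
    # ... dispatch one unit of work per image and merge the results ...
    return TaskOutput(image_list_updates=[{"origin": img} for img in images])


print(run_task_parallel([]))  # TaskOutput(image_list_updates=[])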
--- a/fractal_server/app/schemas/v2/dataset.py
+++ b/fractal_server/app/schemas/v2/dataset.py
@@ -26,20 +26,6 @@ class _DatasetHistoryItemV2(BaseModel):
     parallelization: Optional[dict]
 
 
-class DatasetStatusReadV2(BaseModel):
-    """
-    Response type for the
-    `/project/{project_id}/dataset/{dataset_id}/status/` endpoint
-    """
-
-    status: Optional[
-        dict[
-            str,
-            WorkflowTaskStatusTypeV2,
-        ]
-    ] = None
-
-
 # CRUD
 
 
--- /dev/null
+++ b/fractal_server/app/schemas/v2/status.py
@@ -0,0 +1,16 @@
+from pydantic import BaseModel
+from pydantic import Field
+
+from .workflowtask import WorkflowTaskStatusTypeV2
+
+
+class StatusReadV2(BaseModel):
+    """
+    Response type for the
+    `/project/{project_id}/status/` endpoint
+    """
+
+    status: dict[
+        str,
+        WorkflowTaskStatusTypeV2,
+    ] = Field(default_factory=dict)
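Unlike the removed DatasetStatusReadV2, whose status field was Optional and defaulted to None, the new schema always yields a dict. A minimal sketch of the changed default behavior (the IDs and status values are illustrative):

from fractal_server.app.schemas.v2.status import StatusReadV2

# With no argument, `status` now defaults to {} rather than None.
empty = StatusReadV2()
assert empty.status == {}

# Keys are WorkflowTaskV2 IDs serialized as strings; values are coerced
# into WorkflowTaskStatusTypeV2 members ("submitted", "done", "failed").
body = StatusReadV2(status={"7": "done", "8": "submitted"})
print(body.status)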
--- a/fractal_server/migrations/versions/80e12e1bc4fd_v2.py
+++ b/fractal_server/migrations/versions/5bf02391cfef_v2.py
@@ -1,8 +1,8 @@
-"""V2
+"""v2
 
-Revision ID: 80e12e1bc4fd
+Revision ID: 5bf02391cfef
 Revises: 9fd26a2b0de4
-Create Date: 2024-04-12 10:13:58.085788
+Create Date: 2024-04-18 10:35:19.067833
 
 """
 import sqlalchemy as sa
@@ -11,7 +11,7 @@ from alembic import op
 
 
 # revision identifiers, used by Alembic.
-revision = "80e12e1bc4fd"
+revision = "5bf02391cfef"
 down_revision = "9fd26a2b0de4"
 branch_labels = None
 depends_on = None
@@ -19,6 +19,13 @@ depends_on = None
 
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        "collectionstatev2",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("data", sa.JSON(), nullable=True),
+        sa.Column("timestamp", sa.DateTime(timezone=True), nullable=True),
+        sa.PrimaryKeyConstraint("id"),
+    )
     op.create_table(
         "projectv2",
         sa.Column("id", sa.Integer(), nullable=False),
@@ -234,4 +241,5 @@ def downgrade() -> None:
     op.drop_table("datasetv2")
     op.drop_table("taskv2")
     op.drop_table("projectv2")
+    op.drop_table("collectionstatev2")
     # ### end Alembic commands ###
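Deployments picking up this release need to run the revised migration so that the collectionstatev2 table exists before the new model is queried. A hedged sketch using Alembic's Python API (the config path is an assumption; real deployments may instead drive migrations through fractal-server's own management commands):

from alembic import command
from alembic.config import Config

# Assumed path: fractal-server ships an alembic.ini inside the package.
cfg = Config("fractal_server/alembic.ini")
# Applies all pending revisions, including 5bf02391cfef above.
command.upgrade(cfg, "head")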
--- a/fractal_server/tasks/v2/background_operations.py
+++ b/fractal_server/tasks/v2/background_operations.py
@@ -15,7 +15,7 @@ from ..utils import slugify_task_name
 from ._TaskCollectPip import _TaskCollectPip
 from fractal_server.app.db import DBSyncSession
 from fractal_server.app.db import get_sync_db
-from fractal_server.app.models import State
+from fractal_server.app.models.v2 import CollectionStateV2
 from fractal_server.app.models.v2 import TaskV2
 from fractal_server.app.schemas.v2 import TaskCollectStatusV2
 from fractal_server.app.schemas.v2 import TaskCreateV2
@@ -313,7 +313,7 @@ async def background_collect_pip(
         logger.debug(f"{key}: {value}")
 
     with next(get_sync_db()) as db:
-        state: State = db.get(State, state_id)
+        state: CollectionStateV2 = db.get(CollectionStateV2, state_id)
         data = TaskCollectStatusV2(**state.data)
         data.info = None
 
--- a/fractal_server-2.0.0a9.dist-info/METADATA
+++ b/fractal_server-2.0.0a11.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fractal-server
-Version: 2.0.0a9
+Version: 2.0.0a11
 Summary: Server component of the Fractal analytics platform
 Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause
--- a/fractal_server-2.0.0a9.dist-info/RECORD
+++ b/fractal_server-2.0.0a11.dist-info/RECORD
@@ -1,21 +1,22 @@
-fractal_server/__init__.py,sha256=igx3UAg7e-LuCe083CY3r_FQooF1tJV_FTJy9JRvzEo,24
+fractal_server/__init__.py,sha256=3s9YLGOYqSP6sC_lM9jIIhYieV3lgQqBsJSBVyRU0ZU,25
 fractal_server/__main__.py,sha256=CocbzZooX1UtGqPi55GcHGNxnrJXFg5tUU5b3wyFCyo,4958
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/db/__init__.py,sha256=WZEVfdJAX7ZyBM1ngfEGeqWWcjK_NygtCbawpmbwGpU,4042
-fractal_server/app/models/__init__.py,sha256=VP6cx5xYP6p1r6CubqCoyQaUyCYY7duTRwCCIEyndbw,183
+fractal_server/app/models/__init__.py,sha256=fwlIQsf-OLF2pPVQBxo43W0hD0gYIVmnCzPp9ekDo4g,144
 fractal_server/app/models/linkuserproject.py,sha256=eQaourbGRshvlMVlKzLYJKHEjfsW1CbWws9yW4eHXhA,567
 fractal_server/app/models/security.py,sha256=UG9wCVA5GRSyHrYEFhH8lIF1hXykxsr9LSi8_dFToMY,3378
-fractal_server/app/models/state.py,sha256=GUxmaDI542JI_eu5zIB91YBOt8dwkcSd4Om5yxHHy6Y,1090
-fractal_server/app/models/v1/__init__.py,sha256=MyQa9Xi-O8tvZ1OHEo275uIuRit-CYZ5Yqh98vk4CUM,413
+fractal_server/app/models/v1/__init__.py,sha256=qUlUGnWFaIm3aBXfUuLdhcW9f_s1VzAEuypr31zvHGo,458
 fractal_server/app/models/v1/dataset.py,sha256=99GDgt7njx8yYQApkImqp_7bHA5HH3ElvbR6Oyj9kVI,2017
 fractal_server/app/models/v1/job.py,sha256=QLGXcWdVRHaUHQNDapYYlLpEfw4K7QyD8TmcwhrWw2o,3304
 fractal_server/app/models/v1/project.py,sha256=sDmAFLOBK5o4dLrwsIN681JcT5J1rzoUNTV9QVqwnA8,859
+fractal_server/app/models/v1/state.py,sha256=W5XxCR9BlHXo5abCvzblkvufqpGZtM5-G11ixzMUOp4,1092
 fractal_server/app/models/v1/task.py,sha256=3xZqNeFYUqslh8ddMSXF2nO4nIiOD8T5Ij37wY20kss,2782
 fractal_server/app/models/v1/workflow.py,sha256=dnY5eMaOe3oZv8arn00RNX9qVkBtTLG-vYdWXcQuyo4,3950
-fractal_server/app/models/v2/__init__.py,sha256=2T_ZXpP9n5IktoX3bkQUKUKzGAN5tJiR1LKWOtOCclM,400
+fractal_server/app/models/v2/__init__.py,sha256=uLzdInqATSwi0bS_V4vKB-TqFrOFaXuxCAbU73c0f24,473
+fractal_server/app/models/v2/collection_state.py,sha256=nxb042i8tt8rCpmgbFJoBCYWU-34m0HdUfO9YurTp8k,588
 fractal_server/app/models/v2/dataset.py,sha256=-7sxHEw4IIAvF_uSan7tA3o8hvoakBkQ0SRvqS2iOQU,1455
-fractal_server/app/models/v2/job.py,sha256=PCJf0_NYIc5boXL6e6P72BvYJGydCZOGKnW2DT4Sw9g,1535
+fractal_server/app/models/v2/job.py,sha256=ypJmN-qspkKBGhBG7Mt-HypSQqcQ2EmB4Bzzb2-y550,1535
 fractal_server/app/models/v2/project.py,sha256=CqDEKzdVxmFDMee6DnVOyX7WGmdn-dQSLSekzw_OLUc,817
 fractal_server/app/models/v2/task.py,sha256=9ZPhug3VWyeqgT8wQ9_8ZXQ2crSiiicRipxrxTslOso,3257
 fractal_server/app/models/v2/workflow.py,sha256=YBgFGCziUgU0aJ5EM3Svu9W2c46AewZO9VBlFCHiSps,1069
@@ -23,7 +24,7 @@ fractal_server/app/models/v2/workflowtask.py,sha256=kEm2k1LI0KK9vlTH7DL1NddaEUpI
 fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/routes/admin/v1.py,sha256=uY6H1znlAlrM9e1MG2EThTqwciCl87Twew34JM5W6IU,13981
-fractal_server/app/routes/admin/v2.py,sha256=c2hk8lB9ilXvFDJ6AHv7aPd_nyaXyobH0S7CaaDqFMI,9826
+fractal_server/app/routes/admin/v2.py,sha256=T8-bGAL25on-ntZx_Msz9j5jq6NGhkjVl1jp3eRJUbw,9830
 fractal_server/app/routes/api/__init__.py,sha256=EVyZrEq3I_1643QGTPCC5lgCp4xH_auYbrFfogTm4pc,315
 fractal_server/app/routes/api/v1/__init__.py,sha256=Y2HQdG197J0a7DyQEE2jn53IfxD0EHGhzK1I2JZuEck,958
 fractal_server/app/routes/api/v1/_aux_functions.py,sha256=eC5exnGj9jnJqx0ccecoNaipxDeK2ZsR1ev0syH5x-Y,11955
@@ -34,18 +35,19 @@ fractal_server/app/routes/api/v1/task.py,sha256=4zUXMtq5M95XjaZs1t9oibYHiDIwxpM-
 fractal_server/app/routes/api/v1/task_collection.py,sha256=_cY3pPRGchdWPuJ1XudMZMVJ0IC0_XVH0XwLTiAbRGg,8873
 fractal_server/app/routes/api/v1/workflow.py,sha256=ZObifWTPi100oRQ1wEER8Sgsr3Neo8QVdCCFQnWMNZ0,10930
 fractal_server/app/routes/api/v1/workflowtask.py,sha256=ox-DIIqYV4K35hCu86eGa2SHnR5IQml-I00UHEwnmHQ,5579
-fractal_server/app/routes/api/v2/__init__.py,sha256=x56HcY1uBNCgq4BRVj-0j6bAj6OsTN97RNDqY8NefJ8,1373
-fractal_server/app/routes/api/v2/_aux_functions.py,sha256=TCHf3aM-KQxaNJen10CGX1Da5IIra00xRF39FUTU698,14301
-fractal_server/app/routes/api/v2/dataset.py,sha256=mgz8746jOhXDdKkNY7dDN3bM0QgXFBMk1VUFqnxU-B0,11573
+fractal_server/app/routes/api/v2/__init__.py,sha256=UNgODxoEXfQpQDjvsnMvHaUWbZOrcHhEXNisLcU-0tE,1487
+fractal_server/app/routes/api/v2/_aux_functions.py,sha256=IL1JKVqRcGfqiVbptDzpMKqi9QTYDYCCcsqIG0x0Nl8,14301
+fractal_server/app/routes/api/v2/dataset.py,sha256=0JGRnK1DRQKgVA3FDhK8VdoRglLYFxgkMQOaoWI-tiQ,7853
 fractal_server/app/routes/api/v2/images.py,sha256=4r_HblPWyuKSZSJZfn8mbDaLv1ncwZU0gWdKneZcNG4,7894
 fractal_server/app/routes/api/v2/job.py,sha256=9mXaKCX_N3FXM0GIxdE49nWl_hJZ8CBLBIaMMhaCKOM,5334
 fractal_server/app/routes/api/v2/project.py,sha256=i9a19HAqE36N92G60ZYgObIP9nv-hR7Jt5nd9Dkhz1g,6024
+fractal_server/app/routes/api/v2/status.py,sha256=3bqQejJ3TnIMan5wK6jr9sv4ypsQr9WWU8xqlvTgDCE,5739
 fractal_server/app/routes/api/v2/submit.py,sha256=iszII5CvWDEjGPTphBgH9FVS1pNb5m11Xc8xozGgjgI,6901
 fractal_server/app/routes/api/v2/task.py,sha256=gJ0LruSk-Q1iMw8ZOX8C0wrZ4S4DGlQTr_5SdJJud0Q,7130
-fractal_server/app/routes/api/v2/task_collection.py,sha256=iw74UF8qdQa9pJf0DvSjihng6ri2k2HtW2UhMS_a8Zc,8904
+fractal_server/app/routes/api/v2/task_collection.py,sha256=O5eg40P-TwYy6azGh0DpyN6Rya9FfhRHQDf4qpYIGEE,8952
 fractal_server/app/routes/api/v2/task_legacy.py,sha256=P_VJv9v0yzFUBuS-DQHhMVSOe20ecGJJcFBqiiFciOM,1628
 fractal_server/app/routes/api/v2/workflow.py,sha256=sw-1phO_rrmDAcWX9Zqb9M8SfrWF78-02AuLB1-D1PU,11845
-fractal_server/app/routes/api/v2/workflowtask.py,sha256=I1nrIV5J_DW1IeBq0q9VmUeBDo7P6x7qYO_Ocls2Pno,8720
+fractal_server/app/routes/api/v2/workflowtask.py,sha256=l4eTD5IIun5cOdYzsxh3ajmnOISaSccYA_mVf15Cjtw,8802
 fractal_server/app/routes/auth.py,sha256=Xv80iqdyfY3lyicYs2Y8B6zEDEnyUu_H6_6psYtv3R4,4885
 fractal_server/app/routes/aux/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/routes/aux/_job.py,sha256=5gKgvArAruSkMQuPN34Vvzi89WJbwWPsx0oDAa_iXu4,1248
@@ -90,7 +92,7 @@ fractal_server/app/runner/v2/deduplicate_list.py,sha256=-imwO7OB7ATADEnqVbTElUwo
 fractal_server/app/runner/v2/handle_failed_job.py,sha256=M1r3dnrbUMo_AI2qjaVuGhieMAyLh5gcvB10YOBpjvI,5415
 fractal_server/app/runner/v2/merge_outputs.py,sha256=IHuHqbKmk97K35BFvTrKVBs60z3e_--OzXTnsvmA02c,1281
 fractal_server/app/runner/v2/runner.py,sha256=K6bmWbQRSZwbO6ZI2Bp7wNxYdkHcXxhWwBObMxJ0iSU,12599
-fractal_server/app/runner/v2/runner_functions.py,sha256=kN_xuaAg4qeRNIXijo30F1WSOo3zbjz5JCuS87EQf4g,10171
+fractal_server/app/runner/v2/runner_functions.py,sha256=qVGG9KlH8ObX4Y0kr0q6qE8OpWFwf4RnOHhgPRRdj5M,10293
 fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=djNKD1y_EE0Q9Jkzh1QdKpjM66JVsLQgX2_zJT0xQlA,3947
 fractal_server/app/runner/v2/task_interface.py,sha256=TZLVJs6CNFo2lFhr-lsDxe585cEhRv48eA490LS9aqc,1746
 fractal_server/app/runner/v2/v1_compat.py,sha256=t0ficzAHUFaaeI56nqTb4YEKxfARF7L9Y6ijtJCwjP8,912
@@ -108,11 +110,12 @@ fractal_server/app/schemas/v1/task.py,sha256=7BxOZ_qoRQ8n3YbQpDvB7VMcxB5fSYQmR5R
 fractal_server/app/schemas/v1/task_collection.py,sha256=uvq9bcMaGD_qHsh7YtcpoSAkVAbw12eY4DocIO3MKOg,3057
 fractal_server/app/schemas/v1/workflow.py,sha256=tuOs5E5Q_ozA8if7YPZ07cQjzqB_QMkBS4u92qo4Ro0,4618
 fractal_server/app/schemas/v2/__init__.py,sha256=zlCYrplCWwnCL9-BYsExRMfVzhBy21IMBfdHPMgJZYk,1752
-fractal_server/app/schemas/v2/dataset.py,sha256=_nnpGqaD7HJNC125jAyPn05iavy5uy4jxEMDB40TXCA,2737
+fractal_server/app/schemas/v2/dataset.py,sha256=MGv0bdzEIQFNy8ARqiDn_neC1mJJTMXFzbb9M5l4xxg,2474
 fractal_server/app/schemas/v2/dumps.py,sha256=IpIT_2KxJd7qTgW2NllDknGeP7vBAJDfyz1I5p3TytU,2023
 fractal_server/app/schemas/v2/job.py,sha256=zfF9K3v4jWUJ7M482ta2CkqUJ4tVT4XfVt60p9IRhP0,3250
 fractal_server/app/schemas/v2/manifest.py,sha256=N37IWohcfO3_y2l8rVM0h_1nZq7m4Izxk9iL1vtwBJw,6243
 fractal_server/app/schemas/v2/project.py,sha256=u7S4B-bote1oGjzAGiZ-DuQIyeRAGqJsI71Tc1EtYE0,736
+fractal_server/app/schemas/v2/status.py,sha256=SQaUpQkjFq5c5k5J4rOjNhuQaDOEg8lksPhkKmPU5VU,332
 fractal_server/app/schemas/v2/task.py,sha256=7IfxiZkaVqlARy7WYE_H8m7j_IEcuQaZORUrs6b5YuY,4672
 fractal_server/app/schemas/v2/task_collection.py,sha256=sY29NQfJrbjiidmVkVjSIH-20wIsmh7G1QOdr05KoDQ,3171
 fractal_server/app/schemas/v2/workflow.py,sha256=Zzx3e-qgkH8le0FUmAx9UrV5PWd7bj14PPXUh_zgZXM,1827
@@ -131,9 +134,9 @@ fractal_server/migrations/script.py.mako,sha256=oMXw9LC3zRbinWWPPDgeZ4z9FJrV2zhR
 fractal_server/migrations/versions/4c308bcaea2b_add_task_args_schema_and_task_args_.py,sha256=-wHe-fOffmYeAm0JXVl_lxZ7hhDkaEVqxgxpHkb_uL8,954
 fractal_server/migrations/versions/4cedeb448a53_workflowtask_foreign_keys_not_nullables.py,sha256=Mob8McGYAcmgvrseyyYOa54E6Gsgr-4SiGdC-r9O4_A,1157
 fractal_server/migrations/versions/50a13d6138fd_initial_schema.py,sha256=zwXegXs9J40eyCWi3w0c_iIBVJjXNn4VdVnQaT3KxDg,8770
+fractal_server/migrations/versions/5bf02391cfef_v2.py,sha256=axhNkr_H6R4rRbY7oGYazNbFvPXeSyBDWFVbKNmiqs8,8433
 fractal_server/migrations/versions/70e77f1c38b0_add_applyworkflow_first_task_index_and_.py,sha256=Q-DsMzG3IcUV2Ol1dhJWosDvKERamBE6QvA2zzS5zpQ,1632
 fractal_server/migrations/versions/71eefd1dd202_add_slurm_accounts.py,sha256=mbWuCkTpRAdGbRhW7lhXs_e5S6O37UAcCN6JfoY5H8A,1353
-fractal_server/migrations/versions/80e12e1bc4fd_v2.py,sha256=WsgwzUVN2WNkaDaLawpYGwvGfoYmD0Vl3EZFdrIqXhg,8116
 fractal_server/migrations/versions/84bf0fffde30_add_dumps_to_applyworkflow.py,sha256=NSCuhANChsg76vBkShBl-9tQ4VEHubOjtAv1etHhlvY,2684
 fractal_server/migrations/versions/8f79bd162e35_add_docs_info_and_docs_link_to_task_.py,sha256=6pgODDtyAxevZvAJBj9IJ41inhV1RpwbpZr_qfPPu1A,1115
 fractal_server/migrations/versions/97f444d47249_add_applyworkflow_project_dump.py,sha256=eKTZm3EgUgapXBxO0RuHkEfTKic-TZG3ADaMpGLuc0k,1057
@@ -155,12 +158,12 @@ fractal_server/tasks/v1/background_operations.py,sha256=T5L-ghgGEJIGcGoZB_r0cjH9
 fractal_server/tasks/v1/get_collection_data.py,sha256=bi9tuApLgoKZNMIG1kR4GoKI9S6Y040gFfNQapw4ikM,502
 fractal_server/tasks/v2/_TaskCollectPip.py,sha256=QeCqXDgOnMjk3diVlC5bgGEywyQjYFm5637Rke49vJY,3775
 fractal_server/tasks/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fractal_server/tasks/v2/background_operations.py,sha256=zr6j3uoWmCeW2EA9auxWNZ0sG3SHgSxUVTC1OpQXE3Y,12803
+fractal_server/tasks/v2/background_operations.py,sha256=fUukEA-zFjUDhxgI3oO_Bvy7FinaYFaydciASOIbL3w,12842
 fractal_server/tasks/v2/get_collection_data.py,sha256=Qhf2T_aaqAfqu9_KpUSlXsS7EJoZQbEPEreHHa2jco8,502
 fractal_server/urls.py,sha256=5o_qq7PzKKbwq12NHSQZDmDitn5RAOeQ4xufu-2v9Zk,448
 fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
-fractal_server-2.0.0a9.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
-fractal_server-2.0.0a9.dist-info/METADATA,sha256=P8NzTlZ9SHoftxPyngm08c824qK_b9sdIXvfrxt_e5Y,4200
-fractal_server-2.0.0a9.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-fractal_server-2.0.0a9.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
-fractal_server-2.0.0a9.dist-info/RECORD,,
+fractal_server-2.0.0a11.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.0.0a11.dist-info/METADATA,sha256=vFS5XczcfUY_lJ0uxHE2LAm_9pwDShIGzl_jjC6Cwdo,4201
+fractal_server-2.0.0a11.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+fractal_server-2.0.0a11.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.0.0a11.dist-info/RECORD,,