fractal-server 2.14.0a7__py3-none-any.whl → 2.14.0a9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -14,16 +14,12 @@ from ....db import get_async_db
 from ....models.v2 import JobV2
 from ....models.v2 import ProjectV2
 from ....models.v2 import WorkflowV2
-from ....runner.set_start_and_last_task_index import (
-    set_start_and_last_task_index,
-)
 from ....schemas.v2 import WorkflowCreateV2
 from ....schemas.v2 import WorkflowExportV2
 from ....schemas.v2 import WorkflowReadV2
 from ....schemas.v2 import WorkflowReadV2WithWarnings
 from ....schemas.v2 import WorkflowUpdateV2
 from ._aux_functions import _check_workflow_exists
-from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_project_check_owner
 from ._aux_functions import _get_submitted_jobs_statement
 from ._aux_functions import _get_workflow_check_owner
@@ -293,27 +289,21 @@ async def get_user_workflows(
     return workflow_list


-class TypeFiltersFlow(BaseModel):
-    dataset_filters: list[dict[str, bool]]
-    input_filters: list[dict[str, bool]]
-    output_filters: list[dict[str, bool]]
+class WorkflowTaskTypeFiltersInfo(BaseModel):
+    current_type_filters: dict[str, bool]
+    input_type_filters: dict[str, bool]
+    output_type_filters: dict[str, bool]


-@router.get(
-    "/project/{project_id}/workflow/{workflow_id}/type-filters-flow/",
-    response_model=TypeFiltersFlow,
-)
+@router.get("/project/{project_id}/workflow/{workflow_id}/type-filters-flow/")
 async def get_workflow_type_filters(
     project_id: int,
     workflow_id: int,
-    dataset_id: Optional[int] = None,
-    first_task_index: Optional[int] = None,
-    last_task_index: Optional[int] = None,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
-) -> Optional[WorkflowReadV2WithWarnings]:
+) -> dict[str, WorkflowTaskTypeFiltersInfo]:
     """
-    Get info on an existing workflow
+    Get info on type/type-filters flow for a workflow.
     """

     workflow = await _get_workflow_check_owner(
@@ -323,59 +313,32 @@ async def get_workflow_type_filters(
         db=db,
     )

-    if len(workflow.task_list) == 0:
+    num_tasks = len(workflow.task_list)
+    if num_tasks == 0:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
             detail="Workflow has no tasks.",
         )

-    if dataset_id is None:
-        dataset_type_filters = {}
-    else:
-        res = await _get_dataset_check_owner(
-            project_id=project_id,
-            dataset_id=dataset_id,
-            user_id=user.id,
-            db=db,
-        )
-        dataset = res["dataset"]
-        dataset_type_filters = dataset.type_filters
-
-    num_tasks = len(workflow.task_list)
-    try:
-        first_task_index, last_task_index = set_start_and_last_task_index(
-            num_tasks,
-            first_task_index=first_task_index,
-            last_task_index=last_task_index,
-        )
-    except ValueError as e:
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=f"Invalid first/last task index.\nOriginal error: {str(e)}",
-        )
+    current_type_filters = {}

-    list_dataset_filters = [copy(dataset_type_filters)]
-    list_filters_in = []
-    list_filters_out = []
-    for wftask in workflow.task_list[first_task_index : last_task_index + 1]:
+    response = {}
+    for wftask in workflow.task_list:

-        input_type_filters = copy(dataset_type_filters)
-        patch = merge_type_filters(
+        # Compute input_type_filters, based on wftask and task manifest
+        input_type_filters = merge_type_filters(
             wftask_type_filters=wftask.type_filters,
             task_input_types=wftask.task.input_types,
         )
-        input_type_filters.update(patch)
-        list_filters_in.append(copy(input_type_filters))

-        output_type_filters = wftask.task.output_types
-        list_filters_out.append(output_type_filters)
+        # Append current item to response list
+        response[str(wftask.id)] = dict(
+            current_type_filters=copy(current_type_filters),
+            input_type_filters=copy(input_type_filters),
+            output_type_filters=copy(wftask.task.output_types),
+        )

-        dataset_type_filters.update(wftask.task.output_types)
-        list_dataset_filters.append(copy(dataset_type_filters))
+        # Update `current_type_filters`
+        current_type_filters.update(wftask.task.output_types)

-    response_body = dict(
-        dataset_filters=list_dataset_filters,
-        input_filters=list_filters_in,
-        output_filters=list_filters_out,
-    )
-    return response_body
+    return response
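Note: the hunk above changes the `type-filters-flow` endpoint from returning per-position filter lists to returning a mapping keyed by WorkflowTask id. A minimal sketch of the new response shape, with hypothetical ids and filter values (not taken from the diff):

```python
# Hypothetical response of GET .../type-filters-flow/ for a two-task workflow.
# Keys are WorkflowTask ids (as strings); values follow WorkflowTaskTypeFiltersInfo.
example_response = {
    "101": {
        "current_type_filters": {},             # filters accumulated before task 101
        "input_type_filters": {"3D": True},     # wftask filters merged with task input_types
        "output_type_filters": {"3D": False},   # task output_types
    },
    "102": {
        "current_type_filters": {"3D": False},  # updated with task 101 output_types
        "input_type_filters": {"3D": False},
        "output_type_filters": {},
    },
}
```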
@@ -9,7 +9,6 @@ from concurrent.futures import InvalidStateError
 from copy import copy
 from pathlib import Path
 from typing import Any
-from typing import Callable
 from typing import Optional
 from typing import Sequence

@@ -216,7 +215,7 @@ class FractalSlurmSSHExecutor(Executor):

     def submit(
         self,
-        fun: Callable[..., Any],
+        fun: callable,
         *fun_args: Sequence[Any],
         slurm_config: SlurmConfig,
         task_files: TaskFiles,
@@ -278,7 +277,7 @@ class FractalSlurmSSHExecutor(Executor):

     def map(
         self,
-        fn: Callable[..., Any],
+        fn: callable,
         iterable: list[Sequence[Any]],
         *,
         slurm_config: SlurmConfig,
@@ -446,7 +445,7 @@ class FractalSlurmSSHExecutor(Executor):

     def _prepare_job(
         self,
-        fun: Callable[..., Any],
+        fun: callable,
         slurm_file_prefix: str,
         task_files: TaskFiles,
         slurm_config: SlurmConfig,
@@ -2,7 +2,6 @@ import logging
 from copy import copy
 from copy import deepcopy
 from pathlib import Path
-from typing import Callable
 from typing import Optional

 from sqlalchemy.orm.attributes import flag_modified
@@ -20,13 +19,15 @@ from .runner_functions import run_v2_task_non_parallel
 from .runner_functions import run_v2_task_parallel
 from .task_interface import TaskOutput
 from fractal_server.app.db import get_sync_db
-from fractal_server.app.history.status_enum import XXXStatus
 from fractal_server.app.models.v2 import AccountingRecord
 from fractal_server.app.models.v2 import DatasetV2
 from fractal_server.app.models.v2 import HistoryRun
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.runner.executors.base_runner import BaseRunner
+from fractal_server.app.schemas.v2 import HistoryUnitStatus
+from fractal_server.app.schemas.v2 import TaskDumpV2
+from fractal_server.app.schemas.v2 import TaskGroupDumpV2
 from fractal_server.images.models import AttributeFiltersType
 from fractal_server.images.tools import merge_type_filters

@@ -40,7 +41,7 @@ def execute_tasks_v2(
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
     logger_name: Optional[str] = None,
-    submit_setup_call: Callable = no_op_submit_setup_call,
+    submit_setup_call: callable = no_op_submit_setup_call,
     job_attribute_filters: AttributeFiltersType,
 ) -> None:
     logger = logging.getLogger(logger_name)
@@ -86,28 +87,24 @@ def execute_tasks_v2(
         else:
             num_available_images = 0

-        # Create history item
         with next(get_sync_db()) as db:
+            # Create dumps for workflowtask and taskgroup
             workflowtask_dump = dict(
                 **wftask.model_dump(exclude={"task"}),
-                task=wftask.task.model_dump(),
+                task=TaskDumpV2(**wftask.task.model_dump()).model_dump(),
             )
-
-            # Exclude timestamps since they'd need to be serialized properly
             task_group = db.get(TaskGroupV2, wftask.task.taskgroupv2_id)
-            task_group_dump = task_group.model_dump(
-                exclude={
-                    "timestamp_created",
-                    "timestamp_last_used",
-                }
-            )
+            task_group_dump = TaskGroupDumpV2(
+                **task_group.model_dump()
+            ).model_dump()
+            # Create HistoryRun
             history_run = HistoryRun(
                 dataset_id=dataset.id,
                 workflowtask_id=wftask.id,
                 workflowtask_dump=workflowtask_dump,
                 task_group_dump=task_group_dump,
                 num_available_images=num_available_images,
-                status=XXXStatus.SUBMITTED,
+                status=HistoryUnitStatus.SUBMITTED,
             )
             db.add(history_run)
             db.commit()
@@ -361,14 +358,14 @@ def execute_tasks_v2(
                 db.execute(
                     update(HistoryRun)
                     .where(HistoryRun.id == history_run_id)
-                    .values(status=XXXStatus.DONE)
+                    .values(status=HistoryUnitStatus.DONE)
                 )
                 db.commit()
             else:
                 db.execute(
                     update(HistoryRun)
                     .where(HistoryRun.id == history_run_id)
-                    .values(status=XXXStatus.FAILED)
+                    .values(status=HistoryUnitStatus.FAILED)
                 )
                 db.commit()
                 logger.error(
@@ -15,7 +15,6 @@ from .runner_functions_low_level import run_single_task
 from .task_interface import InitTaskOutput
 from .task_interface import TaskOutput
 from fractal_server.app.db import get_sync_db
-from fractal_server.app.history.status_enum import XXXStatus
 from fractal_server.app.models.v2 import HistoryUnit
 from fractal_server.app.models.v2 import TaskV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
@@ -23,12 +22,15 @@ from fractal_server.app.runner.components import _COMPONENT_KEY_
 from fractal_server.app.runner.components import _index_to_component
 from fractal_server.app.runner.executors.base_runner import BaseRunner
 from fractal_server.app.runner.v2._db_tools import bulk_upsert_image_cache_fast
+from fractal_server.app.schemas.v2 import HistoryUnitStatus


 __all__ = [
-    "run_v2_task_non_parallel",
     "run_v2_task_parallel",
+    "run_v2_task_non_parallel",
     "run_v2_task_compound",
+    "run_v2_task_converter_non_parallel",
+    "run_v2_task_converter_compound",
 ]

 MAX_PARALLELIZATION_LIST_SIZE = 20_000
@@ -125,7 +127,7 @@ def run_v2_task_non_parallel(
     with next(get_sync_db()) as db:
         history_unit = HistoryUnit(
             history_run_id=history_run_id,
-            status=XXXStatus.SUBMITTED,
+            status=HistoryUnitStatus.SUBMITTED,
             logfile=None,  # FIXME
             zarr_urls=function_kwargs["zarr_urls"],
         )
@@ -165,7 +167,7 @@ def run_v2_task_non_parallel(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.DONE)
+            .values(status=HistoryUnitStatus.DONE)
         )
         db.commit()
         if result is None:
@@ -176,7 +178,7 @@ def run_v2_task_non_parallel(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.FAILED)
+            .values(status=HistoryUnitStatus.FAILED)
         )
         db.commit()
         return (TaskOutput(), num_tasks, {0: exception})
@@ -222,7 +224,7 @@ def run_v2_task_converter_non_parallel(
     with next(get_sync_db()) as db:
         history_unit = HistoryUnit(
             history_run_id=history_run_id,
-            status=XXXStatus.SUBMITTED,
+            status=HistoryUnitStatus.SUBMITTED,
             logfile=None,  # FIXME
             zarr_urls=[],
         )
@@ -250,7 +252,7 @@ def run_v2_task_converter_non_parallel(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.DONE)
+            .values(status=HistoryUnitStatus.DONE)
         )
         db.commit()
         if result is None:
@@ -261,7 +263,7 @@ def run_v2_task_converter_non_parallel(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.FAILED)
+            .values(status=HistoryUnitStatus.FAILED)
         )
         db.commit()
         return (TaskOutput(), num_tasks, {0: exception})
@@ -303,7 +305,7 @@ def run_v2_task_parallel(
     history_units = [
         HistoryUnit(
             history_run_id=history_run_id,
-            status=XXXStatus.SUBMITTED,
+            status=HistoryUnitStatus.SUBMITTED,
             logfile=None,  # FIXME
             zarr_urls=[image["zarr_url"]],
         )
@@ -367,12 +369,12 @@ def run_v2_task_parallel(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id.in_(history_unit_ids_done))
-            .values(status=XXXStatus.DONE)
+            .values(status=HistoryUnitStatus.DONE)
         )
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id.in_(history_unit_ids_failed))
-            .values(status=XXXStatus.FAILED)
+            .values(status=HistoryUnitStatus.FAILED)
         )
         db.commit()

@@ -421,7 +423,7 @@ def run_v2_task_compound(
     # Create a single `HistoryUnit` for the whole compound task
     history_unit = HistoryUnit(
         history_run_id=history_run_id,
-        status=XXXStatus.SUBMITTED,
+        status=HistoryUnitStatus.SUBMITTED,
         logfile=None,  # FIXME
         zarr_urls=input_image_zarr_urls,
     )
@@ -467,7 +469,7 @@ def run_v2_task_compound(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.FAILED)
+            .values(status=HistoryUnitStatus.FAILED)
         )
         db.commit()
         return (TaskOutput(), num_tasks, {0: exception})
@@ -485,7 +487,7 @@ def run_v2_task_compound(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.DONE)
+            .values(status=HistoryUnitStatus.DONE)
         )
         db.commit()
         return (TaskOutput(), 0, {})
@@ -535,13 +537,13 @@ def run_v2_task_compound(
            db.execute(
                update(HistoryUnit)
                .where(HistoryUnit.id == history_unit_id)
-                .values(status=XXXStatus.FAILED)
+                .values(status=HistoryUnitStatus.FAILED)
            )
        else:
            db.execute(
                update(HistoryUnit)
                .where(HistoryUnit.id == history_unit_id)
-                .values(status=XXXStatus.DONE)
+                .values(status=HistoryUnitStatus.DONE)
            )
        db.commit()

@@ -586,7 +588,7 @@ def run_v2_task_converter_compound(
     # Create a single `HistoryUnit` for the whole compound task
     history_unit = HistoryUnit(
         history_run_id=history_run_id,
-        status=XXXStatus.SUBMITTED,
+        status=HistoryUnitStatus.SUBMITTED,
         logfile=None,  # FIXME
         zarr_urls=[],
     )
@@ -619,7 +621,7 @@ def run_v2_task_converter_compound(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.FAILED)
+            .values(status=HistoryUnitStatus.FAILED)
         )
         db.commit()
         return (TaskOutput(), num_tasks, {0: exception})
@@ -637,7 +639,7 @@ def run_v2_task_converter_compound(
         db.execute(
             update(HistoryUnit)
             .where(HistoryUnit.id == history_unit_id)
-            .values(status=XXXStatus.DONE)
+            .values(status=HistoryUnitStatus.DONE)
         )
         db.commit()
         return (TaskOutput(), 0, {})
@@ -687,13 +689,13 @@ def run_v2_task_converter_compound(
            db.execute(
                update(HistoryUnit)
                .where(HistoryUnit.id == history_unit_id)
-                .values(status=XXXStatus.FAILED)
+                .values(status=HistoryUnitStatus.FAILED)
            )
        else:
            db.execute(
                update(HistoryUnit)
                .where(HistoryUnit.id == history_unit_id)
-                .values(status=XXXStatus.DONE)
+                .values(status=HistoryUnitStatus.DONE)
            )
        db.commit()

@@ -7,8 +7,14 @@ from .dataset import DatasetUpdateV2  # noqa F401
 from .dumps import DatasetDumpV2  # noqa F401
 from .dumps import ProjectDumpV2  # noqa F401
 from .dumps import TaskDumpV2  # noqa F401
+from .dumps import TaskGroupDumpV2  # noqa F401
 from .dumps import WorkflowDumpV2  # noqa F401
 from .dumps import WorkflowTaskDumpV2  # noqa F401
+from .history import HistoryRunReadAggregated  # noqa F401
+from .history import HistoryUnitRead  # noqa F401
+from .history import HistoryUnitStatus  # noqa F401
+from .history import ImageLogsRequest  # noqa F401
+from .history import ZarrUrlAndStatus  # noqa F401
 from .job import JobCreateV2  # noqa F401
 from .job import JobReadV2  # noqa F401
 from .job import JobStatusTypeV2  # noqa F401
@@ -18,6 +24,7 @@ from .manifest import TaskManifestV2  # noqa F401
 from .project import ProjectCreateV2  # noqa F401
 from .project import ProjectReadV2  # noqa F401
 from .project import ProjectUpdateV2  # noqa F401
+from .status_legacy import WorkflowTaskStatusTypeV2  # noqa F401
 from .task import TaskCreateV2  # noqa F401
 from .task import TaskExportV2  # noqa F401
 from .task import TaskImportV2  # noqa F401
@@ -47,5 +54,4 @@ from .workflowtask import WorkflowTaskImportV2  # noqa F401
 from .workflowtask import WorkflowTaskReadV2  # noqa F401
 from .workflowtask import WorkflowTaskReadV2WithWarning  # noqa F401
 from .workflowtask import WorkflowTaskReplaceV2  # noqa F401
-from .workflowtask import WorkflowTaskStatusTypeV2  # noqa F401
 from .workflowtask import WorkflowTaskUpdateV2  # noqa F401
@@ -1,18 +1,20 @@
 """
-
 Dump models differ from their Read counterpart in that:
 * They are directly JSON-able, without any additional encoder.
-* They may only include a subset of the Read attributes.
+* They may include only a subset of the available fields.

 These models are used in at least two situations:
 1. In the "*_dump" attributes of Job models;
-2. In the `_DatasetHistoryItem.workflowtask` model, to trim its size.
+2. In the history items, to trim their size.
 """
 from typing import Optional

 from pydantic import BaseModel
 from pydantic import ConfigDict
+from pydantic import Field

+from .task import TaskTypeType
+from .task_group import TaskGroupV2OriginEnum
 from fractal_server.images.models import AttributeFiltersType


@@ -26,7 +28,7 @@ class ProjectDumpV2(BaseModel):
 class TaskDumpV2(BaseModel):
     id: int
     name: str
-    type: str
+    type: TaskTypeType

     command_non_parallel: Optional[str] = None
     command_parallel: Optional[str] = None
@@ -72,3 +74,17 @@ class DatasetDumpV2(BaseModel):
     zarr_dir: str
     type_filters: dict[str, bool]
     attribute_filters: AttributeFiltersType
+
+
+class TaskGroupDumpV2(BaseModel):
+    id: int
+    origin: TaskGroupV2OriginEnum
+    pkg_name: str
+    version: Optional[str] = None
+    python_version: Optional[str] = None
+    pip_extras: Optional[str] = None
+    pinned_package_versions: dict[str, str] = Field(default_factory=dict)
+
+    path: Optional[str] = None
+    venv_path: Optional[str] = None
+    wheel_path: Optional[str] = None
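The runner hunk earlier in this diff builds `task_group_dump` via `TaskGroupDumpV2(**task_group.model_dump()).model_dump()`. A minimal sketch of that pattern (not part of the diff), with hypothetical field values and assuming `"pypi"` is a valid `TaskGroupV2OriginEnum` value:

```python
# Hypothetical sketch: keys not declared on TaskGroupDumpV2 (e.g. timestamps)
# are dropped when re-dumping through the schema, since pydantic models ignore
# extra keys by default.
from fractal_server.app.schemas.v2 import TaskGroupDumpV2

raw = {
    "id": 1,
    "origin": "pypi",
    "pkg_name": "fractal-tasks-core",
    "version": "1.4.0",
    "timestamp_created": "2024-01-01T00:00:00+00:00",  # not a dump field
}
task_group_dump = TaskGroupDumpV2(**raw).model_dump()
assert "timestamp_created" not in task_group_dump
```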
@@ -0,0 +1,54 @@
+from datetime import datetime
+from enum import Enum
+from typing import Any
+from typing import Optional
+
+from pydantic import AwareDatetime
+from pydantic import BaseModel
+from pydantic import field_serializer
+
+
+class HistoryUnitStatus(str, Enum):
+    """
+    Available status for images
+
+    Attributes:
+        SUBMITTED:
+        DONE:
+        FAILED:
+    """
+
+    SUBMITTED = "submitted"
+    DONE = "done"
+    FAILED = "failed"
+
+
+class HistoryUnitRead(BaseModel):
+    id: int
+    logfile: Optional[str] = None
+    status: HistoryUnitStatus
+    zarr_urls: list[str]
+
+
+class HistoryRunReadAggregated(BaseModel):
+    id: int
+    timestamp_started: AwareDatetime
+    workflowtask_dump: dict[str, Any]
+    num_submitted_units: int
+    num_done_units: int
+    num_failed_units: int
+
+    @field_serializer("timestamp_started")
+    def serialize_datetime(v: datetime) -> str:
+        return v.isoformat()
+
+
+class ImageLogsRequest(BaseModel):
+    workflowtask_id: int
+    dataset_id: int
+    zarr_url: str
+
+
+class ZarrUrlAndStatus(BaseModel):
+    zarr_url: str
+    status: Optional[HistoryUnitStatus] = None
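A small sketch (not part of the diff) of how the new history schemas behave: the `HistoryUnitStatus` values and the ISO-8601 serialization of `timestamp_started` on `HistoryRunReadAggregated`. The id and unit counts below are made up:

```python
from datetime import datetime, timezone

from fractal_server.app.schemas.v2 import (
    HistoryRunReadAggregated,
    HistoryUnitStatus,
)

assert HistoryUnitStatus.DONE.value == "done"

run = HistoryRunReadAggregated(
    id=1,
    timestamp_started=datetime(2025, 1, 1, tzinfo=timezone.utc),
    workflowtask_dump={},
    num_submitted_units=0,
    num_done_units=3,
    num_failed_units=1,
)
# `timestamp_started` is dumped as an ISO-8601 string via the field_serializer
print(run.model_dump()["timestamp_started"])  # 2025-01-01T00:00:00+00:00
```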
@@ -0,0 +1,35 @@
+from enum import Enum
+
+from pydantic import BaseModel
+from pydantic import Field
+
+
+class WorkflowTaskStatusTypeV2(str, Enum):
+    """
+    Define the available values for the status of a `WorkflowTask`.
+
+    This model is used within the `Dataset.history` attribute, which is
+    constructed in the runner and then used in the API (e.g. in the
+    `api/v2/project/{project_id}/dataset/{dataset_id}/status` endpoint).
+
+    Attributes:
+        SUBMITTED: The `WorkflowTask` is part of a running job.
+        DONE: The most-recent execution of this `WorkflowTask` was successful.
+        FAILED: The most-recent execution of this `WorkflowTask` failed.
+    """
+
+    SUBMITTED = "submitted"
+    DONE = "done"
+    FAILED = "failed"
+
+
+class LegacyStatusReadV2(BaseModel):
+    """
+    Response type for the
+    `/project/{project_id}/status/` endpoint
+    """
+
+    status: dict[
+        str,
+        WorkflowTaskStatusTypeV2,
+    ] = Field(default_factory=dict)
@@ -135,7 +135,7 @@ class TaskCreateV2(BaseModel):
 class TaskReadV2(BaseModel):
     id: int
     name: str
-    type: Literal["parallel", "non_parallel", "compound"]
+    type: TaskTypeType
     source: Optional[str] = None
     version: Optional[str] = None

@@ -158,8 +158,6 @@ class TaskReadV2(BaseModel):
     authors: Optional[str] = None
     tags: list[str]

-    type: Optional[TaskTypeType] = None
-

 class TaskUpdateV2(BaseModel):
     model_config = ConfigDict(extra="forbid")
@@ -1,4 +1,3 @@
-from enum import Enum
 from typing import Any
 from typing import Optional
 from typing import Union
@@ -21,25 +20,6 @@ from .task import TaskTypeType
 RESERVED_ARGUMENTS = {"zarr_dir", "zarr_url", "zarr_urls", "init_args"}


-class WorkflowTaskStatusTypeV2(str, Enum):
-    """
-    Define the available values for the status of a `WorkflowTask`.
-
-    This model is used within the `Dataset.history` attribute, which is
-    constructed in the runner and then used in the API (e.g. in the
-    `api/v2/project/{project_id}/dataset/{dataset_id}/status` endpoint).
-
-    Attributes:
-        SUBMITTED: The `WorkflowTask` is part of a running job.
-        DONE: The most-recent execution of this `WorkflowTask` was successful.
-        FAILED: The most-recent execution of this `WorkflowTask` failed.
-    """
-
-    SUBMITTED = "submitted"
-    DONE = "done"
-    FAILED = "failed"
-
-
 class WorkflowTaskCreateV2(BaseModel):
     model_config = ConfigDict(extra="forbid")

@@ -1,8 +1,7 @@
 Metadata-Version: 2.3
 Name: fractal-server
-Version: 2.14.0a7
+Version: 2.14.0a9
 Summary: Backend component of the Fractal analytics platform
-Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause
 Author: Tommaso Comparin
 Author-email: tommaso.comparin@exact-lab.it
@@ -29,6 +28,7 @@ Requires-Dist: sqlmodel (==0.0.22)
 Requires-Dist: uvicorn (>=0.29.0,<0.35.0)
 Requires-Dist: uvicorn-worker (==0.3.0)
 Project-URL: Documentation, https://fractal-analytics-platform.github.io/fractal-server
+Project-URL: Homepage, https://github.com/fractal-analytics-platform/fractal-server
 Project-URL: Repository, https://github.com/fractal-analytics-platform/fractal-server
 Project-URL: changelog, https://github.com/fractal-analytics-platform/fractal-server/blob/main/CHANGELOG.md
 Description-Content-Type: text/markdown