fractal-server 2.14.0a8__py3-none-any.whl → 2.14.0a10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/v2/dataset.py +0 -10
  3. fractal_server/app/models/v2/job.py +3 -0
  4. fractal_server/app/routes/api/v2/__init__.py +2 -0
  5. fractal_server/app/routes/api/v2/history.py +14 -9
  6. fractal_server/app/routes/api/v2/images.py +5 -2
  7. fractal_server/app/routes/api/v2/submit.py +16 -14
  8. fractal_server/app/routes/api/v2/verify_image_types.py +64 -0
  9. fractal_server/app/routes/api/v2/workflow.py +27 -60
  10. fractal_server/app/runner/executors/slurm_ssh/_check_job_status_ssh.py +67 -0
  11. fractal_server/app/runner/executors/slurm_ssh/runner.py +711 -0
  12. fractal_server/app/runner/executors/slurm_sudo/runner.py +76 -30
  13. fractal_server/app/runner/v2/__init__.py +1 -0
  14. fractal_server/app/runner/v2/_local.py +2 -0
  15. fractal_server/app/runner/v2/_slurm_ssh.py +2 -0
  16. fractal_server/app/runner/v2/_slurm_sudo.py +2 -0
  17. fractal_server/app/runner/v2/runner.py +6 -8
  18. fractal_server/app/runner/v2/runner_functions.py +9 -4
  19. fractal_server/app/schemas/v2/dataset.py +4 -71
  20. fractal_server/app/schemas/v2/dumps.py +6 -5
  21. fractal_server/app/schemas/v2/job.py +6 -3
  22. fractal_server/migrations/versions/47351f8c7ebc_drop_dataset_filters.py +50 -0
  23. fractal_server/migrations/versions/e81103413827_add_job_type_filters.py +36 -0
  24. {fractal_server-2.14.0a8.dist-info → fractal_server-2.14.0a10.dist-info}/METADATA +1 -1
  25. {fractal_server-2.14.0a8.dist-info → fractal_server-2.14.0a10.dist-info}/RECORD +29 -24
  26. /fractal_server/app/runner/executors/{slurm_sudo → slurm_common}/_check_jobs_status.py +0 -0
  27. {fractal_server-2.14.0a8.dist-info → fractal_server-2.14.0a10.dist-info}/LICENSE +0 -0
  28. {fractal_server-2.14.0a8.dist-info → fractal_server-2.14.0a10.dist-info}/WHEEL +0 -0
  29. {fractal_server-2.14.0a8.dist-info → fractal_server-2.14.0a10.dist-info}/entry_points.txt +0 -0
@@ -6,6 +6,7 @@ import shlex
 import subprocess  # nosec
 import sys
 import time
+from copy import copy
 from pathlib import Path
 from typing import Any
 from typing import Optional
@@ -14,7 +15,9 @@ import cloudpickle
 from pydantic import BaseModel
 from pydantic import ConfigDict
 
-from ._check_jobs_status import get_finished_jobs
+from ..slurm_common._check_jobs_status import (
+    get_finished_jobs,
+)
 from ._subprocess_run_as_user import _mkdir_as_user
 from ._subprocess_run_as_user import _run_command_as_user
 from fractal_server import __VERSION__
@@ -132,6 +135,14 @@ class SlurmJob(BaseModel):
             self.workdir_remote / f"slurm-{self.label}-submit.sh"
         ).as_posix()
 
+    @property
+    def slurm_stdout(self) -> str:
+        return (self.workdir_remote / f"slurm-{self.label}.out").as_posix()
+
+    @property
+    def slurm_stderr(self) -> str:
+        return (self.workdir_remote / f"slurm-{self.label}.err").as_posix()
+
     @property
     def log_files_local(self) -> list[str]:
         return [task.task_files.log_file_local for task in self.tasks]
@@ -276,9 +287,6 @@ class RunnerSlurmSudo(BaseRunner):
         slurm_job: SlurmJob,
         slurm_config: SlurmConfig,
     ) -> str:
-        # if len(slurm_job.tasks) > 1:
-        #     raise NotImplementedError()
-
         # Prepare input pickle(s)
         versions = dict(
             python=sys.version_info[:3],
@@ -291,40 +299,76 @@ class RunnerSlurmSudo(BaseRunner):
             funcser = cloudpickle.dumps((versions, func, _args, _kwargs))
             with open(task.input_pickle_file_local, "wb") as f:
                 f.write(funcser)
-
         # Prepare commands to be included in SLURM submission script
-
-        preamble_lines = [
-            "#!/bin/bash",
-            "#SBATCH --partition=main",
-            "#SBATCH --ntasks=1",
-            "#SBATCH --cpus-per-task=1",
-            "#SBATCH --mem=10M",
-            f"#SBATCH --err={slurm_job.slurm_log_file_remote}",
-            f"#SBATCH --out={slurm_job.slurm_log_file_remote}",
-            f"#SBATCH -D {slurm_job.workdir_remote}",
-            "#SBATCH --job-name=test",
-            "\n",
-        ]
-
+        settings = Inject(get_settings)
+        python_worker_interpreter = (
+            settings.FRACTAL_SLURM_WORKER_PYTHON or sys.executable
+        )
         cmdlines = []
         for task in slurm_job.tasks:
-            cmd = (
-                f"{self.python_worker_interpreter}"
-                " -m fractal_server.app.runner.executors.slurm_common.remote "
-                f"--input-file {task.input_pickle_file_local} "
-                f"--output-file {task.output_pickle_file_remote}"
-            )
-            cmdlines.append("whoami")
+            input_pickle_file = task.input_pickle_file_local
+            output_pickle_file = task.output_pickle_file_remote
             cmdlines.append(
-                f"srun --ntasks=1 --cpus-per-task=1 --mem=10MB {cmd} &"
+                (
+                    f"{python_worker_interpreter}"
+                    " -m fractal_server.app.runner."
+                    "executors.slurm_common.remote "
+                    f"--input-file {input_pickle_file} "
+                    f"--output-file {output_pickle_file}"
+                )
             )
-        cmdlines.append("wait\n")
+
+        # ...
+        num_tasks_max_running = slurm_config.parallel_tasks_per_job
+        mem_per_task_MB = slurm_config.mem_per_task_MB
+
+        # Set ntasks
+        ntasks = min(len(cmdlines), num_tasks_max_running)
+        slurm_config.parallel_tasks_per_job = ntasks
+
+        # Prepare SLURM preamble based on SlurmConfig object
+        script_lines = slurm_config.to_sbatch_preamble(
+            remote_export_dir=self.user_cache_dir
+        )
+
+        # Extend SLURM preamble with variable which are not in SlurmConfig, and
+        # fix their order
+        script_lines.extend(
+            [
+                f"#SBATCH --err={slurm_job.slurm_stderr}",
+                f"#SBATCH --out={slurm_job.slurm_stdout}",
+                f"#SBATCH -D {slurm_job.workdir_remote}",
+            ]
+        )
+        script_lines = slurm_config.sort_script_lines(script_lines)
+        logger.debug(script_lines)
+
+        # Always print output of `uname -n` and `pwd`
+        script_lines.append(
+            '"Hostname: `uname -n`; current directory: `pwd`"\n'
+        )
+
+        # Complete script preamble
+        script_lines.append("\n")
+
+        # Include command lines
+        tmp_list_commands = copy(cmdlines)
+        while tmp_list_commands:
+            if tmp_list_commands:
+                cmd = tmp_list_commands.pop(0)  # take first element
+                script_lines.append(
+                    "srun --ntasks=1 --cpus-per-task=$SLURM_CPUS_PER_TASK "
+                    f"--mem={mem_per_task_MB}MB "
+                    f"{cmd} &"
+                )
+        script_lines.append("wait\n")
+
+        script = "\n".join(script_lines)
 
         # Write submission script
-        submission_script_contents = "\n".join(preamble_lines + cmdlines)
+        # submission_script_contents = "\n".join(preamble_lines + cmdlines)
         with open(slurm_job.slurm_submission_script_local, "w") as f:
-            f.write(submission_script_contents)
+            f.write(script)
 
         # Run sbatch
         pre_command = f"sudo --set-home --non-interactive -u {self.slurm_user}"
@@ -468,6 +512,8 @@ class RunnerSlurmSudo(BaseRunner):
                 )
             ],
         )  # TODO: replace with actual values (BASED ON TASKFILES)
+
+        slurm_config.parallel_tasks_per_job = 1
         self._submit_single_sbatch(
             func,
             slurm_job=slurm_job,
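
For orientation, here is a minimal standalone sketch of the script-assembly loop introduced above, assuming `cmdlines` and `mem_per_task_MB` have already been computed; `build_script_body` is a hypothetical helper, not part of fractal-server. Each command becomes a backgrounded `srun` step, and a single `wait` keeps the batch job alive until all steps finish.

from copy import copy


def build_script_body(cmdlines: list[str], mem_per_task_MB: int) -> str:
    # Simplified restatement of the new logic: one backgrounded `srun`
    # step per command, followed by a single `wait`.
    lines = []
    tmp_list_commands = copy(cmdlines)
    while tmp_list_commands:
        cmd = tmp_list_commands.pop(0)  # take first element
        lines.append(
            "srun --ntasks=1 --cpus-per-task=$SLURM_CPUS_PER_TASK "
            f"--mem={mem_per_task_MB}MB {cmd} &"
        )
    lines.append("wait\n")
    return "\n".join(lines)


# Example with two placeholder commands and 1000 MB per task.
print(build_script_body(["echo task-0", "echo task-1"], mem_per_task_MB=1000))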
@@ -299,6 +299,7 @@ def submit_workflow(
             first_task_index=job.first_task_index,
             last_task_index=job.last_task_index,
             job_attribute_filters=job.attribute_filters,
+            job_type_filters=job.type_filters,
             **backend_specific_kwargs,
         )
 
@@ -20,6 +20,7 @@ def process_workflow(
     last_task_index: Optional[int] = None,
     logger_name: str,
     job_attribute_filters: AttributeFiltersType,
+    job_type_filters: dict[str, bool],
     user_id: int,
     **kwargs,
 ) -> None:
@@ -80,5 +81,6 @@ def process_workflow(
         logger_name=logger_name,
         submit_setup_call=_local_submit_setup,
         job_attribute_filters=job_attribute_filters,
+        job_type_filters=job_type_filters,
         user_id=user_id,
     )
@@ -43,6 +43,7 @@ def process_workflow(
     last_task_index: Optional[int] = None,
     logger_name: str,
     job_attribute_filters: AttributeFiltersType,
+    job_type_filters: dict[str, bool],
     fractal_ssh: FractalSSH,
     worker_init: Optional[str] = None,
     user_id: int,
@@ -92,5 +93,6 @@ def process_workflow(
         logger_name=logger_name,
         submit_setup_call=_slurm_submit_setup,
         job_attribute_filters=job_attribute_filters,
+        job_type_filters=job_type_filters,
         user_id=user_id,
     )
@@ -38,6 +38,7 @@ def process_workflow(
     last_task_index: Optional[int] = None,
     logger_name: str,
     job_attribute_filters: AttributeFiltersType,
+    job_type_filters: dict[str, bool],
     user_id: int,
     # Slurm-specific
     user_cache_dir: Optional[str] = None,
@@ -84,5 +85,6 @@ def process_workflow(
         logger_name=logger_name,
         submit_setup_call=_slurm_submit_setup,
         job_attribute_filters=job_attribute_filters,
+        job_type_filters=job_type_filters,
         user_id=user_id,
     )
@@ -42,6 +42,7 @@ def execute_tasks_v2(
     workflow_dir_remote: Optional[Path] = None,
     logger_name: Optional[str] = None,
     submit_setup_call: callable = no_op_submit_setup_call,
+    job_type_filters: dict[str, bool],
     job_attribute_filters: AttributeFiltersType,
 ) -> None:
     logger = logging.getLogger(logger_name)
@@ -56,7 +57,7 @@ def execute_tasks_v2(
     # Initialize local dataset attributes
     zarr_dir = dataset.zarr_dir
     tmp_images = deepcopy(dataset.images)
-    current_dataset_type_filters = deepcopy(dataset.type_filters)
+    current_dataset_type_filters = copy(job_type_filters)
 
     for wftask in wf_task_list:
         task = wftask.task
@@ -124,7 +125,7 @@ def execute_tasks_v2(
                 task=task,
                 workflow_dir_local=workflow_dir_local,
                 workflow_dir_remote=workflow_dir_remote,
-                executor=runner,
+                runner=runner,
                 submit_setup_call=submit_setup_call,
                 history_run_id=history_run_id,
                 dataset_id=dataset.id,
@@ -152,7 +153,7 @@ def execute_tasks_v2(
                 task=task,
                 workflow_dir_local=workflow_dir_local,
                 workflow_dir_remote=workflow_dir_remote,
-                executor=runner,
+                runner=runner,
                 submit_setup_call=submit_setup_call,
                 history_run_id=history_run_id,
                 dataset_id=dataset.id,
@@ -333,13 +334,10 @@ def execute_tasks_v2(
         current_dataset_type_filters.update(type_filters_from_task_manifest)
 
         with next(get_sync_db()) as db:
-            # Write current dataset attributes (history + filters) into the
-            # database.
+            # Write current dataset images into the database.
             db_dataset = db.get(DatasetV2, dataset.id)
-            db_dataset.type_filters = current_dataset_type_filters
             db_dataset.images = tmp_images
-            for attribute_name in ["type_filters", "images"]:
-                flag_modified(db_dataset, attribute_name)
+            flag_modified(db_dataset, "images")
             db.merge(db_dataset)
             db.commit()
             db.close()  # FIXME: why is this needed?
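
A short sketch of the new filter seeding, with hypothetical filter values: the per-run type filters now start from the job payload (`job_type_filters`) and are then updated from each task's manifest, instead of being read from and written back to the dataset.

from copy import copy

# Hypothetical filter values, for illustration only.
job_type_filters = {"3D": True}
type_filters_from_task_manifest = {"registered": True}

# As in execute_tasks_v2 above: seed from the job, then update per task.
current_dataset_type_filters = copy(job_type_filters)
current_dataset_type_filters.update(type_filters_from_task_manifest)
print(current_dataset_type_filters)  # {'3D': True, 'registered': True}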
@@ -33,6 +33,8 @@ __all__ = [
     "run_v2_task_converter_compound",
 ]
 
+# FIXME: Review whether we need 5 functions or 3 are enough
+
 MAX_PARALLELIZATION_LIST_SIZE = 20_000
 
 
@@ -93,7 +95,7 @@ def run_v2_task_non_parallel(
     wftask: WorkflowTaskV2,
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
-    executor: BaseRunner,
+    runner: BaseRunner,
     submit_setup_call: callable = no_op_submit_setup_call,
     dataset_id: int,
     history_run_id: int,
@@ -148,7 +150,7 @@ def run_v2_task_non_parallel(
         ],
     )
 
-    result, exception = executor.submit(
+    result, exception = runner.submit(
         functools.partial(
             run_single_task,
             wftask=wftask,
@@ -274,7 +276,7 @@ def run_v2_task_parallel(
     images: list[dict[str, Any]],
     task: TaskV2,
     wftask: WorkflowTaskV2,
-    executor: BaseRunner,
+    runner: BaseRunner,
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
     submit_setup_call: callable = no_op_submit_setup_call,
@@ -334,7 +336,7 @@ def run_v2_task_parallel(
         db=db, list_upsert_objects=history_image_caches
     )
 
-    results, exceptions = executor.multisubmit(
+    results, exceptions = runner.multisubmit(
         functools.partial(
             run_single_task,
             wftask=wftask,
@@ -396,6 +398,9 @@ def run_v2_task_compound(
     dataset_id: int,
     history_run_id: int,
 ) -> tuple[TaskOutput, int, dict[int, BaseException]]:
+    # FIXME: Add task_files as a required argument, rather than a kwargs
+    # through executor_options_init
+
     executor_options_init = submit_setup_call(
         wftask=wftask,
         root_dir_local=workflow_dir_local,
@@ -1,5 +1,4 @@
 from datetime import datetime
-from typing import Any
 from typing import Optional
 
 from pydantic import BaseModel
@@ -10,8 +9,6 @@ from pydantic import field_validator
 from pydantic import model_validator
 from pydantic.types import AwareDatetime
 
-from .._filter_validators import validate_attribute_filters
-from .._filter_validators import validate_type_filters
 from .._validators import cant_set_none
 from .._validators import NonEmptyString
 from .._validators import root_validate_dict_keys
@@ -28,7 +25,6 @@ class DatasetCreateV2(BaseModel):
 
     zarr_dir: Optional[str] = None
 
-    type_filters: dict[str, bool] = Field(default_factory=dict)
     attribute_filters: AttributeFiltersType = Field(default_factory=dict)
 
     # Validators
@@ -36,12 +32,6 @@ class DatasetCreateV2(BaseModel):
     _dict_keys = model_validator(mode="before")(
         classmethod(root_validate_dict_keys)
     )
-    _type_filters = field_validator("type_filters")(
-        classmethod(validate_type_filters)
-    )
-    _attribute_filters = field_validator("attribute_filters")(
-        classmethod(validate_attribute_filters)
-    )
 
     @field_validator("zarr_dir")
     @classmethod
@@ -61,8 +51,6 @@ class DatasetReadV2(BaseModel):
     timestamp_created: AwareDatetime
 
     zarr_dir: str
-    type_filters: dict[str, bool]
-    attribute_filters: AttributeFiltersType
 
     @field_serializer("timestamp_created")
     def serialize_datetime(v: datetime) -> str:
@@ -74,20 +62,12 @@ class DatasetUpdateV2(BaseModel):
 
     name: Optional[NonEmptyString] = None
     zarr_dir: Optional[str] = None
-    type_filters: Optional[dict[str, bool]] = None
-    attribute_filters: Optional[dict[str, list[Any]]] = None
 
     # Validators
 
     _dict_keys = model_validator(mode="before")(
         classmethod(root_validate_dict_keys)
     )
-    _type_filters = field_validator("type_filters")(
-        classmethod(validate_type_filters)
-    )
-    _attribute_filters = field_validator("attribute_filters")(
-        classmethod(validate_attribute_filters)
-    )
 
     @field_validator("name")
     @classmethod
@@ -106,63 +86,20 @@ class DatasetImportV2(BaseModel):
     """
     Class for `Dataset` import.
 
+    We are dropping `model_config = ConfigDict(extra="forbid")` so that any
+    kind of legacy filters can be included in the payload, and ignored in the
+    API.
+
     Attributes:
         name:
         zarr_dir:
        images:
-        filters:
-        type_filters:
-        attribute_filters:
    """
 
-    model_config = ConfigDict(extra="forbid")
-
    name: str
    zarr_dir: str
    images: list[SingleImage] = Field(default_factory=list)
 
-    filters: Optional[dict[str, Any]] = None
-    type_filters: dict[str, bool] = Field(default_factory=dict)
-    attribute_filters: AttributeFiltersType = Field(default_factory=dict)
-
-    @model_validator(mode="before")
-    @classmethod
-    def update_legacy_filters(cls, values: dict):
-        """
-        Transform legacy filters (created with fractal-server<2.11.0)
-        into attribute/type filters
-        """
-        if values.get("filters") is not None:
-            if (
-                "type_filters" in values.keys()
-                or "attribute_filters" in values.keys()
-            ):
-                raise ValueError(
-                    "Cannot set filters both through the legacy field "
-                    "('filters') and the new ones ('type_filters' and/or "
-                    "'attribute_filters')."
-                )
-
-            else:
-                # Convert legacy filters.types into new type_filters
-                values["type_filters"] = values["filters"].get("types", {})
-                values["attribute_filters"] = {
-                    key: [value]
-                    for key, value in values["filters"]
-                    .get("attributes", {})
-                    .items()
-                }
-                values["filters"] = None
-
-        return values
-
-    _type_filters = field_validator("type_filters")(
-        classmethod(validate_type_filters)
-    )
-    _attribute_filters = field_validator("attribute_filters")(
-        classmethod(validate_attribute_filters)
-    )
-
    @field_validator("zarr_dir")
    @classmethod
    def normalize_zarr_dir(cls, v: str) -> str:
@@ -177,12 +114,8 @@ class DatasetExportV2(BaseModel):
        name:
        zarr_dir:
        images:
-        type_filters:
-        attribute_filters:
    """
 
    name: str
    zarr_dir: str
    images: list[SingleImage]
-    type_filters: dict[str, bool]
-    attribute_filters: AttributeFiltersType
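
A standalone sketch of the pydantic behavior the `DatasetImportV2` change relies on (the model names below are hypothetical): with pydantic v2, omitting `extra="forbid"` leaves the default `extra="ignore"` in place, so legacy keys such as `type_filters` are dropped silently instead of raising a validation error.

from pydantic import BaseModel, ConfigDict, ValidationError


class StrictImport(BaseModel):
    model_config = ConfigDict(extra="forbid")
    name: str


class LenientImport(BaseModel):
    # No model_config: pydantic v2 defaults to extra="ignore".
    name: str


payload = {"name": "my-dataset", "type_filters": {"3D": True}}
print(LenientImport(**payload))  # legacy key is ignored: name='my-dataset'
try:
    StrictImport(**payload)
except ValidationError as err:
    print("rejected:", err.error_count(), "error(s)")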
@@ -15,7 +15,6 @@ from pydantic import Field
 
 from .task import TaskTypeType
 from .task_group import TaskGroupV2OriginEnum
-from fractal_server.images.models import AttributeFiltersType
 
 
 class ProjectDumpV2(BaseModel):
@@ -65,15 +64,17 @@ class WorkflowDumpV2(BaseModel):
 
 
 class DatasetDumpV2(BaseModel):
-    model_config = ConfigDict(extra="forbid")
+    """
+    We do not include 'model_config = ConfigDict(extra="forbid")' because
+    legacy data may include 'type_filters' or 'attribute_filters' and we
+    want to avoid response-validation errors.
+    """
+
     id: int
     name: str
     project_id: int
     timestamp_created: str
-
     zarr_dir: str
-    type_filters: dict[str, bool]
-    attribute_filters: AttributeFiltersType
 
 
 class TaskGroupDumpV2(BaseModel):
@@ -13,6 +13,7 @@ from pydantic.types import AwareDatetime
 from pydantic.types import StrictStr
 
 from .._filter_validators import validate_attribute_filters
+from .._filter_validators import validate_type_filters
 from .._validators import cant_set_none
 from .._validators import NonEmptyString
 from .._validators import root_validate_dict_keys
@@ -44,7 +45,6 @@ class JobStatusTypeV2(str, Enum):
 
 
 class JobCreateV2(BaseModel):
-
     model_config = ConfigDict(extra="forbid")
 
     first_task_index: Optional[int] = None
@@ -53,6 +53,7 @@ class JobCreateV2(BaseModel):
     worker_init: Optional[NonEmptyString] = None
 
     attribute_filters: AttributeFiltersType = Field(default_factory=dict)
+    type_filters: dict[str, bool] = Field(default_factory=dict)
 
     # Validators
 
@@ -67,6 +68,9 @@ class JobCreateV2(BaseModel):
     _attribute_filters = field_validator("attribute_filters")(
         classmethod(validate_attribute_filters)
     )
+    _type_filters = field_validator("type_filters")(
+        classmethod(validate_type_filters)
+    )
 
     @field_validator("first_task_index")
     @classmethod
@@ -104,7 +108,6 @@ class JobCreateV2(BaseModel):
 
 
 class JobReadV2(BaseModel):
-
     id: int
     project_id: Optional[int] = None
     project_dump: ProjectDumpV2
@@ -124,6 +127,7 @@ class JobReadV2(BaseModel):
     last_task_index: Optional[int] = None
     worker_init: Optional[str] = None
     attribute_filters: AttributeFiltersType
+    type_filters: dict[str, bool]
 
     @field_serializer("start_timestamp")
     def serialize_datetime_start(v: datetime) -> str:
@@ -138,7 +142,6 @@ class JobReadV2(BaseModel):
 
 
 class JobUpdateV2(BaseModel):
-
     model_config = ConfigDict(extra="forbid")
 
     status: JobStatusTypeV2
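
For reference, a minimal sketch of the two filter shapes that job creation now carries (the model and values below are hypothetical, shown only to illustrate the shapes): type filters map a type name to a boolean, while attribute filters map an attribute name to a list of allowed values.

from typing import Any

from pydantic import BaseModel, Field


class JobFiltersSketch(BaseModel):
    # Shapes as introduced above; this is not the actual JobCreateV2 model.
    type_filters: dict[str, bool] = Field(default_factory=dict)
    attribute_filters: dict[str, list[Any]] = Field(default_factory=dict)


payload = {
    "type_filters": {"3D": True, "illumination_corrected": False},
    "attribute_filters": {"well": ["A01", "B02"]},
}
print(JobFiltersSketch(**payload))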
@@ -0,0 +1,50 @@
+"""Drop dataset filters
+
+Revision ID: 47351f8c7ebc
+Revises: fbce16ff4e47
+Create Date: 2025-03-26 11:10:17.869028
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "47351f8c7ebc"
+down_revision = "fbce16ff4e47"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.drop_column("type_filters")
+        batch_op.drop_column("attribute_filters")
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "attribute_filters",
+                postgresql.JSON(astext_type=sa.Text()),
+                server_default=sa.text("'{}'::json"),
+                autoincrement=False,
+                nullable=False,
+            )
+        )
+        batch_op.add_column(
+            sa.Column(
+                "type_filters",
+                postgresql.JSON(astext_type=sa.Text()),
+                server_default=sa.text("'{}'::json"),
+                autoincrement=False,
+                nullable=False,
+            )
+        )
+
+    # ### end Alembic commands ###
@@ -0,0 +1,36 @@
+"""Add job.type_filters
+
+Revision ID: e81103413827
+Revises: 47351f8c7ebc
+Create Date: 2025-03-26 11:10:41.748248
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "e81103413827"
+down_revision = "47351f8c7ebc"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "type_filters", sa.JSON(), server_default="{}", nullable=False
+            )
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.drop_column("type_filters")
+
+    # ### end Alembic commands ###
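
For completeness, one common way to apply or roll back these two revisions with Alembic's Python API; the `alembic.ini` path is an assumption, and fractal-server may drive its migrations through its own tooling instead.

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed location of the Alembic configuration
command.upgrade(cfg, "e81103413827")    # apply both revisions shown above
command.downgrade(cfg, "fbce16ff4e47")  # return to the pre-2.14.0a10 schema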
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: fractal-server
-Version: 2.14.0a8
+Version: 2.14.0a10
 Summary: Backend component of the Fractal analytics platform
 License: BSD-3-Clause
 Author: Tommaso Comparin