fractal-server 2.4.2__py3-none-any.whl → 2.5.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/db/__init__.py +4 -1
  3. fractal_server/app/models/v1/task.py +0 -5
  4. fractal_server/app/models/v2/workflowtask.py +2 -10
  5. fractal_server/app/routes/admin/v2.py +0 -30
  6. fractal_server/app/routes/api/v2/__init__.py +0 -4
  7. fractal_server/app/routes/api/v2/_aux_functions.py +11 -46
  8. fractal_server/app/routes/api/v2/workflow.py +23 -54
  9. fractal_server/app/routes/api/v2/workflowtask.py +9 -33
  10. fractal_server/app/routes/auth/_aux_auth.py +11 -5
  11. fractal_server/app/routes/auth/current_user.py +5 -1
  12. fractal_server/app/routes/auth/users.py +9 -8
  13. fractal_server/app/runner/v2/__init__.py +1 -4
  14. fractal_server/app/runner/v2/_slurm_common/get_slurm_config.py +1 -4
  15. fractal_server/app/runner/v2/handle_failed_job.py +2 -9
  16. fractal_server/app/runner/v2/runner.py +42 -70
  17. fractal_server/app/runner/v2/runner_functions.py +0 -58
  18. fractal_server/app/runner/v2/runner_functions_low_level.py +7 -21
  19. fractal_server/app/schemas/user.py +17 -0
  20. fractal_server/app/schemas/v2/__init__.py +0 -1
  21. fractal_server/app/schemas/v2/dumps.py +2 -23
  22. fractal_server/app/schemas/v2/task.py +0 -5
  23. fractal_server/app/schemas/v2/workflowtask.py +4 -29
  24. fractal_server/migrations/env.py +4 -7
  25. fractal_server/migrations/naming_convention.py +7 -0
  26. fractal_server/migrations/versions/091b01f51f88_add_usergroup_and_linkusergroup_table.py +1 -1
  27. fractal_server/migrations/versions/501961cfcd85_remove_link_between_v1_and_v2_tasks_.py +97 -0
  28. {fractal_server-2.4.2.dist-info → fractal_server-2.5.0a1.dist-info}/METADATA +1 -1
  29. {fractal_server-2.4.2.dist-info → fractal_server-2.5.0a1.dist-info}/RECORD +32 -32
  30. fractal_server/app/routes/api/v2/task_legacy.py +0 -59
  31. fractal_server/app/runner/v2/v1_compat.py +0 -31
  32. {fractal_server-2.4.2.dist-info → fractal_server-2.5.0a1.dist-info}/LICENSE +0 -0
  33. {fractal_server-2.4.2.dist-info → fractal_server-2.5.0a1.dist-info}/WHEEL +0 -0
  34. {fractal_server-2.4.2.dist-info → fractal_server-2.5.0a1.dist-info}/entry_points.txt +0 -0
fractal_server/app/runner/v2/runner.py

@@ -17,7 +17,6 @@ from ..filenames import FILTERS_FILENAME
 from ..filenames import HISTORY_FILENAME
 from ..filenames import IMAGES_FILENAME
 from .runner_functions import no_op_submit_setup_call
-from .runner_functions import run_v1_task_parallel
 from .runner_functions import run_v2_task_compound
 from .runner_functions import run_v2_task_non_parallel
 from .runner_functions import run_v2_task_parallel
@@ -53,16 +52,8 @@ def execute_tasks_v2(

     for wftask in wf_task_list:
         task = wftask.task
-        task_legacy = wftask.task_legacy
-        if wftask.is_legacy_task:
-            task_name = task_legacy.name
-            logger.debug(
-                f"SUBMIT {wftask.order}-th task "
-                f'(legacy, name="{task_name}")'
-            )
-        else:
-            task_name = task.name
-            logger.debug(f'SUBMIT {wftask.order}-th task (name="{task_name}")')
+        task_name = task.name
+        logger.debug(f'SUBMIT {wftask.order}-th task (name="{task_name}")')

         # PRE TASK EXECUTION

@@ -78,67 +69,53 @@ def execute_tasks_v2(
             filters=Filters(**pre_filters),
         )
         # Verify that filtered images comply with task input_types
-        if not wftask.is_legacy_task:
-            for image in filtered_images:
-                if not match_filter(image, Filters(types=task.input_types)):
-                    raise JobExecutionError(
-                        "Invalid filtered image list\n"
-                        f"Task input types: {task.input_types=}\n"
-                        f'Image zarr_url: {image["zarr_url"]}\n'
-                        f'Image types: {image["types"]}\n'
-                    )
+        for image in filtered_images:
+            if not match_filter(image, Filters(types=task.input_types)):
+                raise JobExecutionError(
+                    "Invalid filtered image list\n"
+                    f"Task input types: {task.input_types=}\n"
+                    f'Image zarr_url: {image["zarr_url"]}\n'
+                    f'Image types: {image["types"]}\n'
+                )

         # TASK EXECUTION (V2)
-        if not wftask.is_legacy_task:
-            if task.type == "non_parallel":
-                current_task_output = run_v2_task_non_parallel(
-                    images=filtered_images,
-                    zarr_dir=zarr_dir,
-                    wftask=wftask,
-                    task=task,
-                    workflow_dir_local=workflow_dir_local,
-                    workflow_dir_remote=workflow_dir_remote,
-                    executor=executor,
-                    logger_name=logger_name,
-                    submit_setup_call=submit_setup_call,
-                )
-            elif task.type == "parallel":
-                current_task_output = run_v2_task_parallel(
-                    images=filtered_images,
-                    wftask=wftask,
-                    task=task,
-                    workflow_dir_local=workflow_dir_local,
-                    workflow_dir_remote=workflow_dir_remote,
-                    executor=executor,
-                    logger_name=logger_name,
-                    submit_setup_call=submit_setup_call,
-                )
-            elif task.type == "compound":
-                current_task_output = run_v2_task_compound(
-                    images=filtered_images,
-                    zarr_dir=zarr_dir,
-                    wftask=wftask,
-                    task=task,
-                    workflow_dir_local=workflow_dir_local,
-                    workflow_dir_remote=workflow_dir_remote,
-                    executor=executor,
-                    logger_name=logger_name,
-                    submit_setup_call=submit_setup_call,
-                )
-            else:
-                raise ValueError(f"Unexpected error: Invalid {task.type=}.")
-        # TASK EXECUTION (V1)
-        else:
-            current_task_output = run_v1_task_parallel(
+        if task.type == "non_parallel":
+            current_task_output = run_v2_task_non_parallel(
                 images=filtered_images,
+                zarr_dir=zarr_dir,
                 wftask=wftask,
-                task_legacy=task_legacy,
+                task=task,
+                workflow_dir_local=workflow_dir_local,
+                workflow_dir_remote=workflow_dir_remote,
                 executor=executor,
                 logger_name=logger_name,
+                submit_setup_call=submit_setup_call,
+            )
+        elif task.type == "parallel":
+            current_task_output = run_v2_task_parallel(
+                images=filtered_images,
+                wftask=wftask,
+                task=task,
+                workflow_dir_local=workflow_dir_local,
+                workflow_dir_remote=workflow_dir_remote,
+                executor=executor,
+                logger_name=logger_name,
+                submit_setup_call=submit_setup_call,
+            )
+        elif task.type == "compound":
+            current_task_output = run_v2_task_compound(
+                images=filtered_images,
+                zarr_dir=zarr_dir,
+                wftask=wftask,
+                task=task,
                 workflow_dir_local=workflow_dir_local,
                 workflow_dir_remote=workflow_dir_remote,
+                executor=executor,
+                logger_name=logger_name,
                 submit_setup_call=submit_setup_call,
             )
+        else:
+            raise ValueError(f"Unexpected error: Invalid {task.type=}.")

         # POST TASK EXECUTION

@@ -191,8 +168,7 @@ def execute_tasks_v2(
             # Update image attributes/types with task output and manifest
             updated_attributes.update(image["attributes"])
             updated_types.update(image["types"])
-            if not wftask.is_legacy_task:
-                updated_types.update(task.output_types)
+            updated_types.update(task.output_types)

             # Unset attributes with None value
             updated_attributes = {
@@ -249,8 +225,7 @@
                 if value is not None
             }
             updated_types.update(image["types"])
-            if not wftask.is_legacy_task:
-                updated_types.update(task.output_types)
+            updated_types.update(task.output_types)
             new_image = dict(
                 zarr_url=image["zarr_url"],
                 origin=image["origin"],
@@ -282,10 +257,7 @@
         )

         # Find manifest ouptut types
-        if wftask.is_legacy_task:
-            types_from_manifest = {}
-        else:
-            types_from_manifest = task.output_types
+        types_from_manifest = task.output_types

         # Find task-output types
         if current_task_output.filters is not None:
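Note: with the legacy branch gone, the submission loop reduces to a single dispatch on `TaskV2.type`. A minimal, self-contained sketch of that control flow (the `dispatch_v2` helper and the `runners` mapping are illustrative names, not fractal-server API):

    from typing import Any, Callable

    def dispatch_v2(
        task_type: str, runners: dict[str, Callable[..., Any]], **kwargs: Any
    ) -> Any:
        # `runners` maps "non_parallel" / "parallel" / "compound"
        # to a run_v2_task_* callable.
        try:
            runner = runners[task_type]
        except KeyError:
            raise ValueError(f"Unexpected error: Invalid {task_type=}.")
        return runner(**kwargs)

    # Usage with a stub runner:
    print(dispatch_v2("parallel", {"parallel": lambda **kw: "ok"}))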
fractal_server/app/runner/v2/runner_functions.py

@@ -16,8 +16,6 @@ from .merge_outputs import merge_outputs
 from .runner_functions_low_level import run_single_task
 from .task_interface import InitTaskOutput
 from .task_interface import TaskOutput
-from .v1_compat import convert_v2_args_into_v1
-from fractal_server.app.models.v1 import Task as TaskV1
 from fractal_server.app.models.v2 import TaskV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.runner.components import _COMPONENT_KEY_
@@ -28,7 +26,6 @@ __all__ = [
     "run_v2_task_non_parallel",
     "run_v2_task_parallel",
     "run_v2_task_compound",
-    "run_v1_task_parallel",
 ]

 MAX_PARALLELIZATION_LIST_SIZE = 20_000
@@ -317,58 +314,3 @@ def run_v2_task_compound(

     merged_output = merge_outputs(outputs)
     return merged_output
-
-
-def run_v1_task_parallel(
-    *,
-    images: list[dict[str, Any]],
-    task_legacy: TaskV1,
-    wftask: WorkflowTaskV2,
-    executor: Executor,
-    workflow_dir_local: Path,
-    workflow_dir_remote: Optional[Path] = None,
-    logger_name: Optional[str] = None,
-    submit_setup_call: Callable = no_op_submit_setup_call,
-) -> TaskOutput:
-
-    _check_parallelization_list_size(images)
-
-    executor_options = _get_executor_options(
-        wftask=wftask,
-        workflow_dir_local=workflow_dir_local,
-        workflow_dir_remote=workflow_dir_remote,
-        submit_setup_call=submit_setup_call,
-        which_type="parallel",
-    )
-
-    list_function_kwargs = []
-    for ind, image in enumerate(images):
-        list_function_kwargs.append(
-            convert_v2_args_into_v1(
-                kwargs_v2=dict(
-                    zarr_url=image["zarr_url"],
-                    **(wftask.args_parallel or {}),
-                ),
-                parallelization_level=task_legacy.parallelization_level,
-            ),
-        )
-        list_function_kwargs[-1][_COMPONENT_KEY_] = _index_to_component(ind)
-
-    results_iterator = executor.map(
-        functools.partial(
-            run_single_task,
-            wftask=wftask,
-            command=task_legacy.command,
-            workflow_dir_local=workflow_dir_local,
-            workflow_dir_remote=workflow_dir_remote,
-            is_task_v1=True,
-        ),
-        list_function_kwargs,
-        **executor_options,
-    )
-    # Explicitly iterate over the whole list, so that all futures are waited
-    list(results_iterator)
-
-    # Ignore any output metadata for V1 tasks, and return an empty object
-    out = TaskOutput()
-    return out
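The removed helper used a map-and-drain idiom: bind the per-task constants with `functools.partial`, map over the per-image kwargs, then iterate the result so every future is waited on. A self-contained sketch of that idiom (the task command and zarr URLs are made up):

    import functools
    from concurrent.futures import ThreadPoolExecutor

    def run_one(args: dict, *, command: str) -> str:
        # Stand-in for run_single_task: build one per-image invocation.
        return f"{command} --zarr-url {args['zarr_url']}"

    list_function_kwargs = [
        {"zarr_url": f"/tmp/plate.zarr/{i}"} for i in range(3)
    ]
    with ThreadPoolExecutor() as executor:
        results_iterator = executor.map(
            functools.partial(run_one, command="my-task"),
            list_function_kwargs,
        )
        # Explicitly iterate so that all futures are waited on.
        print(list(results_iterator))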
fractal_server/app/runner/v2/runner_functions_low_level.py

@@ -61,7 +61,6 @@ def run_single_task(
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
     logger_name: Optional[str] = None,
-    is_task_v1: bool = False,
 ) -> dict[str, Any]:
     """
     Runs within an executor.
@@ -73,10 +72,7 @@
     if not workflow_dir_remote:
         workflow_dir_remote = workflow_dir_local

-    if is_task_v1:
-        task_name = wftask.task_legacy.name
-    else:
-        task_name = wftask.task.name
+    task_name = wftask.task.name

     component = args.pop(_COMPONENT_KEY_, None)
     task_files = get_task_file_paths(
@@ -92,18 +88,11 @@
         json.dump(args, f, indent=2)

     # Assemble full command
-    if is_task_v1:
-        full_command = (
-            f"{command} "
-            f"--json {task_files.args.as_posix()} "
-            f"--metadata-out {task_files.metadiff.as_posix()}"
-        )
-    else:
-        full_command = (
-            f"{command} "
-            f"--args-json {task_files.args.as_posix()} "
-            f"--out-json {task_files.metadiff.as_posix()}"
-        )
+    full_command = (
+        f"{command} "
+        f"--args-json {task_files.args.as_posix()} "
+        f"--out-json {task_files.metadiff.as_posix()}"
+    )

     try:
         _call_command_wrapper(
@@ -113,10 +102,7 @@
     except TaskExecutionError as e:
         e.workflow_task_order = wftask.order
         e.workflow_task_id = wftask.id
-        if wftask.is_legacy_task:
-            e.task_name = wftask.task_legacy.name
-        else:
-            e.task_name = wftask.task.name
+        e.task_name = wftask.task.name
         raise e

     try:
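After this change only one command-line convention remains: a v2 task is always invoked as `<command> --args-json <args file> --out-json <metadiff file>`. A runnable sketch of the string assembly, with hypothetical paths:

    from pathlib import Path

    command = "python3 /tasks/my_task.py"           # hypothetical task command
    args_file = Path("/tmp/job/0_args.json")        # hypothetical
    metadiff_file = Path("/tmp/job/0_metadiff.json")  # hypothetical

    full_command = (
        f"{command} "
        f"--args-json {args_file.as_posix()} "
        f"--out-json {metadiff_file.as_posix()}"
    )
    print(full_command)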
fractal_server/app/schemas/user.py

@@ -20,6 +20,22 @@ __all__ = (
 )


+class OAuthAccountRead(BaseModel):
+    """
+    Schema for storing essential `OAuthAccount` information within
+    `UserRead.oauth_accounts`.
+
+    Attributes:
+        id: ID of the row in fractal-owned `oauthaccount` table.
+        account_email: Email associated to OAuth account
+        oauth_name: Name of the OAuth provider (e.g. `github`)
+    """
+
+    id: int
+    account_email: str
+    oauth_name: str
+
+
 class UserRead(schemas.BaseUser[int]):
     """
     Schema for `User` read from database.
@@ -37,6 +53,7 @@ class UserRead(schemas.BaseUser[int]):
     slurm_accounts: list[str]
     group_names: Optional[list[str]] = None
     group_ids: Optional[list[int]] = None
+    oauth_accounts: list[OAuthAccountRead]


 class UserUpdate(schemas.BaseUserUpdate):
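A quick sketch of how the new nested schema serializes, assuming Pydantic v1 (the version this codebase targets):

    from pydantic import BaseModel

    class OAuthAccountRead(BaseModel):
        id: int
        account_email: str
        oauth_name: str

    account = OAuthAccountRead(
        id=1, account_email="me@example.org", oauth_name="github"
    )
    print(account.dict())
    # {'id': 1, 'account_email': 'me@example.org', 'oauth_name': 'github'}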
fractal_server/app/schemas/v2/__init__.py

@@ -20,7 +20,6 @@ from .project import ProjectUpdateV2  # noqa F401
 from .task import TaskCreateV2  # noqa F401
 from .task import TaskExportV2  # noqa F401
 from .task import TaskImportV2  # noqa F401
-from .task import TaskLegacyReadV2  # noqa F401
 from .task import TaskReadV2  # noqa F401
 from .task import TaskUpdateV2  # noqa F401
 from .task_collection import CollectionStateReadV2  # noqa F401
fractal_server/app/schemas/v2/dumps.py

@@ -12,9 +12,7 @@ from typing import Optional

 from pydantic import BaseModel
 from pydantic import Extra
-from pydantic import root_validator

-from fractal_server.app.schemas.v1.dumps import TaskDumpV1
 from fractal_server.images import Filters


@@ -45,29 +43,10 @@ class WorkflowTaskDumpV2(BaseModel):
     workflow_id: int
     order: Optional[int]

-    is_legacy_task: bool
-
     input_filters: Filters

-    task_id: Optional[int]
-    task: Optional[TaskDumpV2]
-    task_legacy_id: Optional[int]
-    task_legacy: Optional[TaskDumpV1]
-
-    # Validators
-    @root_validator
-    def task_v1_or_v2(cls, values):
-        v1 = values.get("task_legacy_id")
-        v2 = values.get("task_id")
-        if ((v1 is not None) and (v2 is not None)) or (
-            (v1 is None) and (v2 is None)
-        ):
-            message = "both" if (v1 and v2) else "none"
-            raise ValueError(
-                "One and only one must be provided between "
-                f"'task_legacy_id' and 'task_id' (you provided {message})"
-            )
-        return values
+    task_id: int
+    task: TaskDumpV2


 class WorkflowDumpV2(BaseModel, extra=Extra.forbid):
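Making `task_id`/`task` plain required fields lets Pydantic enforce what the deleted `task_v1_or_v2` root validator used to check by hand. A reduced stand-in (assuming Pydantic v1) showing the built-in enforcement:

    from typing import Optional
    from pydantic import BaseModel, ValidationError

    class WorkflowTaskDumpMini(BaseModel):
        # Reduced stand-in for WorkflowTaskDumpV2.
        id: int
        workflow_id: int
        order: Optional[int] = None
        task_id: int  # now required; no root validator needed

    try:
        WorkflowTaskDumpMini(id=1, workflow_id=1, order=0)
    except ValidationError as exc:
        print(exc)  # task_id: field required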
fractal_server/app/schemas/v2/task.py

@@ -11,7 +11,6 @@ from pydantic import validator

 from .._validators import valdictkeys
 from .._validators import valstr
-from ..v1.task import TaskReadV1


 class TaskCreateV2(BaseModel, extra=Extra.forbid):
@@ -101,10 +100,6 @@ class TaskReadV2(BaseModel):
     output_types: dict[str, bool]


-class TaskLegacyReadV2(TaskReadV1):
-    is_v2_compatible: bool
-
-
 class TaskUpdateV2(BaseModel):

     name: Optional[str]
fractal_server/app/schemas/v2/workflowtask.py

@@ -5,16 +5,12 @@ from typing import Optional
 from pydantic import BaseModel
 from pydantic import Extra
 from pydantic import Field
-from pydantic import root_validator
 from pydantic import validator

 from .._validators import valdictkeys
 from .._validators import valint
-from ..v1.task import TaskExportV1
-from ..v1.task import TaskImportV1
 from .task import TaskExportV2
 from .task import TaskImportV2
-from .task import TaskLegacyReadV2
 from .task import TaskReadV2
 from fractal_server.images import Filters

@@ -49,8 +45,6 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
     order: Optional[int]
     input_filters: Filters = Field(default_factory=Filters)

-    is_legacy_task: bool = False
-
     # Validators
     _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
         valdictkeys("meta_non_parallel")
@@ -88,18 +82,6 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
         )
         return value

-    @root_validator
-    def validate_legacy_task(cls, values):
-        if values["is_legacy_task"] and (
-            values.get("meta_non_parallel") is not None
-            or values.get("args_non_parallel") is not None
-        ):
-            raise ValueError(
-                "If Task is legacy, 'args_non_parallel' and 'meta_non_parallel"
-                "must be None"
-            )
-        return values
-

 class WorkflowTaskReadV2(BaseModel):

@@ -115,12 +97,9 @@ class WorkflowTaskReadV2(BaseModel):

     input_filters: Filters

-    is_legacy_task: bool
     task_type: str
-    task_id: Optional[int]
-    task: Optional[TaskReadV2]
-    task_legacy_id: Optional[int]
-    task_legacy: Optional[TaskLegacyReadV2]
+    task_id: int
+    task: TaskReadV2


 class WorkflowTaskUpdateV2(BaseModel):
@@ -177,9 +156,7 @@ class WorkflowTaskImportV2(BaseModel):

     input_filters: Optional[Filters] = None

-    is_legacy_task: bool = False
-    task: Optional[TaskImportV2] = None
-    task_legacy: Optional[TaskImportV1] = None
+    task: TaskImportV2

     _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
         valdictkeys("meta_non_parallel")
@@ -203,6 +180,4 @@ class WorkflowTaskExportV2(BaseModel):
     args_parallel: Optional[dict[str, Any]] = None
     input_filters: Filters = Field(default_factory=Filters)

-    is_legacy_task: bool = False
-    task: Optional[TaskExportV2]
-    task_legacy: Optional[TaskExportV1]
+    task: TaskExportV2
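Because `WorkflowTaskCreateV2` is declared with `extra=Extra.forbid`, clients still sending the removed `is_legacy_task` flag now get a validation error rather than a silently ignored field. A reduced stand-in, assuming Pydantic v1:

    from pydantic import BaseModel, Extra, ValidationError

    class WorkflowTaskCreateMini(BaseModel, extra=Extra.forbid):
        # Reduced stand-in for WorkflowTaskCreateV2.
        order: int = 0

    try:
        WorkflowTaskCreateMini(order=0, is_legacy_task=True)
    except ValidationError as exc:
        print(exc)  # is_legacy_task: extra fields not permitted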
fractal_server/migrations/env.py

@@ -7,6 +7,7 @@ from sqlmodel import SQLModel

 from fractal_server.app import models  # noqa
 from fractal_server.config import get_settings
+from fractal_server.migrations.naming_convention import NAMING_CONVENTION
 from fractal_server.syringe import Inject

 # this is the Alembic Config object, which provides
@@ -25,13 +26,7 @@ if config.config_file_name is not None:
 # from myapp import mymodel
 # target_metadata = mymodel.Base.metadata
 target_metadata = SQLModel.metadata
-target_metadata.naming_convention = {
-    "ix": "ix_%(column_0_label)s",
-    "uq": "uq_%(table_name)s_%(column_0_name)s",
-    "ck": "ck_%(table_name)s_`%(constraint_name)s`",
-    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
-    "pk": "pk_%(table_name)s",
-}
+target_metadata.naming_convention = NAMING_CONVENTION

 # other values from the config, defined by the needs of env.py,
 # can be acquired:
@@ -58,6 +53,7 @@ def run_migrations_offline() -> None:
         target_metadata=target_metadata,
         literal_binds=True,
         dialect_opts={"paramstyle": "named"},
+        render_as_batch=True,
     )

     with context.begin_transaction():
@@ -68,6 +64,7 @@ def do_run_migrations(connection: Connection) -> None:
     context.configure(
         connection=connection,
         target_metadata=target_metadata,
+        render_as_batch=True,
     )

     with context.begin_transaction():
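`render_as_batch=True` makes autogenerated migrations wrap ALTER operations in `op.batch_alter_table(...)` blocks, which Alembic executes on SQLite via its "move and copy" workaround for SQLite's limited ALTER TABLE support. A sketch of what such a migration fragment looks like (table and column names are illustrative; `op` is only usable inside an Alembic migration context):

    import sqlalchemy as sa
    from alembic import op  # resolves only within a running migration

    def upgrade() -> None:
        # Emitted as a batch block, so SQLite can recreate the table in place.
        with op.batch_alter_table("sometable") as batch_op:
            batch_op.alter_column(
                "some_column", existing_type=sa.INTEGER(), nullable=False
            )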
fractal_server/migrations/naming_convention.py

@@ -0,0 +1,7 @@
+NAMING_CONVENTION = {
+    "ix": "ix_%(column_0_label)s",
+    "uq": "uq_%(table_name)s_%(column_0_name)s",
+    "ck": "ck_%(table_name)s_`%(constraint_name)s`",
+    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
+    "pk": "pk_%(table_name)s",
+}
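These entries are ordinary %-style templates that SQLAlchemy fills in with table and column tokens. For instance, the `fk` template expands to exactly the constraint name that the new migration in this release drops:

    NAMING_CONVENTION = {
        "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    }

    name = NAMING_CONVENTION["fk"] % {
        "table_name": "workflowtaskv2",
        "column_0_name": "task_legacy_id",
        "referred_table_name": "task",
    }
    assert name == "fk_workflowtaskv2_task_legacy_id_task"
    print(name)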
fractal_server/migrations/versions/091b01f51f88_add_usergroup_and_linkusergroup_table.py

@@ -1,4 +1,4 @@
-"""Add_usergroup_and_linkusergroup_table
+"""Add_usergroup_and_linkusergroup_table

 Revision ID: 091b01f51f88
 Revises: 5bf02391cfef
fractal_server/migrations/versions/501961cfcd85_remove_link_between_v1_and_v2_tasks_.py

@@ -0,0 +1,97 @@
+"""Remove link between v1 and v2 tasks/workflowtasks tables
+
+Revision ID: d9a140db5d42
+Revises: 5bf02391cfef
+Create Date: 2024-09-09 14:15:34.415926
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+from fractal_server.migrations.naming_convention import NAMING_CONVENTION
+
+# revision identifiers, used by Alembic.
+revision = "d9a140db5d42"
+down_revision = "091b01f51f88"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+
+    with op.batch_alter_table("workflowtaskv2") as batch_op:
+        batch_op.alter_column(
+            "task_id", existing_type=sa.INTEGER(), nullable=False
+        )
+
+    # NOTE: in sqlite, this `drop_constraint` only works if
+    # `batch_alter_table` has a `naming_convention` set. Ref
+    # https://alembic.sqlalchemy.org/en/latest/batch.html#dropping-unnamed-or-named-foreign-key-constraints
+    with op.batch_alter_table(
+        "workflowtaskv2", naming_convention=NAMING_CONVENTION
+    ) as batch_op:
+        batch_op.drop_constraint(
+            "fk_workflowtaskv2_task_legacy_id_task", type_="foreignkey"
+        )
+
+    # NOTE: in sqlite, the `drop_index` command fails if the existing table
+    # has zero rows, while it succeeds if there are already some rows
+    if op.get_bind().dialect.name == "sqlite":
+        import sqlite3
+        import logging
+
+        logger = logging.getLogger("alembic.runtime.migration")
+        logger.warning(
+            f"Using sqlite, with {sqlite3.version=} and "
+            f"{sqlite3.sqlite_version=}"
+        )
+        logger.warning("Now drop index 'idx_workflowtaskv2_task_legacy_id'")
+        try:
+            with op.batch_alter_table("workflowtaskv2") as batch_op:
+                batch_op.drop_index("idx_workflowtaskv2_task_legacy_id")
+        except sa.exc.OperationalError:
+            logger.warning(
+                "Could not drop index; "
+                "this is expected, when the database is empty."
+            )
+            logger.warning("Continue.")
+
+    with op.batch_alter_table(
+        "workflowtaskv2", schema=None, naming_convention=NAMING_CONVENTION
+    ) as batch_op:
+        batch_op.drop_column("is_legacy_task")
+        batch_op.drop_column("task_legacy_id")
+
+    with op.batch_alter_table("task") as batch_op:
+        batch_op.drop_column("is_v2_compatible")
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("task", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "is_v2_compatible",
+                sa.BOOLEAN(),
+                server_default=sa.text("(false)"),
+                nullable=False,
+            )
+        )
+    with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column("task_legacy_id", sa.INTEGER(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column("is_legacy_task", sa.BOOLEAN(), nullable=False)
+        )
+        batch_op.create_foreign_key(
+            "fk_workflowtaskv2_task_legacy_id_task",
+            "task",
+            ["task_legacy_id"],
+            ["id"],
+        )
+        batch_op.alter_column(
+            "task_id", existing_type=sa.INTEGER(), nullable=True
+        )
+
+    # ### end Alembic commands ###
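One way to sanity-check the upgraded schema is SQLAlchemy's runtime inspector; a sketch against an already-migrated database (the database URL is hypothetical):

    from sqlalchemy import create_engine, inspect

    engine = create_engine("sqlite:///fractal_server.db")  # hypothetical path
    inspector = inspect(engine)
    columns = {c["name"] for c in inspector.get_columns("workflowtaskv2")}
    assert "task_legacy_id" not in columns
    assert "is_legacy_task" not in columns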
{fractal_server-2.4.2.dist-info → fractal_server-2.5.0a1.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fractal-server
-Version: 2.4.2
+Version: 2.5.0a1
 Summary: Server component of the Fractal analytics platform
 Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause