fractal-server 2.7.0a4__py3-none-any.whl → 2.7.0a6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/routes/admin/v2/task.py +0 -5
  3. fractal_server/app/routes/api/v1/task_collection.py +2 -2
  4. fractal_server/app/routes/api/v2/__init__.py +4 -0
  5. fractal_server/app/routes/api/v2/_aux_functions.py +1 -7
  6. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +72 -11
  7. fractal_server/app/routes/api/v2/task_collection.py +49 -39
  8. fractal_server/app/routes/api/v2/task_group.py +10 -6
  9. fractal_server/app/routes/api/v2/workflow.py +14 -82
  10. fractal_server/app/routes/api/v2/workflow_import.py +355 -0
  11. fractal_server/app/routes/api/v2/workflowtask.py +0 -1
  12. fractal_server/app/routes/auth/group.py +27 -0
  13. fractal_server/app/runner/v2/__init__.py +13 -7
  14. fractal_server/app/schemas/v2/__init__.py +1 -0
  15. fractal_server/app/schemas/v2/dumps.py +0 -1
  16. fractal_server/app/schemas/v2/manifest.py +13 -0
  17. fractal_server/app/schemas/v2/task.py +20 -10
  18. fractal_server/app/schemas/v2/workflowtask.py +3 -4
  19. fractal_server/data_migrations/2_7_0.py +62 -3
  20. fractal_server/tasks/utils.py +19 -5
  21. fractal_server/tasks/v1/background_operations.py +3 -3
  22. fractal_server/tasks/v1/get_collection_data.py +2 -2
  23. fractal_server/tasks/v2/background_operations.py +4 -4
  24. fractal_server/tasks/v2/endpoint_operations.py +95 -15
  25. {fractal_server-2.7.0a4.dist-info → fractal_server-2.7.0a6.dist-info}/METADATA +1 -1
  26. {fractal_server-2.7.0a4.dist-info → fractal_server-2.7.0a6.dist-info}/RECORD +29 -28
  27. {fractal_server-2.7.0a4.dist-info → fractal_server-2.7.0a6.dist-info}/LICENSE +0 -0
  28. {fractal_server-2.7.0a4.dist-info → fractal_server-2.7.0a6.dist-info}/WHEEL +0 -0
  29. {fractal_server-2.7.0a4.dist-info → fractal_server-2.7.0a6.dist-info}/entry_points.txt +0 -0
fractal_server/app/routes/api/v2/workflow_import.py
@@ -0,0 +1,355 @@
+ from typing import Optional
+
+ from fastapi import APIRouter
+ from fastapi import Depends
+ from fastapi import HTTPException
+ from fastapi import status
+ from sqlmodel import or_
+ from sqlmodel import select
+
+ from ....db import AsyncSession
+ from ....db import get_async_db
+ from ....models.v2 import TaskV2
+ from ....models.v2 import WorkflowV2
+ from ....schemas.v2 import TaskImportV2Legacy
+ from ....schemas.v2 import WorkflowImportV2
+ from ....schemas.v2 import WorkflowReadV2WithWarnings
+ from ....schemas.v2 import WorkflowTaskCreateV2
+ from ._aux_functions import _check_workflow_exists
+ from ._aux_functions import _get_project_check_owner
+ from ._aux_functions import _workflow_insert_task
+ from ._aux_functions_tasks import _add_warnings_to_workflow_tasks
+ from fractal_server.app.models import LinkUserGroup
+ from fractal_server.app.models import UserOAuth
+ from fractal_server.app.models.v2.task import TaskGroupV2
+ from fractal_server.app.routes.auth import current_active_user
+ from fractal_server.app.routes.auth._aux_auth import _get_default_usergroup_id
+ from fractal_server.app.schemas.v2.task import TaskImportV2
+ from fractal_server.logger import set_logger
+
+ router = APIRouter()
+
+
+ logger = set_logger(__name__)
+
+
+ async def _get_user_accessible_taskgroups(
+     *,
+     user_id: int,
+     db: AsyncSession,
+ ) -> list[TaskGroupV2]:
+     """
+     Retrieve the list of task groups that the user has access to.
+     """
+     stm = select(TaskGroupV2).where(
+         or_(
+             TaskGroupV2.user_id == user_id,
+             TaskGroupV2.user_group_id.in_(
+                 select(LinkUserGroup.group_id).where(
+                     LinkUserGroup.user_id == user_id
+                 )
+             ),
+         )
+     )
+     res = await db.execute(stm)
+     accessible_task_groups = res.scalars().all()
+     logger.info(
+         f"Found {len(accessible_task_groups)} accessible "
+         f"task groups for {user_id=}."
+     )
+     return accessible_task_groups
+
+
+ async def _get_task_by_source(
+     source: str,
+     task_groups_list: list[TaskGroupV2],
+ ) -> Optional[int]:
+     """
+     Find the task with a given legacy `source`.
+
+     Args:
+         source: `source` of the task to be imported.
+         task_groups_list: Current list of valid task groups.
+
+     Return:
+         `id` of the matching task, or `None`.
+     """
+     task_id = next(
+         iter(
+             task.id
+             for task_group in task_groups_list
+             for task in task_group.task_list
+             if task.source == source
+         ),
+         None,
+     )
+     return task_id
+
+
+ async def _disambiguate_task_groups(
+     *,
+     matching_task_groups: list[TaskGroupV2],
+     user_id: int,
+     db: AsyncSession,
+     default_group_id: int,
+ ) -> Optional[TaskGroupV2]:
+     """
+     Disambiguate task groups based on ownership information.
+     """
+     # Highest priority: task groups created by the user
+     for task_group in matching_task_groups:
+         if task_group.user_id == user_id:
+             logger.info(
+                 "[_disambiguate_task_groups] "
+                 f"Found task group {task_group.id} with {user_id=}, return."
+             )
+             return task_group
+     logger.info(
+         "[_disambiguate_task_groups] "
+         f"No task group found with {user_id=}, continue."
+     )
+
+     # Medium priority: task groups owned by the default user group
+     for task_group in matching_task_groups:
+         if task_group.user_group_id == default_group_id:
+             logger.info(
+                 "[_disambiguate_task_groups] "
+                 f"Found task group {task_group.id} with user_group_id="
+                 f"{default_group_id}, return."
+             )
+             return task_group
+     logger.info(
+         "[_disambiguate_task_groups] "
+         "No task group found with user_group_id="
+         f"{default_group_id}, continue."
+     )
+
+     # Lowest priority: task groups owned by other groups, sorted by
+     # the age of the user/user-group link
+     logger.info(
+         "[_disambiguate_task_groups] "
+         "Now sorting remaining task groups by oldest-user-link."
+     )
+     user_group_ids = [
+         task_group.user_group_id for task_group in matching_task_groups
+     ]
+     stm = (
+         select(LinkUserGroup.group_id)
+         .where(LinkUserGroup.user_id == user_id)
+         .where(LinkUserGroup.group_id.in_(user_group_ids))
+         .order_by(LinkUserGroup.timestamp_created.asc())
+     )
+     res = await db.execute(stm)
+     oldest_user_group_id = res.scalars().first()
+     logger.info(
+         "[_disambiguate_task_groups] "
+         f"Result of sorting: {oldest_user_group_id=}."
+     )
+     task_group = next(
+         iter(
+             task_group
+             for task_group in matching_task_groups
+             if task_group.user_group_id == oldest_user_group_id
+         ),
+         None,
+     )
+     return task_group
+
+
+ async def _get_task_by_taskimport(
+     *,
+     task_import: TaskImportV2,
+     task_groups_list: list[TaskGroupV2],
+     user_id: int,
+     default_group_id: int,
+     db: AsyncSession,
+ ) -> Optional[int]:
+     """
+     Find a task based on `task_import`.
+
+     Args:
+         task_import: Info on task to be imported.
+         task_groups_list: Current list of valid task groups.
+         user_id: ID of current user.
+         default_group_id: ID of default user group.
+         db: Asynchronous db session.
+
+     Return:
+         `id` of the matching task, or `None`.
+     """
+
+     logger.info(f"[_get_task_by_taskimport] START, {task_import=}")
+
+     # Filter by `pkg_name` and by presence of a task with the given `name`
+     matching_task_groups = [
+         task_group
+         for task_group in task_groups_list
+         if (
+             task_group.pkg_name == task_import.pkg_name
+             and task_import.name
+             in [task.name for task in task_group.task_list]
+         )
+     ]
+     if len(matching_task_groups) < 1:
+         logger.info(
+             "[_get_task_by_taskimport] "
+             f"No task group with {task_import.pkg_name=} "
+             f"and a task with {task_import.name=}."
+         )
+         return None
+
+     # Determine target `version`
+     # Note that task_import.version cannot be "", due to a validator
+     if task_import.version is None:
+         logger.info(
+             "[_get_task_by_taskimport] "
+             "No version requested, looking for latest."
+         )
+         latest_task_group = max(
+             matching_task_groups, key=lambda tg: tg.version or ""
+         )
+         version = latest_task_group.version
+         logger.info(
+             f"[_get_task_by_taskimport] Latest version set to {version}."
+         )
+     else:
+         version = task_import.version
+
+     # Filter matching task groups by version
+     final_matching_task_groups = list(
+         filter(lambda tg: tg.version == version, matching_task_groups)
+     )
+
+     if len(final_matching_task_groups) < 1:
+         logger.info(
+             "[_get_task_by_taskimport] "
+             "No task group left after filtering by version."
+         )
+         return None
+     elif len(final_matching_task_groups) == 1:
+         final_task_group = final_matching_task_groups[0]
+         logger.info(
+             "[_get_task_by_taskimport] "
+             "Found a single task group, after filtering by version."
+         )
+     else:
+         logger.info(
+             "[_get_task_by_taskimport] "
+             "Found many task groups, after filtering by version."
+         )
+         final_task_group = await _disambiguate_task_groups(
+             matching_task_groups=final_matching_task_groups,
+             user_id=user_id,
+             db=db,
+             default_group_id=default_group_id,
+         )
+         if final_task_group is None:
+             logger.info(
+                 "[_get_task_by_taskimport] Disambiguation returned None."
+             )
+             return None
+
+     # Find the task with the given name
+     task_id = next(
+         iter(
+             task.id
+             for task in final_task_group.task_list
+             if task.name == task_import.name
+         ),
+         None,
+     )
+
+     logger.info(f"[_get_task_by_taskimport] END, {task_import=}, {task_id=}.")
+
+     return task_id
+
+
+ @router.post(
+     "/project/{project_id}/workflow/import/",
+     response_model=WorkflowReadV2WithWarnings,
+     status_code=status.HTTP_201_CREATED,
+ )
+ async def import_workflow(
+     project_id: int,
+     workflow_import: WorkflowImportV2,
+     user: UserOAuth = Depends(current_active_user),
+     db: AsyncSession = Depends(get_async_db),
+ ) -> WorkflowReadV2WithWarnings:
+     """
+     Import an existing workflow into a project and create required objects.
+     """
+
+     # Preliminary checks
+     await _get_project_check_owner(
+         project_id=project_id,
+         user_id=user.id,
+         db=db,
+     )
+     await _check_workflow_exists(
+         name=workflow_import.name,
+         project_id=project_id,
+         db=db,
+     )
+
+     task_group_list = await _get_user_accessible_taskgroups(
+         user_id=user.id,
+         db=db,
+     )
+     default_group_id = await _get_default_usergroup_id(db)
+
+     list_wf_tasks = []
+     list_task_ids = []
+     for wf_task in workflow_import.task_list:
+         task_import = wf_task.task
+         if isinstance(task_import, TaskImportV2Legacy):
+             task_id = await _get_task_by_source(
+                 source=task_import.source,
+                 task_groups_list=task_group_list,
+             )
+         else:
+             task_id = await _get_task_by_taskimport(
+                 task_import=task_import,
+                 user_id=user.id,
+                 default_group_id=default_group_id,
+                 task_groups_list=task_group_list,
+                 db=db,
+             )
+         if task_id is None:
+             raise HTTPException(
+                 status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                 detail=f"Could not find a task matching {wf_task.task}.",
+             )
+         new_wf_task = WorkflowTaskCreateV2(
+             **wf_task.dict(exclude_none=True, exclude={"task"})
+         )
+         list_wf_tasks.append(new_wf_task)
+         list_task_ids.append(task_id)
+
+     # Create new Workflow
+     db_workflow = WorkflowV2(
+         project_id=project_id,
+         **workflow_import.dict(exclude_none=True, exclude={"task_list"}),
+     )
+     db.add(db_workflow)
+     await db.commit()
+     await db.refresh(db_workflow)
+
+     # Insert tasks into the workflow, pairing each new workflow task
+     # with the task `id` resolved above
+     for new_wf_task, task_id in zip(list_wf_tasks, list_task_ids):
+         await _workflow_insert_task(
+             **new_wf_task.dict(),
+             workflow_id=db_workflow.id,
+             task_id=task_id,
+             db=db,
+         )
+
+     # Add warnings for non-active tasks (or non-accessible tasks,
+     # although that should never happen)
+     wftask_list_with_warnings = await _add_warnings_to_workflow_tasks(
+         wftask_list=db_workflow.task_list, user_id=user.id, db=db
+     )
+     workflow_data = dict(
+         **db_workflow.model_dump(),
+         project=db_workflow.project,
+         task_list=wftask_list_with_warnings,
+     )
+
+     return workflow_data
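
For context, a minimal sketch of how a client might call the new import endpoint. The payload shape follows the `WorkflowImportV2` / `TaskImportV2` / `TaskImportV2Legacy` schemas in this diff; the base URL, token, and task identifiers are hypothetical, and the `/api/v2` prefix assumes the router is mounted as in `app/routes/api/v2/__init__.py`:

    import httpx

    payload = {
        "name": "My imported workflow",
        "task_list": [
            # New-style reference, resolved by pkg_name/version/name
            {"task": {"pkg_name": "fractal-tasks-core", "version": "1.0.2", "name": "Some Task"}},
            # Legacy reference, resolved by `source`
            {"task": {"source": "pip_remote:fractal_tasks_core:0.14.3:::some_task"}},
        ],
    }
    response = httpx.post(
        "http://localhost:8000/api/v2/project/1/workflow/import/",
        json=payload,
        headers={"Authorization": "Bearer <token>"},
    )
    assert response.status_code == 201  # body is a WorkflowReadV2WithWarnings
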
fractal_server/app/routes/api/v2/workflowtask.py
@@ -77,7 +77,6 @@ async def create_workflowtask(
      workflow_task = await _workflow_insert_task(
          workflow_id=workflow.id,
          task_id=task_id,
-         order=new_task.order,
          meta_non_parallel=new_task.meta_non_parallel,
          meta_parallel=new_task.meta_parallel,
          args_non_parallel=new_task.args_non_parallel,
fractal_server/app/routes/auth/group.py
@@ -19,10 +19,12 @@ from fractal_server.app.db import get_async_db
  from fractal_server.app.models import LinkUserGroup
  from fractal_server.app.models import UserGroup
  from fractal_server.app.models import UserOAuth
+ from fractal_server.app.models import UserSettings
  from fractal_server.app.models.v2 import TaskGroupV2
  from fractal_server.app.schemas.user_group import UserGroupCreate
  from fractal_server.app.schemas.user_group import UserGroupRead
  from fractal_server.app.schemas.user_group import UserGroupUpdate
+ from fractal_server.app.schemas.user_settings import UserSettingsUpdate
  from fractal_server.app.security import FRACTAL_DEFAULT_GROUP_NAME
  from fractal_server.logger import set_logger

@@ -212,3 +214,28 @@ async def delete_single_group(
      await db.commit()

      return Response(status_code=status.HTTP_204_NO_CONTENT)
+
+
+ @router_group.patch("/group/{group_id}/user-settings/", status_code=200)
+ async def patch_user_settings_bulk(
+     group_id: int,
+     settings_update: UserSettingsUpdate,
+     superuser: UserOAuth = Depends(current_active_superuser),
+     db: AsyncSession = Depends(get_async_db),
+ ):
+     await _usergroup_or_404(group_id, db)
+     res = await db.execute(
+         select(UserSettings)
+         .join(UserOAuth)
+         .where(LinkUserGroup.user_id == UserOAuth.id)
+         .where(LinkUserGroup.group_id == group_id)
+     )
+     settings_list = res.scalars().all()
+     update = settings_update.dict(exclude_unset=True)
+     for settings in settings_list:
+         for k, v in update.items():
+             setattr(settings, k, v)
+         db.add(settings)
+     await db.commit()
+
+     return Response(status_code=status.HTTP_200_OK)
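
For context, a sketch of how a superuser might call this new bulk endpoint. Group id, settings field, and value are hypothetical (any field accepted by `UserSettingsUpdate` works, since the endpoint applies `dict(exclude_unset=True)`), and the route prefix assumes the group router is mounted under `/auth`:

    import httpx

    # Apply one settings field to every member of group 2
    response = httpx.patch(
        "http://localhost:8000/auth/group/2/user-settings/",
        json={"slurm_accounts": ["project-account"]},
        headers={"Authorization": "Bearer <superuser-token>"},
    )
    assert response.status_code == 200
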
fractal_server/app/runner/v2/__init__.py
@@ -177,11 +177,13 @@ async def submit_workflow(
          return

      try:
-
          # Create WORKFLOW_DIR_LOCAL
-         original_umask = os.umask(0)
-         WORKFLOW_DIR_LOCAL.mkdir(parents=True, mode=0o755)
-         os.umask(original_umask)
+         if FRACTAL_RUNNER_BACKEND == "slurm":
+             original_umask = os.umask(0)
+             WORKFLOW_DIR_LOCAL.mkdir(parents=True, mode=0o755)
+             os.umask(original_umask)
+         else:
+             WORKFLOW_DIR_LOCAL.mkdir(parents=True)

          # Define and create WORKFLOW_DIR_REMOTE
          if FRACTAL_RUNNER_BACKEND == "local":
@@ -214,15 +216,19 @@ async def submit_workflow(
              order=order,
              task_name=task_name,
          )
-         original_umask = os.umask(0)
-         (WORKFLOW_DIR_LOCAL / subfolder_name).mkdir(mode=0o755)
-         os.umask(original_umask)
          if FRACTAL_RUNNER_BACKEND == "slurm":
+             # Create local subfolder (with 755) and remote one
+             # (via `sudo -u`)
+             original_umask = os.umask(0)
+             (WORKFLOW_DIR_LOCAL / subfolder_name).mkdir(mode=0o755)
+             os.umask(original_umask)
              _mkdir_as_user(
                  folder=str(WORKFLOW_DIR_REMOTE / subfolder_name),
                  user=slurm_user,
              )
          else:
+             # Create local subfolder (with standard permission set)
+             (WORKFLOW_DIR_LOCAL / subfolder_name).mkdir()
              logger.info("Skip remote-subfolder creation")
  except Exception as e:
      error_type = type(e).__name__
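
As background for the two hunks above, a standalone sketch of the umask pattern they use (hypothetical path, not package code): `Path.mkdir(mode=0o755)` is masked by the process umask, so the code clears the umask first to guarantee exactly 755, then restores it.

    import os
    from pathlib import Path

    original_umask = os.umask(0)  # returns the previous mask and sets it to 0
    try:
        # With umask 0, the requested mode 0o755 is applied exactly
        Path("/tmp/example-workflow-dir").mkdir(mode=0o755, exist_ok=True)
    finally:
        os.umask(original_umask)  # restore the previous umask
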
fractal_server/app/schemas/v2/__init__.py
@@ -20,6 +20,7 @@ from .project import ProjectUpdateV2  # noqa F401
  from .task import TaskCreateV2  # noqa F401
  from .task import TaskExportV2  # noqa F401
  from .task import TaskImportV2  # noqa F401
+ from .task import TaskImportV2Legacy  # noqa F401
  from .task import TaskReadV2  # noqa F401
  from .task import TaskUpdateV2  # noqa F401
  from .task_collection import CollectionStateReadV2  # noqa F401
fractal_server/app/schemas/v2/dumps.py
@@ -31,7 +31,6 @@ class TaskDumpV2(BaseModel):
      command_non_parallel: Optional[str]
      command_parallel: Optional[str]
      source: Optional[str] = None
-     owner: Optional[str]
      version: Optional[str]

      input_types: dict[str, bool]
fractal_server/app/schemas/v2/manifest.py
@@ -159,6 +159,19 @@ class ManifestV2(BaseModel):
          )
          return values

+     @root_validator()
+     def _unique_task_names(cls, values):
+         task_list = values["task_list"]
+         task_list_names = [t.name for t in task_list]
+         if len(set(task_list_names)) != len(task_list_names):
+             raise ValueError(
+                 "Task names in manifest must be unique.\n"
+                 f"Given: {task_list_names}."
+             )
+         return values
+
      @validator("manifest_version")
      def manifest_version_2(cls, value):
          if value != "2":
fractal_server/app/schemas/v2/task.py
@@ -21,7 +21,6 @@ class TaskCreateV2(BaseModel, extra=Extra.forbid):

      command_non_parallel: Optional[str] = None
      command_parallel: Optional[str] = None
-     source: Optional[str] = None

      meta_non_parallel: Optional[dict[str, Any]] = None
      meta_parallel: Optional[dict[str, Any]] = None
@@ -64,7 +63,6 @@ class TaskCreateV2(BaseModel, extra=Extra.forbid):
      _command_parallel = validator("command_parallel", allow_reuse=True)(
          valstr("command_parallel")
      )
-     _source = validator("source", allow_reuse=True)(valstr("source"))
      _version = validator("version", allow_reuse=True)(valstr("version"))

      _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
@@ -136,8 +134,6 @@ class TaskReadV2(BaseModel):

  class TaskUpdateV2(BaseModel, extra=Extra.forbid):

-     name: Optional[str] = None
-     version: Optional[str] = None
      command_parallel: Optional[str] = None
      command_non_parallel: Optional[str] = None
      input_types: Optional[dict[str, bool]] = None
@@ -155,10 +151,6 @@ class TaskUpdateV2(BaseModel, extra=Extra.forbid):
              raise ValueError
          return v

-     _name = validator("name", allow_reuse=True)(valstr("name"))
-     _version = validator("version", allow_reuse=True)(
-         valstr("version", accept_none=True)
-     )
      _command_parallel = validator("command_parallel", allow_reuse=True)(
          valstr("command_parallel")
      )
@@ -191,11 +183,29 @@ class TaskUpdateV2(BaseModel, extra=Extra.forbid):

  class TaskImportV2(BaseModel, extra=Extra.forbid):

+     pkg_name: str
+     version: Optional[str] = None
+     name: str
+     _pkg_name = validator("pkg_name", allow_reuse=True)(valstr("pkg_name"))
+     _version = validator("version", allow_reuse=True)(
+         valstr("version", accept_none=True)
+     )
+     _name = validator("name", allow_reuse=True)(valstr("name"))
+
+
+ class TaskImportV2Legacy(BaseModel):
      source: str
      _source = validator("source", allow_reuse=True)(valstr("source"))


  class TaskExportV2(BaseModel):

-     source: Optional[str] = None
-     _source = validator("source", allow_reuse=True)(valstr("source"))
+     pkg_name: str
+     version: Optional[str] = None
+     name: str
+
+     _pkg_name = validator("pkg_name", allow_reuse=True)(valstr("pkg_name"))
+     _version = validator("version", allow_reuse=True)(
+         valstr("version", accept_none=True)
+     )
+     _name = validator("name", allow_reuse=True)(valstr("name"))
fractal_server/app/schemas/v2/workflowtask.py
@@ -1,6 +1,7 @@
  from enum import Enum
  from typing import Any
  from typing import Optional
+ from typing import Union

  from pydantic import BaseModel
  from pydantic import Extra
@@ -8,9 +9,9 @@ from pydantic import Field
  from pydantic import validator

  from .._validators import valdictkeys
- from .._validators import valint
  from .task import TaskExportV2
  from .task import TaskImportV2
+ from .task import TaskImportV2Legacy
  from .task import TaskReadV2
  from fractal_server.images import Filters

@@ -42,7 +43,6 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
      meta_parallel: Optional[dict[str, Any]]
      args_non_parallel: Optional[dict[str, Any]]
      args_parallel: Optional[dict[str, Any]]
-     order: Optional[int]
      input_filters: Filters = Field(default_factory=Filters)

      # Validators
@@ -52,7 +52,6 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
      _meta_parallel = validator("meta_parallel", allow_reuse=True)(
          valdictkeys("meta_parallel")
      )
-     _order = validator("order", allow_reuse=True)(valint("order", min_val=0))

      @validator("args_non_parallel")
      def validate_args_non_parallel(cls, value):
@@ -160,7 +159,7 @@ class WorkflowTaskImportV2(BaseModel, extra=Extra.forbid):

      input_filters: Optional[Filters] = None

-     task: TaskImportV2
+     task: Union[TaskImportV2, TaskImportV2Legacy]

      _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
          valdictkeys("meta_non_parallel")