fractal-server 2.0.3-py3-none-any.whl → 2.0.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fractal_server/__init__.py CHANGED
@@ -1 +1 @@
- __VERSION__ = "2.0.3"
+ __VERSION__ = "2.0.5"
fractal_server/app/routes/admin/v2.py CHANGED
@@ -4,6 +4,7 @@ Definition of `/admin` routes.
  from datetime import datetime
  from datetime import timezone
  from pathlib import Path
+ from typing import Literal
  from typing import Optional

  from fastapi import APIRouter
@@ -13,6 +14,8 @@ from fastapi import Response
  from fastapi import status
  from fastapi.responses import StreamingResponse
  from pydantic import BaseModel
+ from pydantic import EmailStr
+ from pydantic import Field
  from sqlmodel import select

  from ....config import get_settings
@@ -24,6 +27,9 @@ from ...models.security import UserOAuth as User
  from ...models.v1 import Task
  from ...models.v2 import JobV2
  from ...models.v2 import ProjectV2
+ from ...models.v2 import TaskV2
+ from ...models.v2 import WorkflowTaskV2
+ from ...models.v2 import WorkflowV2
  from ...runner.filenames import WORKFLOW_LOG_FILENAME
  from ...schemas.v2 import JobReadV2
  from ...schemas.v2 import JobStatusTypeV2
@@ -307,3 +313,128 @@ async def flag_task_v1_as_v2_compatible(
      await db.close()

      return Response(status_code=status.HTTP_200_OK)
+
+
+ class TaskV2Minimal(BaseModel):
+
+     id: int
+     name: str
+     type: str
+     command_non_parallel: Optional[str]
+     command_parallel: Optional[str]
+     source: str
+     owner: Optional[str]
+     version: Optional[str]
+
+
+ class ProjectUser(BaseModel):
+
+     id: int
+     email: EmailStr
+
+
+ class TaskV2Relationship(BaseModel):
+
+     workflow_id: int
+     workflow_name: str
+     project_id: int
+     project_name: str
+     project_users: list[ProjectUser] = Field(default_factory=list)
+
+
+ class TaskV2Info(BaseModel):
+
+     task: TaskV2Minimal
+     relationships: list[TaskV2Relationship]
+
+
+ @router_admin_v2.get("/task/", response_model=list[TaskV2Info])
+ async def query_tasks(
+     id: Optional[int] = None,
+     source: Optional[str] = None,
+     version: Optional[str] = None,
+     name: Optional[str] = None,
+     owner: Optional[str] = None,
+     kind: Optional[Literal["common", "users"]] = None,
+     max_number_of_results: int = 25,
+     user: User = Depends(current_active_superuser),
+     db: AsyncSession = Depends(get_async_db),
+ ) -> list[TaskV2Info]:
+     """
+     Query the `TaskV2` table and get information about related items
+     (WorkflowV2s and ProjectV2s).
+
+     Args:
+         id: If not `None`, query for matching `task.id`.
+         source: If not `None`, query for case-insensitive containment in
+             `task.source`.
+         version: If not `None`, query for matching `task.version`.
+         name: If not `None`, query for case-insensitive containment in `task.name`.
+         owner: If not `None`, query for matching `task.owner`.
+         kind: If not `None`, query for TaskV2s that have (`users`) or don't
+             have (`common`) a `task.owner`.
+         max_number_of_results: The maximum length of the response.
+     """
+
+     stm = select(TaskV2)
+
+     if id is not None:
+         stm = stm.where(TaskV2.id == id)
+     if source is not None:
+         stm = stm.where(TaskV2.source.icontains(source))
+     if version is not None:
+         stm = stm.where(TaskV2.version == version)
+     if name is not None:
+         stm = stm.where(TaskV2.name.icontains(name))
+     if owner is not None:
+         stm = stm.where(TaskV2.owner == owner)
+
+     if kind == "common":
+         stm = stm.where(TaskV2.owner == None)  # noqa E711
+     elif kind == "users":
+         stm = stm.where(TaskV2.owner != None)  # noqa E711
+
+     res = await db.execute(stm)
+     task_list = res.scalars().all()
+     if len(task_list) > max_number_of_results:
+         await db.close()
+         raise HTTPException(
+             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+             detail=(
+                 f"Too many Tasks ({len(task_list)} > {max_number_of_results})."
+                 " Please add more query filters."
+             ),
+         )
+
+     task_info_list = []
+
+     for task in task_list:
+         stm = (
+             select(WorkflowV2)
+             .join(WorkflowTaskV2)
+             .where(WorkflowTaskV2.workflow_id == WorkflowV2.id)
+             .where(WorkflowTaskV2.task_id == task.id)
+         )
+         res = await db.execute(stm)
+         wf_list = res.scalars().all()
+
+         task_info_list.append(
+             dict(
+                 task=task.model_dump(),
+                 relationships=[
+                     dict(
+                         workflow_id=workflow.id,
+                         workflow_name=workflow.name,
+                         project_id=workflow.project.id,
+                         project_name=workflow.project.name,
+                         project_users=[
+                             dict(id=user.id, email=user.email)
+                             for user in workflow.project.user_list
+                         ],
+                     )
+                     for workflow in wf_list
+                 ],
+             )
+         )
+
+     return task_info_list
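
As an illustration of the new admin endpoint above, the following hypothetical client call (not part of this diff; the base URL, the `/admin/v2/task/` route prefix, the token, and the task name are assumptions) shows how the query filters combine:

import httpx

# Hypothetical superuser token and server address (placeholders only).
TOKEN = "..."
BASE_URL = "http://localhost:8000"

# Ask for tasks whose name contains "cellpose" (case-insensitive) and that
# have an owner (kind="users"), capping the response at 5 items.
response = httpx.get(
    f"{BASE_URL}/admin/v2/task/",
    params={"name": "cellpose", "kind": "users", "max_number_of_results": 5},
    headers={"Authorization": f"Bearer {TOKEN}"},
)
response.raise_for_status()
for task_info in response.json():
    task = task_info["task"]
    print(task["id"], task["name"], len(task_info["relationships"]))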
fractal_server/app/routes/api/v2/submit.py CHANGED
@@ -1,6 +1,7 @@
  from datetime import datetime
  from datetime import timedelta
  from datetime import timezone
+ from pathlib import Path
  from typing import Optional

  from fastapi import APIRouter
@@ -12,6 +13,7 @@ from sqlmodel import select

  from .....config import get_settings
  from .....syringe import Inject
+ from .....utils import get_timestamp
  from ....db import AsyncSession
  from ....db import get_async_db
  from ....models.v2 import JobV2
@@ -91,23 +93,17 @@ async def apply_workflow(

      # If backend is SLURM, check that the user has required attributes
      settings = Inject(get_settings)
-     backend = settings.FRACTAL_RUNNER_BACKEND
-     if backend == "slurm":
+     FRACTAL_RUNNER_BACKEND = settings.FRACTAL_RUNNER_BACKEND
+     if FRACTAL_RUNNER_BACKEND == "slurm":
          if not user.slurm_user:
              raise HTTPException(
                  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                 detail=(
-                     f"FRACTAL_RUNNER_BACKEND={backend}, "
-                     f"but {user.slurm_user=}."
-                 ),
+                 detail=f"{FRACTAL_RUNNER_BACKEND=}, but {user.slurm_user=}.",
              )
          if not user.cache_dir:
              raise HTTPException(
                  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                 detail=(
-                     f"FRACTAL_RUNNER_BACKEND={backend}, "
-                     f"but {user.cache_dir=}."
-                 ),
+                 detail=f"{FRACTAL_RUNNER_BACKEND=}, but {user.cache_dir=}.",
              )

      # Check that no other job with the same dataset_id is SUBMITTED
@@ -197,6 +193,30 @@ async def apply_workflow(
      await db.commit()
      await db.refresh(job)

+     # Define server-side job directory
+     timestamp_string = get_timestamp().strftime("%Y%m%d_%H%M%S")
+     WORKFLOW_DIR = (
+         settings.FRACTAL_RUNNER_WORKING_BASE_DIR
+         / (
+             f"proj_v2_{project_id:07d}_wf_{workflow_id:07d}_job_{job.id:07d}"
+             f"_{timestamp_string}"
+         )
+     ).resolve()
+
+     # Define user-side job directory
+     if FRACTAL_RUNNER_BACKEND == "local":
+         WORKFLOW_DIR_USER = WORKFLOW_DIR
+     elif FRACTAL_RUNNER_BACKEND == "slurm":
+         WORKFLOW_DIR_USER = (
+             Path(user.cache_dir) / f"{WORKFLOW_DIR.name}"
+         ).resolve()
+
+     # Update job folders in the db
+     job.working_dir = WORKFLOW_DIR.as_posix()
+     job.working_dir_user = WORKFLOW_DIR_USER.as_posix()
+     await db.merge(job)
+     await db.commit()
+
      background_tasks.add_task(
          submit_workflow,
          workflow_id=workflow.id,
@@ -208,5 +228,4 @@ async def apply_workflow(
      )

      await db.close()
-
      return job
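
For reference, the folder name built by the new block can be previewed with plain string formatting. This is a minimal sketch with made-up IDs and a fixed timestamp, and a placeholder path standing in for `settings.FRACTAL_RUNNER_WORKING_BASE_DIR`:

from datetime import datetime
from pathlib import Path

# Made-up values, for illustration only.
project_id, workflow_id, job_id = 1, 2, 345
timestamp_string = datetime(2024, 5, 6, 12, 0, 0).strftime("%Y%m%d_%H%M%S")

folder_name = (
    f"proj_v2_{project_id:07d}_wf_{workflow_id:07d}_job_{job_id:07d}"
    f"_{timestamp_string}"
)
print(folder_name)
# proj_v2_0000001_wf_0000002_job_0000345_20240506_120000

# Placeholder base directory (an assumption, not a fractal-server default):
base_dir = Path("/srv/fractal/artifacts")
print((base_dir / folder_name).resolve())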
fractal_server/app/routes/api/v2/task.py CHANGED
@@ -14,6 +14,7 @@ from ....db import get_async_db
  from ....models.v1 import Task as TaskV1
  from ....models.v2 import TaskV2
  from ....models.v2 import WorkflowTaskV2
+ from ....models.v2 import WorkflowV2
  from ....schemas.v2 import TaskCreateV2
  from ....schemas.v2 import TaskReadV2
  from ....schemas.v2 import TaskUpdateV2
@@ -204,16 +205,45 @@ async def delete_task(
      # Check that the TaskV2 is not in relationship with some WorkflowTaskV2
      stm = select(WorkflowTaskV2).filter(WorkflowTaskV2.task_id == task_id)
      res = await db.execute(stm)
-     workflowtask_list = res.scalars().all()
-     if workflowtask_list:
+     workflow_tasks = res.scalars().all()
+
+     if workflow_tasks:
+         # Find IDs of all affected workflows
+         workflow_ids = set(wftask.workflow_id for wftask in workflow_tasks)
+         # Fetch all affected workflows from the DB
+         stm = select(WorkflowV2).where(WorkflowV2.id.in_(workflow_ids))
+         res = await db.execute(stm)
+         workflows = res.scalars().all()
+
+         # Find which workflows are associated with the current user
+         workflows_current_user = [
+             wf for wf in workflows if user in wf.project.user_list
+         ]
+         if workflows_current_user:
+             current_user_msg = (
+                 "For the current-user workflows (listed below),"
+                 " you can update the task or remove the workflows.\n"
+             )
+             current_user_msg += "\n".join(
+                 [
+                     f"* '{wf.name}' (id={wf.id})"
+                     for wf in workflows_current_user
+                 ]
+             )
+         else:
+             current_user_msg = ""
+
+         # Count workflows of the current user and of other users
+         num_workflows_current_user = len(workflows_current_user)
+         num_workflows_other_users = len(workflows) - num_workflows_current_user
+
          raise HTTPException(
              status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
              detail=(
-                 f"Cannot remove TaskV2 {task_id} because it is currently "
-                 "imported in WorkflowsV2 "
-                 f"{[x.workflow_id for x in workflowtask_list]}. "
-                 "If you want to remove this task, then you should first remove"
-                 " the workflows.",
+                 f"Cannot remove Task with id={task_id}: it is currently in "
+                 f"use in {num_workflows_current_user} current-user workflows "
+                 f"and in {num_workflows_other_users} other-users workflows.\n"
+                 f"{current_user_msg}"
              ),
          )

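To make the new 422 message concrete, this sketch rebuilds it with invented data (the task ID, workflow names, and counts are illustrative, not from this diff):

# Invented example data.
task_id = 42
workflows_current_user = [("Plate conversion", 7), ("3D segmentation", 9)]
num_workflows_other_users = 1

current_user_msg = (
    "For the current-user workflows (listed below),"
    " you can update the task or remove the workflows.\n"
)
current_user_msg += "\n".join(
    f"* '{name}' (id={wf_id})" for name, wf_id in workflows_current_user
)

detail = (
    f"Cannot remove Task with id={task_id}: it is currently in "
    f"use in {len(workflows_current_user)} current-user workflows "
    f"and in {num_workflows_other_users} other-users workflows.\n"
    f"{current_user_msg}"
)
print(detail)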
fractal_server/app/runner/executors/slurm/executor.py CHANGED
@@ -489,6 +489,10 @@ class FractalSlurmExecutor(SlurmExecutor):
          if len(args_batches) != math.ceil(tot_tasks / tasks_per_job):
              raise RuntimeError("Something wrong here while batching tasks")

+         # Fetch configuration variable
+         settings = Inject(get_settings)
+         FRACTAL_SLURM_SBATCH_SLEEP = settings.FRACTAL_SLURM_SBATCH_SLEEP
+
          # Construct list of futures (one per SLURM job, i.e. one per batch)
          fs = []
          current_component_index = 0
@@ -508,6 +512,7 @@ class FractalSlurmExecutor(SlurmExecutor):
                  )
              )
              current_component_index += batch_size
+             time.sleep(FRACTAL_SLURM_SBATCH_SLEEP)

          # Yield must be hidden in closure so that the futures are submitted
          # before the first iterator value is required.
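
The new setting simply paces consecutive SLURM submissions. A minimal standalone sketch of the same pattern, where `batches` and `submit_batch` are placeholders rather than executor internals from this diff:

import time

FRACTAL_SLURM_SBATCH_SLEEP = 1  # seconds; the default of 0 effectively disables pacing

def submit_all(batches, submit_batch):
    # Submit one SLURM job per batch, sleeping between consecutive
    # `sbatch` calls so a single task cannot flood the scheduler.
    futures = []
    for batch in batches:
        futures.append(submit_batch(batch))
        time.sleep(FRACTAL_SLURM_SBATCH_SLEEP)
    return futures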
fractal_server/app/runner/v2/__init__.py CHANGED
@@ -108,18 +108,15 @@ async def submit_workflow(
          return

      # Define and create server-side working folder
-     project_id = workflow.project_id
-     timestamp_string = get_timestamp().strftime("%Y%m%d_%H%M%S")
-     WORKFLOW_DIR = (
-         settings.FRACTAL_RUNNER_WORKING_BASE_DIR
-         / (
-             f"proj_{project_id:07d}_wf_{workflow_id:07d}_job_{job_id:07d}"
-             f"_{timestamp_string}"
-         )
-     ).resolve()
-
+     WORKFLOW_DIR = Path(job.working_dir)
      if WORKFLOW_DIR.exists():
-         raise RuntimeError(f"Workflow dir {WORKFLOW_DIR} already exists.")
+         job.status = JobStatusTypeV2.FAILED
+         job.end_timestamp = get_timestamp()
+         job.log = f"Workflow dir {WORKFLOW_DIR} already exists."
+         db_sync.merge(job)
+         db_sync.commit()
+         db_sync.close()
+         return

      # Create WORKFLOW_DIR with 755 permissions
      original_umask = os.umask(0)
@@ -127,26 +124,13 @@
      os.umask(original_umask)

      # Define and create user-side working folder, if needed
-     if FRACTAL_RUNNER_BACKEND == "local":
-         WORKFLOW_DIR_USER = WORKFLOW_DIR
-     elif FRACTAL_RUNNER_BACKEND == "slurm":
-
+     WORKFLOW_DIR_USER = Path(job.working_dir_user)
+     if FRACTAL_RUNNER_BACKEND == "slurm":
          from ..executors.slurm._subprocess_run_as_user import (
              _mkdir_as_user,
          )

-         WORKFLOW_DIR_USER = (
-             Path(user_cache_dir) / f"{WORKFLOW_DIR.name}"
-         ).resolve()
          _mkdir_as_user(folder=str(WORKFLOW_DIR_USER), user=slurm_user)
-     else:
-         raise ValueError(f"{FRACTAL_RUNNER_BACKEND=} not supported")
-
-     # Update db
-     job.working_dir = WORKFLOW_DIR.as_posix()
-     job.working_dir_user = WORKFLOW_DIR_USER.as_posix()
-     db_sync.merge(job)
-     db_sync.commit()

      # After Session.commit() is called, either explicitly or when using a
      # context manager, all objects associated with the Session are expired.
fractal_server/config.py CHANGED
@@ -331,6 +331,12 @@ class Settings(BaseSettings):
      [`clusterfutures`](https://github.com/sampsyo/clusterfutures/blob/master/cfut/__init__.py)).
      """

+     FRACTAL_SLURM_SBATCH_SLEEP: int = 0
+     """
+     Interval to wait (in seconds) between two subsequent `sbatch` calls, when
+     running a task that produces multiple SLURM jobs.
+     """
+
      FRACTAL_SLURM_ERROR_HANDLING_INTERVAL: int = 5
      """
      Interval to wait (in seconds) when the SLURM backend does not find an
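
Since `Settings` is a pydantic `BaseSettings` class, the new variable can be supplied via the environment like the other `FRACTAL_SLURM_*` options. A hedged sketch (the import paths follow the `Inject(get_settings)` usage shown in the hunks above; the value `2` is arbitrary):

import os

# Assumption: must be set before the settings object is first instantiated.
os.environ["FRACTAL_SLURM_SBATCH_SLEEP"] = "2"

from fractal_server.config import get_settings
from fractal_server.syringe import Inject

settings = Inject(get_settings)
print(settings.FRACTAL_SLURM_SBATCH_SLEEP)  # 2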
fractal_server-2.0.5.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fractal-server
- Version: 2.0.3
+ Version: 2.0.5
  Summary: Server component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause
@@ -39,7 +39,7 @@ Description-Content-Type: text/markdown
  # Fractal Server

  [![PyPI version](https://img.shields.io/pypi/v/fractal-server?color=gree)](https://pypi.org/project/fractal-server/)
- [![CI Status](https://github.com/fractal-analytics-platform/fractal-server/actions/workflows/ci.yml/badge.svg)](https://github.com/fractal-analytics-platform/fractal-server/actions/workflows/ci.yml)
+ [![CI Status](https://github.com/fractal-analytics-platform/fractal-server/actions/workflows/ci.yml/badge.svg)](https://github.com/fractal-analytics-platform/fractal-server/actions/workflows/ci.yml?query=branch%3Amain)
  [![Coverage](https://raw.githubusercontent.com/fractal-analytics-platform/fractal-server/python-coverage-comment-action-data/badge.svg)](https://htmlpreview.github.io/?https://github.com/fractal-analytics-platform/fractal-server/blob/python-coverage-comment-action-data/htmlcov/index.html)
  [![License](https://img.shields.io/badge/License-BSD_3--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause)
  [![Benchmarks](https://img.shields.io/badge/Benchmarks-Done-blue)](https://htmlpreview.github.io/?https://github.com/fractal-analytics-platform/fractal-server/blob/benchmark-api/benchmarks/bench.html)
fractal_server-2.0.5.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- fractal_server/__init__.py,sha256=y8qIfi3XJXZnIlvcM-yGdSqRCMQ2ceId2VN0yHXJBAI,22
+ fractal_server/__init__.py,sha256=iemtcbFq1-Ut2bx94D5rbhyNac6WTU--MpGgUfTDdXI,22
  fractal_server/__main__.py,sha256=CocbzZooX1UtGqPi55GcHGNxnrJXFg5tUU5b3wyFCyo,4958
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -24,7 +24,7 @@ fractal_server/app/models/v2/workflowtask.py,sha256=3jEkObsSnlI05Pur_dSsXYdJxRqP
  fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/admin/v1.py,sha256=uMupmRkicaoWazX8qSX5fgh00O3MbuSfim8QayP6NkE,13996
- fractal_server/app/routes/admin/v2.py,sha256=T8-bGAL25on-ntZx_Msz9j5jq6NGhkjVl1jp3eRJUbw,9830
+ fractal_server/app/routes/admin/v2.py,sha256=e0A6RMWfmTO5dVL95XO-P2EljdEwR00FErxahEPzODQ,13757
  fractal_server/app/routes/api/__init__.py,sha256=EVyZrEq3I_1643QGTPCC5lgCp4xH_auYbrFfogTm4pc,315
  fractal_server/app/routes/api/v1/__init__.py,sha256=Y2HQdG197J0a7DyQEE2jn53IfxD0EHGhzK1I2JZuEck,958
  fractal_server/app/routes/api/v1/_aux_functions.py,sha256=KoSefKiBXximu0df4fJ3l9bKsGaLO8rb3z6xhD8PWj4,11973
@@ -42,8 +42,8 @@ fractal_server/app/routes/api/v2/images.py,sha256=4r_HblPWyuKSZSJZfn8mbDaLv1ncwZ
  fractal_server/app/routes/api/v2/job.py,sha256=BtaxErBDbLwjY2zgGD1I6eRpsffoMonifcS1CMEXmLU,5325
  fractal_server/app/routes/api/v2/project.py,sha256=qyvizYZ4aUFgF3tGdfp4z8AwWgfo19N_KbFEljfUaC8,5594
  fractal_server/app/routes/api/v2/status.py,sha256=osLexiMOSqmYcEV-41tlrwt9ofyFbtRm5HmPS5BU0t4,6394
- fractal_server/app/routes/api/v2/submit.py,sha256=svI1Oa2zIxUEAomzRt7-M66xKC4Pb9NEGcXNrtN6b5g,6940
- fractal_server/app/routes/api/v2/task.py,sha256=gJ0LruSk-Q1iMw8ZOX8C0wrZ4S4DGlQTr_5SdJJud0Q,7130
+ fractal_server/app/routes/api/v2/submit.py,sha256=su512HjafuD_lyiLswyxN4T8xwKaMpWOhGsYVPh94sI,7672
+ fractal_server/app/routes/api/v2/task.py,sha256=bRTtGgL8BBGbT7csVeRB-a54clgU2xHydi5XpcByDxg,8297
  fractal_server/app/routes/api/v2/task_collection.py,sha256=eN3NkZaZHkrqnLGRKE7Xd5mo0cHc8aK2lojCt26ErOQ,8988
  fractal_server/app/routes/api/v2/task_legacy.py,sha256=P_VJv9v0yzFUBuS-DQHhMVSOe20ecGJJcFBqiiFciOM,1628
  fractal_server/app/routes/api/v2/workflow.py,sha256=2GlcYNjpvCdjwC_Kn7y0UP16B3pOLSNXBvIVsVDtDKM,11863
@@ -64,7 +64,7 @@ fractal_server/app/runner/executors/slurm/_check_jobs_status.py,sha256=8d29a7DQ2
  fractal_server/app/runner/executors/slurm/_executor_wait_thread.py,sha256=J3tjAx33nBgW4eHAXDte7hDs7Oe9FLEZaElEt8inrbg,4421
  fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=rF37XDImX1QoWx37MC5hSM9AuY_KfHU5gaWwN4vl4Zk,15552
  fractal_server/app/runner/executors/slurm/_subprocess_run_as_user.py,sha256=8CCtxWCuB5UDst3C_WJxBU77xwPrpDyq7iMCZMnodXU,5123
- fractal_server/app/runner/executors/slurm/executor.py,sha256=O9h6ZPAKM95BUJrZkHCdFJZrw2zR2XmxeB5fCoGp97w,44451
+ fractal_server/app/runner/executors/slurm/executor.py,sha256=267YTDvyeA0yX2n2HffxP8OAu_CQF5uB9K-_AaUG3iU,44655
  fractal_server/app/runner/executors/slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77JRK82g_2hx0iBKTiMghuIo,5852
  fractal_server/app/runner/filenames.py,sha256=9lwu3yB4C67yiijYw8XIKaLFn3mJUt6_TCyVFM_aZUQ,206
  fractal_server/app/runner/set_start_and_last_task_index.py,sha256=-q4zVybAj8ek2XlbENKlfOAJ39hT_zoJoZkqzDqiAMY,1254
@@ -80,7 +80,7 @@ fractal_server/app/runner/v1/_slurm/_submit_setup.py,sha256=llTgSOCnCVMvm7Q0SoVp
  fractal_server/app/runner/v1/_slurm/get_slurm_config.py,sha256=6TLWQon8hSicsD7c3yXK4P9xeId0s_H3HOOeMUVGVss,5977
  fractal_server/app/runner/v1/common.py,sha256=_L-vjLnWato80VdlB_BFN4G8P4jSM07u-5cnl1T3S34,3294
  fractal_server/app/runner/v1/handle_failed_job.py,sha256=bHzScC_aIlU3q-bQxGW6rfWV4xbZ2tho_sktjsAs1no,4684
- fractal_server/app/runner/v2/__init__.py,sha256=mnXlC69UBQVpEwEWH3ZbMSsrVLl1yCnROh8_WGnXKUk,12500
+ fractal_server/app/runner/v2/__init__.py,sha256=U6FwKhiIF1INKU8LCoGHBCb04S2xOa3rpbltcodWXLw,11952
  fractal_server/app/runner/v2/_local/__init__.py,sha256=Q1s-DwXleUq6w1ZNv6tlh3tZv6cyBqxB_hMvZlqVYaM,5881
  fractal_server/app/runner/v2/_local/_local_config.py,sha256=lR0Js-l63mQUzN9hK0HkfdLsrTf-W6GHvPvbPC64amY,3630
  fractal_server/app/runner/v2/_local/_submit_setup.py,sha256=deagsLSy6A3ZHKaSDcQqrdvbQVM3i4kgyTcbVc0tC5U,1614
@@ -121,8 +121,7 @@ fractal_server/app/schemas/v2/task_collection.py,sha256=sY29NQfJrbjiidmVkVjSIH-2
  fractal_server/app/schemas/v2/workflow.py,sha256=Zzx3e-qgkH8le0FUmAx9UrV5PWd7bj14PPXUh_zgZXM,1827
  fractal_server/app/schemas/v2/workflowtask.py,sha256=atVuVN4aXsVEOmSd-vyg-8_8OnPmqx-gT75rXcn_AlQ,6552
  fractal_server/app/security/__init__.py,sha256=wxosoHc3mJYPCdPMyWnRD8w_2OgnKYp2aDkdmwrZh5k,11203
- fractal_server/config.py,sha256=2vw5M78aAogELsWut9X5sxxUz2lirNFpgqP6OynmKOQ,14859
- fractal_server/data_migrations/2_0_3.py,sha256=7EhwLCZTk1yHD_dlU-HIf2uvx6jUIgfDaA5np27QEEM,2918
+ fractal_server/config.py,sha256=lXofyyyMdRQoK39yTBUwVotRT8ptSb5LceHlZrUuK2o,15048
  fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
  fractal_server/images/__init__.py,sha256=xO6jTLE4EZKO6cTDdJsBmK9cdeh9hFTaSbSuWgQg7y4,196
  fractal_server/images/models.py,sha256=9ipU5h4N6ogBChoB-2vHoqtL0TXOHCv6kRR-fER3mkM,4167
@@ -163,8 +162,8 @@ fractal_server/tasks/v2/background_operations.py,sha256=MAMBn6W2bhkdK59kfUGiD7a1
  fractal_server/tasks/v2/get_collection_data.py,sha256=Qhf2T_aaqAfqu9_KpUSlXsS7EJoZQbEPEreHHa2jco8,502
  fractal_server/urls.py,sha256=5o_qq7PzKKbwq12NHSQZDmDitn5RAOeQ4xufu-2v9Zk,448
  fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
- fractal_server-2.0.3.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.0.3.dist-info/METADATA,sha256=DE7GliLouz0Eb5ar6MOL0eShI-bqrvx7jZi6RPavLtc,4202
- fractal_server-2.0.3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- fractal_server-2.0.3.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-2.0.3.dist-info/RECORD,,
+ fractal_server-2.0.5.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.0.5.dist-info/METADATA,sha256=cJupXGRdWtFkGReZMLccMSwEAXunkgL2TQoZZ1pesmM,4222
+ fractal_server-2.0.5.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ fractal_server-2.0.5.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-2.0.5.dist-info/RECORD,,
fractal_server/data_migrations/2_0_3.py DELETED
@@ -1,79 +0,0 @@
- import logging
-
- from packaging.version import parse
- from sqlalchemy import select
- from sqlalchemy.orm.attributes import flag_modified
-
- import fractal_server
- from fractal_server.app.db import get_sync_db
- from fractal_server.app.models.v1 import ApplyWorkflow
- from fractal_server.app.models.v2 import JobV2
- from fractal_server.app.schemas.v1 import ApplyWorkflowReadV1
- from fractal_server.app.schemas.v2 import JobReadV2
-
-
- def fix_db():
-     logger = logging.getLogger("fix_db")
-     logger.warning("START execution of fix_db function")
-
-     # Check that this module matches the current version
-     module_version = parse("2.0.3")
-     current_version = parse(fractal_server.__VERSION__)
-     if (
-         current_version.major != module_version.major
-         or current_version.minor != module_version.minor
-         or current_version.micro != module_version.micro
-     ):
-         raise RuntimeError(
-             f"{fractal_server.__VERSION__=} not matching with {__file__=}"
-         )
-
-     with next(get_sync_db()) as db:
-
-         # V1 jobs
-         stm = select(ApplyWorkflow)
-         jobs_v1 = db.execute(stm).scalars().all()
-         for job_v1 in sorted(jobs_v1, key=lambda x: x.id):
-             for KEY in ["history"]:
-                 logger.warning(
-                     f"Now removing {KEY} from `input/output_dataset_dump`, "
-                     f"for applyworkflow.id={job_v1.id}."
-                 )
-                 if KEY in job_v1.input_dataset_dump.keys():
-                     job_v1.input_dataset_dump.pop(KEY)
-                 if KEY in job_v1.output_dataset_dump.keys():
-                     job_v1.output_dataset_dump.pop(KEY)
-             flag_modified(job_v1, "input_dataset_dump")
-             flag_modified(job_v1, "output_dataset_dump")
-             db.add(job_v1)
-             db.commit()
-             db.refresh(job_v1)
-             db.expunge(job_v1)
-             logger.warning(
-                 f"Now validating applyworkflow.id={job_v1.id} with "
-                 "ApplyWorkflowReadV1."
-             )
-             ApplyWorkflowReadV1(**job_v1.model_dump())
-
-         # V2 jobs
-         stm = select(JobV2)
-         jobs_v2 = db.execute(stm).scalars().all()
-         for job_v2 in sorted(jobs_v2, key=lambda x: x.id):
-             for KEY in ["history", "images"]:
-                 logger.warning(
-                     f"Now removing {KEY} from `dataset_dump`, "
-                     f"for jobv2.id={job_v2.id}."
-                 )
-                 if KEY in job_v2.dataset_dump.keys():
-                     job_v2.dataset_dump.pop(KEY)
-             flag_modified(job_v2, "dataset_dump")
-             db.add(job_v2)
-             db.commit()
-             db.refresh(job_v2)
-             db.expunge(job_v2)
-             logger.warning(
-                 f"Now validating jobv2.id={job_v2.id} with JobReadV2."
-             )
-             JobReadV2(**job_v2.model_dump())
-
-     logger.warning("END of execution of fix_db function")