fractal-server 2.3.7__py3-none-any.whl → 2.3.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
-__VERSION__ = "2.3.7"
+__VERSION__ = "2.3.8"
@@ -18,6 +18,7 @@ from sqlmodel import select
 from ....config import get_settings
 from ....syringe import Inject
 from ....utils import get_timestamp
+from ....zip_tools import _zip_folder_to_byte_stream_iterator
 from ...db import AsyncSession
 from ...db import get_async_db
 from ...models.security import UserOAuth as User
@@ -34,7 +35,6 @@ from ...schemas.v1 import ProjectReadV1
 from ...schemas.v1 import WorkflowReadV1
 from ...security import current_active_superuser
 from ..aux._job import _write_shutdown_file
-from ..aux._job import _zip_folder_to_byte_stream
 from ..aux._runner import _check_shutdown_is_supported

 router_admin_v1 = APIRouter()
@@ -387,9 +387,8 @@ async def download_job_logs(
     # Create and return byte stream for zipped log folder
     PREFIX_ZIP = Path(job.working_dir).name
     zip_filename = f"{PREFIX_ZIP}_archive.zip"
-    byte_stream = _zip_folder_to_byte_stream(folder=job.working_dir)
     return StreamingResponse(
-        iter([byte_stream.getvalue()]),
+        _zip_folder_to_byte_stream_iterator(folder=job.working_dir),
         media_type="application/x-zip-compressed",
         headers={"Content-Disposition": f"attachment;filename={zip_filename}"},
     )
@@ -21,6 +21,7 @@ from sqlmodel import select
 from ....config import get_settings
 from ....syringe import Inject
 from ....utils import get_timestamp
+from ....zip_tools import _zip_folder_to_byte_stream_iterator
 from ...db import AsyncSession
 from ...db import get_async_db
 from ...models.security import UserOAuth as User
@@ -37,7 +38,6 @@ from ...schemas.v2 import JobUpdateV2
 from ...schemas.v2 import ProjectReadV2
 from ...security import current_active_superuser
 from ..aux._job import _write_shutdown_file
-from ..aux._job import _zip_folder_to_byte_stream
 from ..aux._runner import _check_shutdown_is_supported

 router_admin_v2 = APIRouter()
@@ -274,9 +274,8 @@ async def download_job_logs(
     # Create and return byte stream for zipped log folder
     PREFIX_ZIP = Path(job.working_dir).name
     zip_filename = f"{PREFIX_ZIP}_archive.zip"
-    byte_stream = _zip_folder_to_byte_stream(folder=job.working_dir)
     return StreamingResponse(
-        iter([byte_stream.getvalue()]),
+        _zip_folder_to_byte_stream_iterator(folder=job.working_dir),
         media_type="application/x-zip-compressed",
         headers={"Content-Disposition": f"attachment;filename={zip_filename}"},
     )
@@ -11,6 +11,8 @@ from fastapi import status
 from sqlmodel import select
 from sqlmodel.sql.expression import SelectOfScalar

+from .....config import get_settings
+from .....syringe import Inject
 from ....db import AsyncSession
 from ....models.v1 import ApplyWorkflow
 from ....models.v1 import Dataset
@@ -23,6 +25,15 @@ from ....schemas.v1 import JobStatusTypeV1
 from ....security import User


+def _raise_if_v1_is_read_only() -> None:
+    settings = Inject(get_settings)
+    if settings.FRACTAL_API_V1_MODE == "include_read_only":
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Legacy API is in read-only mode.",
+        )
+
+
 async def _get_project_check_owner(
     *,
     project_id: int,
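The new `_raise_if_v1_is_read_only` helper is then called at the top of every v1 endpoint that writes to the database, as the hunks below show. A minimal sketch of the pattern (a standalone FastAPI router; the module-level `FRACTAL_API_V1_MODE` variable is an illustrative stand-in for fractal-server's `Inject(get_settings)` machinery):

```python
from fastapi import APIRouter, HTTPException, status

# Stand-in for Inject(get_settings).FRACTAL_API_V1_MODE (illustrative only)
FRACTAL_API_V1_MODE = "include_read_only"

router = APIRouter()


def _raise_if_v1_is_read_only() -> None:
    # Same check as the helper added to _aux_functions.py above
    if FRACTAL_API_V1_MODE == "include_read_only":
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail="Legacy API is in read-only mode.",
        )


@router.post("/project/")
async def create_project(project: dict) -> dict:
    _raise_if_v1_is_read_only()  # every v1 write endpoint now opens with this
    return project
```

GET endpoints are left untouched, so a deployment can keep serving legacy data while rejecting all mutations with a 422.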
@@ -33,7 +33,7 @@ from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_project_check_owner
 from ._aux_functions import _get_submitted_jobs_statement
 from ._aux_functions import _get_workflow_check_owner
-
+from ._aux_functions import _raise_if_v1_is_read_only

 router = APIRouter()

@@ -52,6 +52,7 @@ async def create_dataset(
     """
     Add new dataset to current project
     """
+    _raise_if_v1_is_read_only()
     await _get_project_check_owner(
         project_id=project_id, user_id=user.id, db=db
     )
@@ -133,7 +134,7 @@ async def update_dataset(
     """
     Edit a dataset associated to the current project
     """
-
+    _raise_if_v1_is_read_only()
     if dataset_update.history is not None:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
@@ -170,6 +171,7 @@ async def delete_dataset(
     """
     Delete a dataset associated to the current project
     """
+    _raise_if_v1_is_read_only()
     output = await _get_dataset_check_owner(
         project_id=project_id,
         dataset_id=dataset_id,
@@ -243,6 +245,7 @@ async def create_resource(
     """
     Add resource to an existing dataset
     """
+    _raise_if_v1_is_read_only()
     output = await _get_dataset_check_owner(
         project_id=project_id,
         dataset_id=dataset_id,
@@ -299,6 +302,7 @@ async def update_resource(
     """
     Edit a resource of a dataset
     """
+    _raise_if_v1_is_read_only()
     output = await _get_dataset_check_owner(
         project_id=project_id,
         dataset_id=dataset_id,
@@ -339,6 +343,7 @@ async def delete_resource(
     """
     Delete a resource of a dataset
     """
+    _raise_if_v1_is_read_only()
     # Get the dataset DB entry
     output = await _get_dataset_check_owner(
         project_id=project_id,
@@ -8,6 +8,7 @@ from fastapi import status
 from fastapi.responses import StreamingResponse
 from sqlmodel import select

+from .....zip_tools import _zip_folder_to_byte_stream_iterator
 from ....db import AsyncSession
 from ....db import get_async_db
 from ....models.v1 import ApplyWorkflow
@@ -18,7 +19,6 @@ from ....schemas.v1 import ApplyWorkflowReadV1
 from ....security import current_active_user
 from ....security import User
 from ...aux._job import _write_shutdown_file
-from ...aux._job import _zip_folder_to_byte_stream
 from ...aux._runner import _check_shutdown_is_supported
 from ._aux_functions import _get_job_check_owner
 from ._aux_functions import _get_project_check_owner
@@ -128,9 +128,8 @@ async def download_job_logs(
     # Create and return byte stream for zipped log folder
     PREFIX_ZIP = Path(job.working_dir).name
     zip_filename = f"{PREFIX_ZIP}_archive.zip"
-    byte_stream = _zip_folder_to_byte_stream(folder=job.working_dir)
     return StreamingResponse(
-        iter([byte_stream.getvalue()]),
+        _zip_folder_to_byte_stream_iterator(folder=job.working_dir),
         media_type="application/x-zip-compressed",
         headers={"Content-Disposition": f"attachment;filename={zip_filename}"},
     )
@@ -42,6 +42,7 @@ from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_project_check_owner
 from ._aux_functions import _get_submitted_jobs_statement
 from ._aux_functions import _get_workflow_check_owner
+from ._aux_functions import _raise_if_v1_is_read_only
 from ._aux_functions import clean_app_job_list_v1

 router = APIRouter()
@@ -80,7 +81,7 @@ async def create_project(
     """
     Create new poject
     """
-
+    _raise_if_v1_is_read_only()
     # Check that there is no project with the same user and name
     await _check_project_exists(
         project_name=project.name, user_id=user.id, db=db
@@ -120,6 +121,7 @@ async def update_project(
     user: User = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ):
+    _raise_if_v1_is_read_only()
     project = await _get_project_check_owner(
         project_id=project_id, user_id=user.id, db=db
     )
@@ -148,6 +150,7 @@ async def delete_project(
     """
     Delete project
     """
+    _raise_if_v1_is_read_only()
     project = await _get_project_check_owner(
         project_id=project_id, user_id=user.id, db=db
     )
@@ -249,16 +252,8 @@ async def apply_workflow(
     user: User = Depends(current_active_verified_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ApplyWorkflowReadV1]:
-
+    _raise_if_v1_is_read_only()
     settings = Inject(get_settings)
-    if settings.FRACTAL_API_V1_MODE == "include_without_submission":
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(
-                "Legacy API is still accessible, "
-                "but the submission of legacy jobs is not available."
-            ),
-        )

     # Remove non-submitted V1 jobs from the app state when the list grows
     # beyond a threshold
@@ -21,6 +21,7 @@ from ....security import current_active_user
 from ....security import current_active_verified_user
 from ....security import User
 from ._aux_functions import _get_task_check_owner
+from ._aux_functions import _raise_if_v1_is_read_only

 router = APIRouter()

@@ -75,7 +76,7 @@ async def patch_task(
     """
     Edit a specific task (restricted to superusers and task owner)
     """
-
+    _raise_if_v1_is_read_only()
     if task_update.source:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
@@ -121,6 +122,7 @@ async def create_task(
     """
     Create a new task
     """
+    _raise_if_v1_is_read_only()
     # Set task.owner attribute
     if user.username:
         owner = user.username
@@ -174,7 +176,7 @@ async def delete_task(
     """
     Delete a task
     """
-
+    _raise_if_v1_is_read_only()
     db_task = await _get_task_check_owner(task_id=task_id, user=user, db=db)

     # Check that the Task is not in relationship with some WorkflowTask
@@ -25,6 +25,7 @@ from ....schemas.v1 import TaskCollectStatusV1
 from ....security import current_active_user
 from ....security import current_active_verified_user
 from ....security import User
+from ._aux_functions import _raise_if_v1_is_read_only
 from fractal_server.string_tools import slugify_task_name_for_source
 from fractal_server.tasks.utils import get_collection_log
 from fractal_server.tasks.v1._TaskCollectPip import _TaskCollectPip
@@ -71,7 +72,7 @@ async def collect_tasks_pip(
     Trigger the creation of a dedicated virtual environment, the installation
     of a package and the collection of tasks as advertised in the manifest.
     """
-
+    _raise_if_v1_is_read_only()
     logger = set_logger(logger_name="collect_tasks_pip")

     # Validate payload as _TaskCollectPip, which has more strict checks than
@@ -39,6 +39,7 @@ from ._aux_functions import _check_workflow_exists
 from ._aux_functions import _get_project_check_owner
 from ._aux_functions import _get_submitted_jobs_statement
 from ._aux_functions import _get_workflow_check_owner
+from ._aux_functions import _raise_if_v1_is_read_only
 from ._aux_functions import _workflow_insert_task


@@ -84,6 +85,7 @@ async def create_workflow(
     """
     Create a workflow, associate to a project
     """
+    _raise_if_v1_is_read_only()
     await _get_project_check_owner(
         project_id=project_id,
         user_id=user.id,
@@ -136,6 +138,7 @@ async def update_workflow(
     """
     Edit a workflow
     """
+    _raise_if_v1_is_read_only()
     workflow = await _get_workflow_check_owner(
         project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
     )
@@ -187,7 +190,7 @@ async def delete_workflow(
     """
     Delete a workflow
     """
-
+    _raise_if_v1_is_read_only()
     workflow = await _get_workflow_check_owner(
         project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
     )
@@ -275,7 +278,7 @@ async def import_workflow(
     Also create all required objects (i.e. Workflow and WorkflowTask's) along
     the way.
     """
-
+    _raise_if_v1_is_read_only()
     # Preliminary checks
     await _get_project_check_owner(
         project_id=project_id,
@@ -30,6 +30,7 @@ from ....security import current_active_user
 from ....security import User
 from ._aux_functions import _get_workflow_check_owner
 from ._aux_functions import _get_workflow_task_check_owner
+from ._aux_functions import _raise_if_v1_is_read_only
 from ._aux_functions import _workflow_insert_task

 router = APIRouter()
@@ -51,7 +52,7 @@ async def create_workflowtask(
     """
     Add a WorkflowTask to a Workflow
     """
-
+    _raise_if_v1_is_read_only()
     workflow = await _get_workflow_check_owner(
         project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
     )
@@ -112,7 +113,7 @@ async def update_workflowtask(
     """
     Edit a WorkflowTask of a Workflow
     """
-
+    _raise_if_v1_is_read_only()
     db_workflow_task, db_workflow = await _get_workflow_task_check_owner(
         project_id=project_id,
         workflow_task_id=workflow_task_id,
@@ -167,7 +168,7 @@ async def delete_workflowtask(
     """
     Delete a WorkflowTask of a Workflow
     """
-
+    _raise_if_v1_is_read_only()
     db_workflow_task, db_workflow = await _get_workflow_task_check_owner(
         project_id=project_id,
         workflow_task_id=workflow_task_id,
@@ -8,6 +8,7 @@ from fastapi import status
 from fastapi.responses import StreamingResponse
 from sqlmodel import select

+from .....zip_tools import _zip_folder_to_byte_stream_iterator
 from ....db import AsyncSession
 from ....db import get_async_db
 from ....models.v2 import JobV2
@@ -18,7 +19,6 @@ from ....schemas.v2 import JobStatusTypeV2
 from ....security import current_active_user
 from ....security import User
 from ...aux._job import _write_shutdown_file
-from ...aux._job import _zip_folder_to_byte_stream
 from ...aux._runner import _check_shutdown_is_supported
 from ._aux_functions import _get_job_check_owner
 from ._aux_functions import _get_project_check_owner
@@ -118,7 +118,7 @@ async def download_job_logs(
     db: AsyncSession = Depends(get_async_db),
 ) -> StreamingResponse:
     """
-    Download job folder
+    Download zipped job folder
     """
     output = await _get_job_check_owner(
         project_id=project_id,
@@ -127,15 +127,11 @@ async def download_job_logs(
         db=db,
     )
     job = output["job"]
-
-    # Create and return byte stream for zipped log folder
-    PREFIX_ZIP = Path(job.working_dir).name
-    zip_filename = f"{PREFIX_ZIP}_archive.zip"
-    byte_stream = _zip_folder_to_byte_stream(folder=job.working_dir)
+    zip_name = f"{Path(job.working_dir).name}_archive.zip"
     return StreamingResponse(
-        iter([byte_stream.getvalue()]),
+        _zip_folder_to_byte_stream_iterator(folder=job.working_dir),
         media_type="application/x-zip-compressed",
-        headers={"Content-Disposition": f"attachment;filename={zip_filename}"},
+        headers={"Content-Disposition": f"attachment;filename={zip_name}"},
     )


@@ -1,9 +1,5 @@
-import os
-from io import BytesIO
 from pathlib import Path
 from typing import Union
-from zipfile import ZIP_DEFLATED
-from zipfile import ZipFile

 from ...models.v1 import ApplyWorkflow
 from ...models.v2 import JobV2
@@ -24,22 +20,3 @@ def _write_shutdown_file(*, job: Union[ApplyWorkflow, JobV2]):
     shutdown_file = Path(job.working_dir) / SHUTDOWN_FILENAME
     with shutdown_file.open("w") as f:
         f.write(f"Trigger executor shutdown for {job.id=}.")
-
-
-def _zip_folder_to_byte_stream(*, folder: str) -> BytesIO:
-    """
-    Get byte stream with the zipped log folder of a job.
-
-    Args:
-        folder: the folder to zip
-    """
-
-    byte_stream = BytesIO()
-    with ZipFile(byte_stream, mode="w", compression=ZIP_DEFLATED) as zipfile:
-        for root, dirs, files in os.walk(folder):
-            for file in files:
-                file_path = os.path.join(root, file)
-                archive_path = os.path.relpath(file_path, folder)
-                zipfile.write(file_path, archive_path)
-
-    return byte_stream
@@ -5,7 +5,6 @@ This module is the single entry point to the runner backend subsystem V2.
 Other subystems should only import this module and not its submodules or
 the individual backends.
 """
-import logging
 import os
 import traceback
 from pathlib import Path
@@ -21,6 +20,7 @@ from ....logger import set_logger
 from ....ssh._fabric import FractalSSH
 from ....syringe import Inject
 from ....utils import get_timestamp
+from ....zip_tools import _zip_folder_to_file_and_remove
 from ...db import DB
 from ...models.v2 import DatasetV2
 from ...models.v2 import JobV2
@@ -114,9 +114,34 @@ async def submit_workflow(

     with next(DB.get_sync_db()) as db_sync:

-        job: JobV2 = db_sync.get(JobV2, job_id)
-        if not job:
+        try:
+            job: Optional[JobV2] = db_sync.get(JobV2, job_id)
+            dataset: Optional[DatasetV2] = db_sync.get(DatasetV2, dataset_id)
+            workflow: Optional[WorkflowV2] = db_sync.get(
+                WorkflowV2, workflow_id
+            )
+        except Exception as e:
+            logger.error(
+                f"Error conneting to the database. Original error: {str(e)}"
+            )
+            reset_logger_handlers(logger)
+            return
+
+        if job is None:
             logger.error(f"JobV2 {job_id} does not exist")
+            reset_logger_handlers(logger)
+            return
+        if dataset is None or workflow is None:
+            log_msg = ""
+            if not dataset:
+                log_msg += f"Cannot fetch dataset {dataset_id} from database\n"
+            if not workflow:
+                log_msg += (
+                    f"Cannot fetch workflow {workflow_id} from database\n"
+                )
+            fail_job(
+                db=db_sync, job=job, log_msg=log_msg, logger_name=logger_name
+            )
             return

@@ -137,21 +162,6 @@ async def submit_workflow(
         )
         return

-        dataset: DatasetV2 = db_sync.get(DatasetV2, dataset_id)
-        workflow: WorkflowV2 = db_sync.get(WorkflowV2, workflow_id)
-        if not (dataset and workflow):
-            log_msg = ""
-            if not dataset:
-                log_msg += f"Cannot fetch dataset {dataset_id} from database\n"
-            if not workflow:
-                log_msg += (
-                    f"Cannot fetch workflow {workflow_id} from database\n"
-                )
-            fail_job(
-                db=db_sync, job=job, log_msg=log_msg, logger_name=logger_name
-            )
-            return
-
         # Define and create server-side working folder
         WORKFLOW_DIR_LOCAL = Path(job.working_dir)
         if WORKFLOW_DIR_LOCAL.exists():
@@ -192,9 +202,9 @@ async def submit_workflow(
                 fractal_ssh.mkdir(
                     folder=str(WORKFLOW_DIR_REMOTE),
                 )
-                logging.info(f"Created {str(WORKFLOW_DIR_REMOTE)} via SSH.")
+                logger.info(f"Created {str(WORKFLOW_DIR_REMOTE)} via SSH.")
             else:
-                logging.error(
+                logger.error(
                     "Invalid FRACTAL_RUNNER_BACKEND="
                     f"{settings.FRACTAL_RUNNER_BACKEND}."
                 )
@@ -219,7 +229,7 @@ async def submit_workflow(
                     user=slurm_user,
                 )
             else:
-                logging.info("Skip remote-subfolder creation")
+                logger.info("Skip remote-subfolder creation")
         except Exception as e:
             error_type = type(e).__name__
             fail_job(
@@ -448,3 +458,4 @@ async def submit_workflow(
         finally:
             reset_logger_handlers(logger)
             db_sync.close()
+            _zip_folder_to_file_and_remove(folder=job.working_dir)
fractal_server/config.py CHANGED
@@ -547,7 +547,7 @@ class Settings(BaseSettings):
     """

     FRACTAL_API_V1_MODE: Literal[
-        "include", "include_without_submission", "exclude"
+        "include", "include_read_only", "exclude"
     ] = "include"
     """
     Whether to include the v1 API.
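The `include_without_submission` mode is renamed to `include_read_only` and, per the route changes above, now blocks every v1 write rather than only job submission. A sketch of the validation behavior, using a plain pydantic model in place of fractal-server's `Settings(BaseSettings)`:

```python
from typing import Literal

from pydantic import BaseModel, ValidationError


class SettingsSketch(BaseModel):
    # Mirrors the Literal field in fractal_server.config.Settings
    FRACTAL_API_V1_MODE: Literal[
        "include", "include_read_only", "exclude"
    ] = "include"


SettingsSketch(FRACTAL_API_V1_MODE="include_read_only")  # accepted
try:
    SettingsSketch(FRACTAL_API_V1_MODE="include_without_submission")
except ValidationError:
    print("the 2.3.7 value is rejected after the upgrade")
```

Deployments that pin the old value in their environment must update it before upgrading.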
@@ -3,7 +3,7 @@ import os
 import signal
 from gunicorn.glogging import Logger as GunicornLogger

-from uvicorn.workers import UvicornWorker
+from uvicorn_worker import UvicornWorker

 logger = logging.getLogger("uvicorn.error")

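The `uvicorn.workers` module is deprecated upstream; the worker class now ships in the standalone `uvicorn-worker` package, which this release adds as a dependency (see the METADATA hunk below). A hedged compatibility sketch for code that must import the class across both layouts; fractal-server itself simply imports from `uvicorn_worker` unconditionally:

```python
# Sketch only: fall back to the legacy import when uvicorn-worker is absent.
try:
    from uvicorn_worker import UvicornWorker  # standalone package (new)
except ImportError:
    from uvicorn.workers import UvicornWorker  # legacy location (pre-change)


class WorkerSketch(UvicornWorker):
    """Hypothetical subclass; the real one lives in gunicorn_fractal.py."""

    pass
```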
@@ -0,0 +1,110 @@
+import os
+import shutil
+from io import BytesIO
+from pathlib import Path
+from typing import Iterator
+from typing import TypeVar
+from zipfile import ZIP_DEFLATED
+from zipfile import ZipFile
+
+T = TypeVar("T", str, BytesIO)
+THRESHOLD_ZIP_FILE_SIZE_MB = 1.0
+
+
+def _create_zip(folder: str, output: T) -> T:
+    """
+    Zip a folder into a zip-file or into a BytesIO.
+
+    Args:
+        folder: Folder to be zipped.
+        output: Either a string with the path of the zip file, or a BytesIO
+            object.
+
+    Returns:
+        Either the zip-file path string, or the modified BytesIO object.
+    """
+    if isinstance(output, str) and os.path.exists(output):
+        raise FileExistsError(f"Zip file '{output}' already exists")
+    if isinstance(output, BytesIO) and output.getbuffer().nbytes > 0:
+        raise ValueError("BytesIO is not empty")
+
+    with ZipFile(output, mode="w", compression=ZIP_DEFLATED) as zipfile:
+        for root, dirs, files in os.walk(folder):
+            for file in files:
+                file_path = os.path.join(root, file)
+                archive_path = os.path.relpath(file_path, folder)
+                zipfile.write(file_path, archive_path)
+    return output
+
+
+def _zip_folder_to_byte_stream_iterator(folder: str) -> Iterator:
+    """
+    Returns byte stream with the zipped log folder of a job.
+
+    Args:
+        folder: the folder to zip
+    """
+    zip_file = Path(f"{folder}.zip")
+
+    if os.path.exists(zip_file):
+
+        def iterfile():
+            """
+            https://fastapi.tiangolo.com/advanced/custom-response/#using-streamingresponse-with-file-like-objects
+            """
+            with open(zip_file, mode="rb") as file_like:
+                yield from file_like
+
+        return iterfile()
+
+    else:
+
+        byte_stream = _create_zip(folder, output=BytesIO())
+        return iter([byte_stream.getvalue()])
+
+
+def _folder_can_be_deleted(folder: str) -> bool:
+    """
+    Given the path of a folder as string, returns `False` if either:
+    - the related zip file `{folder}.zip` does already exists,
+    - the folder and the zip file have a different number of internal files,
+    - the zip file has a very small size.
+    Otherwise returns `True`.
+    """
+    # CHECK 1: zip file exists
+    zip_file = f"{folder}.zip"
+    if not os.path.exists(zip_file):
+        return False
+
+    # CHECK 2: folder and zip file have the same number of files
+    folder_files_count = sum(1 for f in Path(folder).rglob("*") if f.is_file())
+    with ZipFile(zip_file, "r") as zip_ref:
+        zip_files_count = len(zip_ref.namelist())
+    if folder_files_count != zip_files_count:
+        return False
+
+    # CHECK 3: zip file size is >= than `THRESHOLD_ZIP_FILE_SIZE_MB`
+    zip_size = os.path.getsize(zip_file)
+    if zip_size < THRESHOLD_ZIP_FILE_SIZE_MB * 1024 * 1024:
+        return False
+
+    return True
+
+
+def _zip_folder_to_file_and_remove(folder: str) -> None:
+    """
+    Creates a ZIP archive of the specified folder and removes the original
+    folder (if it can be deleted).
+
+    This function performs the following steps:
+    1. Creates a ZIP archive of the `folder` and names it with a temporary
+       suffix `_tmp.zip`.
+    2. Renames the ZIP removing the suffix (this would possibly overwrite a
+       file with the same name already present).
+    3. Checks if the folder can be safely deleted using the
+       `_folder_can_be_deleted` function. If so, deletes the original folder.
+    """
+    _create_zip(folder, f"{folder}_tmp.zip")
+    shutil.move(f"{folder}_tmp.zip", f"{folder}.zip")
+    if _folder_can_be_deleted(folder):
+        shutil.rmtree(folder)
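Taken together with the runner change above, where `_zip_folder_to_file_and_remove` runs when `submit_workflow` finishes, the download endpoints can usually stream the pre-built `{folder}.zip` from disk instead of zipping the working directory into memory on every request. A minimal sketch of the consumer side (the endpoint itself is illustrative, not part of fractal-server):

```python
from pathlib import Path

from fastapi import FastAPI
from fastapi.responses import StreamingResponse

from fractal_server.zip_tools import _zip_folder_to_byte_stream_iterator

app = FastAPI()


@app.get("/demo-download/")
def demo_download(working_dir: str) -> StreamingResponse:
    # Streams {working_dir}.zip if it exists, otherwise zips in memory
    zip_name = f"{Path(working_dir).name}_archive.zip"
    return StreamingResponse(
        _zip_folder_to_byte_stream_iterator(folder=working_dir),
        media_type="application/x-zip-compressed",
        headers={"Content-Disposition": f"attachment;filename={zip_name}"},
    )
```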
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fractal-server
-Version: 2.3.7
+Version: 2.3.8
 Summary: Server component of the Fractal analytics platform
 Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause
@@ -23,7 +23,7 @@ Requires-Dist: bcrypt (==4.0.1)
 Requires-Dist: cloudpickle (>=3.0.0,<3.1.0)
 Requires-Dist: clusterfutures (>=0.5,<0.6)
 Requires-Dist: fabric (>=3.2.2,<4.0.0)
-Requires-Dist: fastapi (>=0.110.0,<0.111.0)
+Requires-Dist: fastapi (>=0.112.0,<0.113.0)
 Requires-Dist: fastapi-users[oauth] (>=12.1.0,<13.0.0)
 Requires-Dist: gunicorn (>=21.2,<23.0) ; extra == "gunicorn"
 Requires-Dist: packaging (>=23.2,<24.0)
@@ -35,6 +35,7 @@ Requires-Dist: python-dotenv (>=1.0.0,<2.0.0)
 Requires-Dist: sqlalchemy[asyncio] (>=2.0.23,<2.1)
 Requires-Dist: sqlmodel (>=0.0.21,<0.0.22)
 Requires-Dist: uvicorn (>=0.29.0,<0.30.0)
+Requires-Dist: uvicorn-worker (>=0.2.0,<0.3.0)
 Project-URL: Changelog, https://github.com/fractal-analytics-platform/fractal-server/blob/main/CHANGELOG.md
 Project-URL: Documentation, https://fractal-analytics-platform.github.io/fractal-server
 Project-URL: Repository, https://github.com/fractal-analytics-platform/fractal-server
@@ -48,10 +49,9 @@ Description-Content-Type: text/markdown
 [![License](https://img.shields.io/badge/License-BSD_3--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause)
 [![Benchmarks](https://img.shields.io/badge/Benchmarks-Done-blue)](https://htmlpreview.github.io/?https://github.com/fractal-analytics-platform/fractal-server/blob/benchmark-api/benchmarks/bench.html)

-Fractal is a framework to process high content imaging data at scale and
-prepare it for interactive visualization.
+[Fractal](https://fractal-analytics-platform.github.io/) is a framework developed at the [BioVisionCenter](https://www.biovisioncenter.uzh.ch/en.html) to process bioimaging data at scale in the OME-Zarr format and prepare the images for interactive visualization.

-![Fractal_Overview](https://fractal-analytics-platform.github.io/assets/fractal_overview.jpg)
+![Fractal_overview](https://github.com/user-attachments/assets/286122d9-08cf-48e8-996d-3cf53e0a81c6)

 This is the server component of the fractal analytics platform.
 Find more information about Fractal in general and the other repositories at
@@ -1,4 +1,4 @@
-fractal_server/__init__.py,sha256=RnhDw1BSFncjEr_850CnTxRPd0CUQSmMtBKZ6CaNXg0,22
+fractal_server/__init__.py,sha256=pPimcjq1LECZwUR8cl5IoaPogld-1apxfHo12UP1w0w,22
 fractal_server/__main__.py,sha256=CocbzZooX1UtGqPi55GcHGNxnrJXFg5tUU5b3wyFCyo,4958
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -23,23 +23,23 @@ fractal_server/app/models/v2/workflow.py,sha256=YBgFGCziUgU0aJ5EM3Svu9W2c46AewZO
 fractal_server/app/models/v2/workflowtask.py,sha256=3jEkObsSnlI05Pur_dSsXYdJxRqPL60Z7tK5-EJLOks,1532
 fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fractal_server/app/routes/admin/v1.py,sha256=ShmsUFtfyRqP84QScqmRDVrJFpbR4p-8baLxAkI3n1U,13926
-fractal_server/app/routes/admin/v2.py,sha256=JuG1qqVeQIgVJgPyqrB1053il22mGPGpKBiJi6zVsqQ,13687
+fractal_server/app/routes/admin/v1.py,sha256=jOBESHtU-RmkxqK3yxsYnea0ecJaeoYEIBPUTU72cMk,13898
+fractal_server/app/routes/admin/v2.py,sha256=A8TwuxlhhA1dJZqd5f3lkIYijXK_yKArbR4a5WM2IqM,13659
 fractal_server/app/routes/api/__init__.py,sha256=XlJUFd-0FossfyKyJti4dmwY6SMysQn1yiisMrNzgBE,615
 fractal_server/app/routes/api/v1/__init__.py,sha256=Y2HQdG197J0a7DyQEE2jn53IfxD0EHGhzK1I2JZuEck,958
-fractal_server/app/routes/api/v1/_aux_functions.py,sha256=CeaVrNVYs_lEbiJbu4uaTeeiajljeXfdq1iLkt5RoRo,12636
-fractal_server/app/routes/api/v1/dataset.py,sha256=HRE-8vPmVkeXf7WFYkI19mDtbY-iJZeJ7PmMiV0LMgY,16923
-fractal_server/app/routes/api/v1/job.py,sha256=217fGh7U37esib1JG8INpLhE0W88t9X0fFwCNVt2r_M,5313
-fractal_server/app/routes/api/v1/project.py,sha256=0DavnACBDr8-BHWGQ0YPfxVNJLsYmbuo-TeKJk1s3Hw,16436
-fractal_server/app/routes/api/v1/task.py,sha256=udbKnenzc-Q10elYCVB9JmOPWATraa9tZi0AaByvWo0,6129
-fractal_server/app/routes/api/v1/task_collection.py,sha256=82XBsJHlPiDPCbpLa-16ojKDpj2LYj9_jFSZt0t58bQ,8911
-fractal_server/app/routes/api/v1/workflow.py,sha256=7r9IoIevg_rvYCrerMOsIsUabSOQatxdPCfLdkP0dRs,10942
-fractal_server/app/routes/api/v1/workflowtask.py,sha256=qcHQlzlSFf_k8gtId-mA3tnyzgSR7i1m7pvR4R86blE,5582
+fractal_server/app/routes/api/v1/_aux_functions.py,sha256=hDBo2nBW04k3NKYCvrwO0T1FwzJQuMi2QfLNDDEek90,13000
+fractal_server/app/routes/api/v1/dataset.py,sha256=Yz3La_Vl6lUPRtAEMX730zNXal1D_8n1l0jQf8lWQSI,17167
+fractal_server/app/routes/api/v1/job.py,sha256=Ulec7Ik_wJkkapcLnMB1jlDf1Mt3r5BMu9cKKaKs9ZI,5285
+fractal_server/app/routes/api/v1/project.py,sha256=_zuaT2w6yTGP6YKjpY7UIlz7RxydJEaQO5PIaISUEfk,16289
+fractal_server/app/routes/api/v1/task.py,sha256=-dWV-xrLm25dQa8DXncFK4v3jBb00Yi0s6MoH1EyzTQ,6277
+fractal_server/app/routes/api/v1/task_collection.py,sha256=qWYaHmAg35coHBS5GOieMPWQfyjG_IrM5qA_wL4lv5k,8996
+fractal_server/app/routes/api/v1/workflow.py,sha256=0mvooYkLipblhDEzuaYtABstidLDSms30GENtsc2LlY,11122
+fractal_server/app/routes/api/v1/workflowtask.py,sha256=SogMLhpqUtW6QO008mOAIncfu9xP-7R0wC1GnQsY_4s,5729
 fractal_server/app/routes/api/v2/__init__.py,sha256=JrPWfKIJy9btRCP-zw2nZwLpSdBxEKY5emuCuJbqG0s,1813
 fractal_server/app/routes/api/v2/_aux_functions.py,sha256=yeA0650pBk43M5ZQGpVQ17nH5D97NIGY-3tNNLQIW1M,14901
 fractal_server/app/routes/api/v2/dataset.py,sha256=_HjKNP9XsMGoqyubGdF2ZyeW7vXC3VdK_0_TaUxgIF0,8248
 fractal_server/app/routes/api/v2/images.py,sha256=4r_HblPWyuKSZSJZfn8mbDaLv1ncwZU0gWdKneZcNG4,7894
-fractal_server/app/routes/api/v2/job.py,sha256=RkIj7ANK-nkxUvcG9K2r4dFdPnvGomx7jdB6U9bqOVQ,5202
+fractal_server/app/routes/api/v2/job.py,sha256=EUoM-cg9761uY0W1B8B0ZhQYb_g6JwS8GdNepRub40k,5086
 fractal_server/app/routes/api/v2/project.py,sha256=U4TxD-J4TtQuu1D4BOhL1kTse5fCiNc3BwGH7bnlo38,6592
 fractal_server/app/routes/api/v2/status.py,sha256=osLexiMOSqmYcEV-41tlrwt9ofyFbtRm5HmPS5BU0t4,6394
 fractal_server/app/routes/api/v2/submit.py,sha256=Oqggq3GeBrUsE535tmw-JsRZEWa7jziU34fKdlj4QUE,8734
@@ -51,7 +51,7 @@ fractal_server/app/routes/api/v2/workflow.py,sha256=2GlcYNjpvCdjwC_Kn7y0UP16B3pO
 fractal_server/app/routes/api/v2/workflowtask.py,sha256=l_eQPniK1zR0u249bJj4c2hFlyDwsSJgsFR6hxJaOjs,8007
 fractal_server/app/routes/auth.py,sha256=Xv80iqdyfY3lyicYs2Y8B6zEDEnyUu_H6_6psYtv3R4,4885
 fractal_server/app/routes/aux/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fractal_server/app/routes/aux/_job.py,sha256=HUItNm0SZFAYsyL1rXSjBre1-rXSe6x51qH9KAQWS1w,1361
+fractal_server/app/routes/aux/_job.py,sha256=q-RCiW17yXnZKAC_0La52RLvhqhxuvbgQJ2MlGXOj8A,702
 fractal_server/app/routes/aux/_runner.py,sha256=FdCVla5DxGAZ__aB7Z8dEJzD_RIeh5tftjrPyqkr8N8,895
 fractal_server/app/runner/.gitignore,sha256=ytzN_oyHWXrGU7iFAtoHSTUbM6Rn6kG0Zkddg0xZk6s,16
 fractal_server/app/runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -90,7 +90,7 @@ fractal_server/app/runner/v1/_slurm/_submit_setup.py,sha256=KO9c694d318adoPQh9UG
 fractal_server/app/runner/v1/_slurm/get_slurm_config.py,sha256=6pQNNx997bLIfLp0guF09t_O0ZYRXnbEGLktSAcKnic,5999
 fractal_server/app/runner/v1/common.py,sha256=_L-vjLnWato80VdlB_BFN4G8P4jSM07u-5cnl1T3S34,3294
 fractal_server/app/runner/v1/handle_failed_job.py,sha256=bHzScC_aIlU3q-bQxGW6rfWV4xbZ2tho_sktjsAs1no,4684
-fractal_server/app/runner/v2/__init__.py,sha256=nD4uFi-RGsN6JAmJNpV2dS603u8KqFuGwXZS8jIrf50,16917
+fractal_server/app/runner/v2/__init__.py,sha256=OYmJRpYTGUCwnbLeIDTms7erZxOQMV8vq5tdJmL0pNo,17368
 fractal_server/app/runner/v2/_local/__init__.py,sha256=KTj14K6jH8fXGUi5P7u5_RqEE1zF4aXtgPxCKzw46iw,5971
 fractal_server/app/runner/v2/_local/_local_config.py,sha256=9oi209Dlp35ANfxb_DISqmMKKc6DPaMsmYVWbZLseME,3630
 fractal_server/app/runner/v2/_local/_submit_setup.py,sha256=MucNOo8Er0F5ZIwH7CnTeXgnFMc6d3pKPkv563QNVi0,1630
@@ -139,9 +139,9 @@ fractal_server/app/schemas/v2/task_collection.py,sha256=8PG1bOqkfQqORMN0brWf6mHD
 fractal_server/app/schemas/v2/workflow.py,sha256=Zzx3e-qgkH8le0FUmAx9UrV5PWd7bj14PPXUh_zgZXM,1827
 fractal_server/app/schemas/v2/workflowtask.py,sha256=atVuVN4aXsVEOmSd-vyg-8_8OnPmqx-gT75rXcn_AlQ,6552
 fractal_server/app/security/__init__.py,sha256=2-QbwuR-nsuHM_uwKS_WzYvkhnuhO5jUv8UVROetyVk,11169
-fractal_server/config.py,sha256=-9x8grPTrM-rnztRAOlqHKuHCHWEg_-qnccU3kjuxbY,25058
+fractal_server/config.py,sha256=R0VezSe2PEDjQjHEX2V29A1jMdoomdyECBjWNY15v_0,25049
 fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
-fractal_server/gunicorn_fractal.py,sha256=2AOkgxu-oQ-XB578_voT0VuhmAXFTmb0c-nYn1XLy_Q,1231
+fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
 fractal_server/images/__init__.py,sha256=xO6jTLE4EZKO6cTDdJsBmK9cdeh9hFTaSbSuWgQg7y4,196
 fractal_server/images/models.py,sha256=9ipU5h4N6ogBChoB-2vHoqtL0TXOHCv6kRR-fER3mkM,4167
 fractal_server/images/tools.py,sha256=gxeniYy4Z-cp_ToK2LHPJUTVVUUrdpogYdcBUvBuLiY,2209
@@ -193,8 +193,9 @@ fractal_server/tasks/v2/templates/_5_pip_show.sh,sha256=GrJ19uHYQxANEy9JaeNJZVTq
 fractal_server/tasks/v2/utils.py,sha256=JOyCacb6MNvrwfLNTyLwcz8y79J29YuJeJ2MK5kqXRM,1657
 fractal_server/urls.py,sha256=5o_qq7PzKKbwq12NHSQZDmDitn5RAOeQ4xufu-2v9Zk,448
 fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
-fractal_server-2.3.7.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
-fractal_server-2.3.7.dist-info/METADATA,sha256=dv6zpe0qPcyg3o-cHrxCJMiwNW22FFX4O_sFdsn3XWE,4425
-fractal_server-2.3.7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-fractal_server-2.3.7.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
-fractal_server-2.3.7.dist-info/RECORD,,
+fractal_server/zip_tools.py,sha256=TbFiNhuP4e-o3a3Y-Aq-0likPNtBzTOWdxqA83nY2lE,3617
+fractal_server-2.3.8.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.3.8.dist-info/METADATA,sha256=GNthWF58sDUb_M6943aMtnh0DuGxaEaEN5OnQT1vwQM,4628
+fractal_server-2.3.8.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+fractal_server-2.3.8.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.3.8.dist-info/RECORD,,