fractal-server 2.9.1__py3-none-any.whl → 2.10.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/security.py +0 -1
  3. fractal_server/app/models/user_settings.py +1 -2
  4. fractal_server/app/routes/api/v1/project.py +7 -1
  5. fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +2 -2
  6. fractal_server/app/routes/api/v2/submit.py +9 -5
  7. fractal_server/app/routes/api/v2/task_collection.py +110 -21
  8. fractal_server/app/routes/api/v2/workflow_import.py +4 -7
  9. fractal_server/app/runner/executors/slurm/_slurm_config.py +2 -2
  10. fractal_server/app/runner/executors/slurm/ssh/executor.py +4 -3
  11. fractal_server/app/runner/v1/_common.py +1 -2
  12. fractal_server/app/runner/v2/_local/__init__.py +0 -4
  13. fractal_server/app/runner/v2/_slurm_ssh/__init__.py +0 -6
  14. fractal_server/app/runner/v2/_slurm_sudo/__init__.py +1 -7
  15. fractal_server/app/schemas/user_settings.py +0 -18
  16. fractal_server/app/schemas/v2/__init__.py +1 -0
  17. fractal_server/app/schemas/v2/task_collection.py +31 -35
  18. fractal_server/app/schemas/v2/task_group.py +0 -16
  19. fractal_server/app/user_settings.py +2 -2
  20. fractal_server/config.py +1 -2
  21. fractal_server/logger.py +2 -4
  22. fractal_server/migrations/versions/316140ff7ee1_remove_usersettings_cache_dir.py +36 -0
  23. fractal_server/ssh/_fabric.py +9 -11
  24. fractal_server/tasks/v2/local/collect.py +17 -22
  25. fractal_server/tasks/v2/ssh/_utils.py +1 -1
  26. fractal_server/tasks/v2/ssh/collect.py +26 -15
  27. fractal_server/tasks/v2/utils_database.py +6 -3
  28. fractal_server/tasks/v2/utils_python_interpreter.py +2 -2
  29. fractal_server/tasks/v2/utils_templates.py +3 -3
  30. {fractal_server-2.9.1.dist-info → fractal_server-2.10.0a0.dist-info}/METADATA +1 -1
  31. {fractal_server-2.9.1.dist-info → fractal_server-2.10.0a0.dist-info}/RECORD +34 -33
  32. {fractal_server-2.9.1.dist-info → fractal_server-2.10.0a0.dist-info}/LICENSE +0 -0
  33. {fractal_server-2.9.1.dist-info → fractal_server-2.10.0a0.dist-info}/WHEEL +0 -0
  34. {fractal_server-2.9.1.dist-info → fractal_server-2.10.0a0.dist-info}/entry_points.txt +0 -0
@@ -1 +1 @@
1
- __VERSION__ = "2.9.1"
1
+ __VERSION__ = "2.10.0a0"
@@ -76,7 +76,6 @@ class UserOAuth(SQLModel, table=True):
76
76
  is_verified:
77
77
  slurm_user:
78
78
  slurm_accounts:
79
- cache_dir:
80
79
  username:
81
80
  oauth_accounts:
82
81
  """
@@ -20,7 +20,7 @@ class UserSettings(SQLModel, table=True):
20
20
  ssh_tasks_dir: Task-venvs base folder on `ssh_host`.
21
21
  ssh_jobs_dir: Jobs base folder on `ssh_host`.
22
22
  slurm_user: Local user, to be impersonated via `sudo -u`
23
- cache_dir: Folder where `slurm_user` can write.
23
+ project_dir: Folder where `slurm_user` can write.
24
24
  """
25
25
 
26
26
  __tablename__ = "user_settings"
@@ -35,5 +35,4 @@ class UserSettings(SQLModel, table=True):
35
35
  ssh_tasks_dir: Optional[str] = None
36
36
  ssh_jobs_dir: Optional[str] = None
37
37
  slurm_user: Optional[str] = None
38
- cache_dir: Optional[str] = None
39
38
  project_dir: Optional[str] = None
@@ -448,6 +448,12 @@ async def apply_workflow(
448
448
  await db.commit()
449
449
  await db.refresh(job)
450
450
 
451
+ cache_dir = (
452
+ f"{user_settings.project_dir}/.fractal_cache"
453
+ if user_settings.project_dir is not None
454
+ else None
455
+ )
456
+
451
457
  background_tasks.add_task(
452
458
  submit_workflow,
453
459
  workflow_id=workflow.id,
@@ -456,7 +462,7 @@ async def apply_workflow(
456
462
  job_id=job.id,
457
463
  worker_init=apply_workflow.worker_init,
458
464
  slurm_user=user_settings.slurm_user,
459
- user_cache_dir=user_settings.cache_dir,
465
+ user_cache_dir=cache_dir,
460
466
  )
461
467
  request.app.state.jobsV1.append(job.id)
462
468
  logger.info(
@@ -182,8 +182,8 @@ async def check_no_submitted_job(
182
182
  Find submitted jobs which include tasks from a given task group.
183
183
 
184
184
  Arguments:
185
- task_id_list: List of TaskV2 IDs
186
- db: Database session
185
+ task_group_id: ID of the `TaskGroupV2` object.
186
+ db: Asynchronous database session.
187
187
  """
188
188
  stm = (
189
189
  select(func.count(JobV2.id))
@@ -186,18 +186,22 @@ async def apply_workflow(
186
186
  f"_{timestamp_string}"
187
187
  )
188
188
 
189
+ cache_dir = (
190
+ Path(user_settings.project_dir) / ".fractal_cache"
191
+ if user_settings.project_dir is not None
192
+ else None
193
+ )
194
+
189
195
  # Define user-side job directory
190
196
  if FRACTAL_RUNNER_BACKEND == "local":
191
197
  WORKFLOW_DIR_REMOTE = WORKFLOW_DIR_LOCAL
192
198
  elif FRACTAL_RUNNER_BACKEND == "local_experimental":
193
199
  WORKFLOW_DIR_REMOTE = WORKFLOW_DIR_LOCAL
194
200
  elif FRACTAL_RUNNER_BACKEND == "slurm":
195
- WORKFLOW_DIR_REMOTE = (
196
- Path(user_settings.cache_dir) / f"{WORKFLOW_DIR_LOCAL.name}"
197
- )
201
+ WORKFLOW_DIR_REMOTE = cache_dir / WORKFLOW_DIR_LOCAL.name
198
202
  elif FRACTAL_RUNNER_BACKEND == "slurm_ssh":
199
203
  WORKFLOW_DIR_REMOTE = (
200
- Path(user_settings.ssh_jobs_dir) / f"{WORKFLOW_DIR_LOCAL.name}"
204
+ Path(user_settings.ssh_jobs_dir) / WORKFLOW_DIR_LOCAL.name
201
205
  )
202
206
 
203
207
  # Update job folders in the db
@@ -229,7 +233,7 @@ async def apply_workflow(
229
233
  user_settings=user_settings,
230
234
  worker_init=job.worker_init,
231
235
  slurm_user=user_settings.slurm_user,
232
- user_cache_dir=user_settings.cache_dir,
236
+ user_cache_dir=cache_dir.as_posix() if cache_dir else None,
233
237
  fractal_ssh=fractal_ssh,
234
238
  )
235
239
  request.app.state.jobsV2.append(job.id)
@@ -1,13 +1,19 @@
1
+ import json
1
2
  from pathlib import Path
2
3
  from typing import Optional
3
4
 
4
5
  from fastapi import APIRouter
5
6
  from fastapi import BackgroundTasks
6
7
  from fastapi import Depends
8
+ from fastapi import File
9
+ from fastapi import Form
7
10
  from fastapi import HTTPException
8
11
  from fastapi import Request
9
12
  from fastapi import Response
10
13
  from fastapi import status
14
+ from fastapi import UploadFile
15
+ from pydantic import BaseModel
16
+ from pydantic import root_validator
11
17
  from pydantic import ValidationError
12
18
  from sqlmodel import select
13
19
 
@@ -22,6 +28,7 @@ from ....schemas.v2 import TaskCollectPipV2
22
28
  from ....schemas.v2 import TaskGroupActivityStatusV2
23
29
  from ....schemas.v2 import TaskGroupActivityV2Read
24
30
  from ....schemas.v2 import TaskGroupCreateV2Strict
31
+ from ....schemas.v2 import WheelFile
25
32
  from ...aux.validate_user_settings import validate_user_settings
26
33
  from ._aux_functions_task_lifecycle import get_package_version_from_pypi
27
34
  from ._aux_functions_tasks import _get_valid_user_group_id
@@ -44,20 +51,99 @@ from fractal_server.tasks.v2.utils_python_interpreter import (
44
51
  get_python_interpreter_v2,
45
52
  )
46
53
 
54
+
47
55
  router = APIRouter()
48
56
 
49
57
  logger = set_logger(__name__)
50
58
 
51
59
 
60
+ class CollectionRequestData(BaseModel):
61
+ """
62
+ Validate form data _and_ wheel file.
63
+ """
64
+
65
+ task_collect: TaskCollectPipV2
66
+ file: Optional[UploadFile] = None
67
+ origin: TaskGroupV2OriginEnum
68
+
69
+ @root_validator(pre=True)
70
+ def validate_data(cls, values):
71
+ file = values.get("file")
72
+ package = values.get("task_collect").package
73
+ package_version = values.get("task_collect").package_version
74
+
75
+ if file is None:
76
+ if package is None:
77
+ raise ValueError(
78
+ "When no `file` is provided, `package` is required."
79
+ )
80
+ values["origin"] = TaskGroupV2OriginEnum.PYPI
81
+ else:
82
+ if package is not None:
83
+ raise ValueError(
84
+ "Cannot set `package` when `file` is provided "
85
+ f"(given package='{package}')."
86
+ )
87
+ if package_version is not None:
88
+ raise ValueError(
89
+ "Cannot set `package_version` when `file` is "
90
+ f"provided (given package_version='{package_version}')."
91
+ )
92
+ values["origin"] = TaskGroupV2OriginEnum.WHEELFILE
93
+ return values
94
+
95
+
96
+ def parse_request_data(
97
+ package: Optional[str] = Form(None),
98
+ package_version: Optional[str] = Form(None),
99
+ package_extras: Optional[str] = Form(None),
100
+ python_version: Optional[str] = Form(None),
101
+ pinned_package_versions: Optional[str] = Form(None),
102
+ file: Optional[UploadFile] = File(None),
103
+ ) -> CollectionRequestData:
104
+ """
105
+ Expand the parsing/validation of `parse_form_data`, based on `file`.
106
+ """
107
+
108
+ try:
109
+ # Convert dict_pinned_pkg from a JSON string into a Python dictionary
110
+ dict_pinned_pkg = (
111
+ json.loads(pinned_package_versions)
112
+ if pinned_package_versions
113
+ else None
114
+ )
115
+ # Validate and coerce form data
116
+ task_collect_pip = TaskCollectPipV2(
117
+ package=package,
118
+ package_version=package_version,
119
+ package_extras=package_extras,
120
+ python_version=python_version,
121
+ pinned_package_versions=dict_pinned_pkg,
122
+ )
123
+
124
+ data = CollectionRequestData(
125
+ task_collect=task_collect_pip,
126
+ file=file,
127
+ )
128
+
129
+ except (ValidationError, json.JSONDecodeError) as e:
130
+ raise HTTPException(
131
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
132
+ detail=f"Invalid request-body\n{str(e)}",
133
+ )
134
+
135
+ return data
136
+
137
+
52
138
  @router.post(
53
139
  "/collect/pip/",
54
140
  response_model=TaskGroupActivityV2Read,
55
141
  )
56
142
  async def collect_tasks_pip(
57
- task_collect: TaskCollectPipV2,
58
- background_tasks: BackgroundTasks,
59
- response: Response,
60
143
  request: Request,
144
+ response: Response,
145
+ background_tasks: BackgroundTasks,
146
+ request_data: CollectionRequestData = Depends(parse_request_data),
61
147
  private: bool = False,
62
148
  user_group_id: Optional[int] = None,
63
149
  user: UserOAuth = Depends(current_active_verified_user),
@@ -66,12 +152,17 @@ async def collect_tasks_pip(
66
152
  """
67
153
  Task-collection endpoint
68
154
  """
69
-
70
155
  # Get settings
71
156
  settings = Inject(get_settings)
72
157
 
158
+ # Get some validated request data
159
+ task_collect = request_data.task_collect
160
+
73
161
  # Initialize task-group attributes
74
- task_group_attrs = dict(user_id=user.id)
162
+ task_group_attrs = dict(
163
+ user_id=user.id,
164
+ origin=request_data.origin,
165
+ )
75
166
 
76
167
  # Set/check python version
77
168
  if task_collect.python_version is None:
@@ -103,12 +194,19 @@ async def collect_tasks_pip(
103
194
  "pinned_package_versions"
104
195
  ] = task_collect.pinned_package_versions
105
196
 
197
+ # Initialize wheel_file_content as None
198
+ wheel_file = None
199
+
106
200
  # Set pkg_name, version, origin and wheel_path
107
- if task_collect.package.endswith(".whl"):
201
+ if request_data.origin == TaskGroupV2OriginEnum.WHEELFILE:
108
202
  try:
109
- task_group_attrs["wheel_path"] = task_collect.package
110
- wheel_filename = Path(task_group_attrs["wheel_path"]).name
203
+ wheel_filename = request_data.file.filename
111
204
  wheel_info = _parse_wheel_filename(wheel_filename)
205
+ wheel_file_content = await request_data.file.read()
206
+ wheel_file = WheelFile(
207
+ filename=wheel_filename,
208
+ contents=wheel_file_content,
209
+ )
112
210
  except ValueError as e:
113
211
  raise HTTPException(
114
212
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
@@ -121,11 +219,9 @@ async def collect_tasks_pip(
121
219
  wheel_info["distribution"]
122
220
  )
123
221
  task_group_attrs["version"] = wheel_info["version"]
124
- task_group_attrs["origin"] = TaskGroupV2OriginEnum.WHEELFILE
125
- else:
222
+ elif request_data.origin == TaskGroupV2OriginEnum.PYPI:
126
223
  pkg_name = task_collect.package
127
224
  task_group_attrs["pkg_name"] = normalize_package_name(pkg_name)
128
- task_group_attrs["origin"] = TaskGroupV2OriginEnum.PYPI
129
225
  latest_version = await get_package_version_from_pypi(
130
226
  task_collect.package,
131
227
  task_collect.package_version,
@@ -202,7 +298,6 @@ async def collect_tasks_pip(
202
298
  # On-disk checks
203
299
 
204
300
  if settings.FRACTAL_RUNNER_BACKEND != "slurm_ssh":
205
-
206
301
  # Verify that folder does not exist (for local collection)
207
302
  if Path(task_group_path).exists():
208
303
  raise HTTPException(
@@ -210,15 +305,6 @@ async def collect_tasks_pip(
210
305
  detail=f"{task_group_path} already exists.",
211
306
  )
212
307
 
213
- # Verify that wheel file exists
214
- wheel_path = task_group_attrs.get("wheel_path", None)
215
- if wheel_path is not None:
216
- if not Path(wheel_path).exists():
217
- raise HTTPException(
218
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
219
- detail=f"No such file: {wheel_path}.",
220
- )
221
-
222
308
  # Create TaskGroupV2 object
223
309
  task_group = TaskGroupV2(**task_group_attrs)
224
310
  db.add(task_group)
@@ -259,14 +345,17 @@ async def collect_tasks_pip(
259
345
  task_group_activity_id=task_group_activity.id,
260
346
  fractal_ssh=fractal_ssh,
261
347
  tasks_base_dir=user_settings.ssh_tasks_dir,
348
+ wheel_file=wheel_file,
262
349
  )
263
350
 
264
351
  else:
265
352
  # Local task collection
353
+
266
354
  background_tasks.add_task(
267
355
  collect_local,
268
356
  task_group_id=task_group.id,
269
357
  task_group_activity_id=task_group_activity.id,
358
+ wheel_file=wheel_file,
270
359
  )
271
360
  logger.debug(
272
361
  "Task-collection endpoint: start background collection "
@@ -68,11 +68,8 @@ async def _get_task_by_source(
68
68
  Find task with a given source.
69
69
 
70
70
  Args:
71
- task_import: Info on task to be imported.
72
- user_id: ID of current user.
73
- default_group_id: ID of default user group.
74
- task_group_list: Current list of valid task groups.
75
- db: Asynchronous db session
71
+ source: `source` of the task to be imported.
72
+ task_groups_list: Current list of valid task groups.
76
73
 
77
74
  Return:
78
75
  `id` of the matching task, or `None`.
@@ -172,10 +169,10 @@ async def _get_task_by_taskimport(
172
169
 
173
170
  Args:
174
171
  task_import: Info on task to be imported.
172
+ task_groups_list: Current list of valid task groups.
175
173
  user_id: ID of current user.
176
174
  default_group_id: ID of default user group.
177
- task_group_list: Current list of valid task groups.
178
- db: Asynchronous db session
175
+ db: Asynchronous database session.
179
176
 
180
177
  Return:
181
178
  `id` of the matching task, or `None`.
@@ -327,7 +327,7 @@ class SlurmConfig(BaseModel, extra=Extra.forbid):
327
327
  script.
328
328
 
329
329
  Arguments:
330
- user_cache_dir:
330
+ remote_export_dir:
331
331
  Base directory for exports defined in
332
332
  `self.user_local_exports`.
333
333
  """
@@ -378,7 +378,7 @@ class SlurmConfig(BaseModel, extra=Extra.forbid):
378
378
  if self.user_local_exports:
379
379
  if remote_export_dir is None:
380
380
  raise ValueError(
381
- f"user_cache_dir=None but {self.user_local_exports=}"
381
+ f"remote_export_dir=None but {self.user_local_exports=}"
382
382
  )
383
383
  for key, value in self.user_local_exports.items():
384
384
  tmp_value = str(Path(remote_export_dir) / value)
@@ -917,7 +917,7 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
917
917
  thread via `fut.set_exception(...)`.
918
918
 
919
919
  Arguments:
920
- jobid: ID of the SLURM job
920
+ job_ids: IDs of the SLURM jobs to handle.
921
921
  """
922
922
  # Handle all uncaught exceptions in this broad try/except block
923
923
  try:
@@ -1109,8 +1109,9 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
1109
1109
  Fetch a remote folder via tar+sftp+tar
1110
1110
 
1111
1111
  Arguments:
1112
- job:
1113
- `SlurmJob` object (needed for its prefixes-related attributes).
1112
+ jobs:
1113
+ List of `SlurmJob` object (needed for their prefix-related
1114
+ attributes).
1114
1115
  """
1115
1116
 
1116
1117
  # Check that the subfolder is unique
@@ -524,8 +524,7 @@ def execute_tasks(
524
524
 
525
525
  **Note:** At the end of each task, write current metadata to
526
526
  `workflow_dir_local / METADATA_FILENAME`, so that they can be read as part
527
- of the [`get_job`
528
- endpoint](../../api/v1/job/#fractal_server.app.routes.api.v1.job.get_job).
527
+ of the `get_job` endpoint.
529
528
 
530
529
  Arguments:
531
530
  executor:
@@ -44,10 +44,6 @@ def _process_workflow(
44
44
  Internal processing routine
45
45
 
46
46
  Schedules the workflow using a `FractalThreadPoolExecutor`.
47
-
48
- Cf.
49
- [process_workflow][fractal_server.app.runner.v2._local.process_workflow]
50
- for the call signature.
51
47
  """
52
48
 
53
49
  with FractalThreadPoolExecutor() as executor:
@@ -55,9 +55,6 @@ def _process_workflow(
55
55
  workflow working dir and user to impersonate. It then schedules the
56
56
  workflow tasks and returns the new dataset attributes
57
57
 
58
- Cf.
59
- [process_workflow][fractal_server.app.runner.v2._local.process_workflow]
60
-
61
58
  Returns:
62
59
  new_dataset_attributes:
63
60
  """
@@ -115,9 +112,6 @@ async def process_workflow(
115
112
  ) -> dict:
116
113
  """
117
114
  Process workflow (SLURM backend public interface)
118
-
119
- Cf.
120
- [process_workflow][fractal_server.app.runner.v2._local.process_workflow]
121
115
  """
122
116
 
123
117
  # Set values of first_task_index and last_task_index
@@ -51,9 +51,6 @@ def _process_workflow(
51
51
  workflow working dir and user to impersonate. It then schedules the
52
52
  workflow tasks and returns the new dataset attributes
53
53
 
54
- Cf.
55
- [process_workflow][fractal_server.app.runner.v2._local.process_workflow]
56
-
57
54
  Returns:
58
55
  new_dataset_attributes:
59
56
  """
@@ -106,10 +103,7 @@ async def process_workflow(
106
103
  worker_init: Optional[str] = None,
107
104
  ) -> dict:
108
105
  """
109
- Process workflow (SLURM backend public interface)
110
-
111
- Cf.
112
- [process_workflow][fractal_server.app.runner.v2._local.process_workflow]
106
+ Process workflow (SLURM backend public interface).
113
107
  """
114
108
 
115
109
  # Set values of first_task_index and last_task_index
@@ -31,14 +31,12 @@ class UserSettingsRead(BaseModel):
31
31
  ssh_jobs_dir: Optional[str] = None
32
32
  slurm_user: Optional[str] = None
33
33
  slurm_accounts: list[str]
34
- cache_dir: Optional[str] = None
35
34
  project_dir: Optional[str] = None
36
35
 
37
36
 
38
37
  class UserSettingsReadStrict(BaseModel):
39
38
  slurm_user: Optional[str] = None
40
39
  slurm_accounts: list[str]
41
- cache_dir: Optional[str] = None
42
40
  ssh_username: Optional[str] = None
43
41
  project_dir: Optional[str] = None
44
42
 
@@ -55,7 +53,6 @@ class UserSettingsUpdate(BaseModel, extra=Extra.forbid):
55
53
  ssh_jobs_dir: Optional[str] = None
56
54
  slurm_user: Optional[str] = None
57
55
  slurm_accounts: Optional[list[StrictStr]] = None
58
- cache_dir: Optional[str] = None
59
56
  project_dir: Optional[str] = None
60
57
 
61
58
  _ssh_host = validator("ssh_host", allow_reuse=True)(
@@ -87,13 +84,6 @@ class UserSettingsUpdate(BaseModel, extra=Extra.forbid):
87
84
  value[i] = valstr(f"slurm_accounts[{i}]")(item)
88
85
  return val_unique_list("slurm_accounts")(value)
89
86
 
90
- @validator("cache_dir")
91
- def cache_dir_validator(cls, value):
92
- if value is None:
93
- return None
94
- validate_cmd(value)
95
- return val_absolute_path("cache_dir")(value)
96
-
97
87
  @validator("project_dir")
98
88
  def project_dir_validator(cls, value):
99
89
  if value is None:
@@ -104,15 +94,7 @@ class UserSettingsUpdate(BaseModel, extra=Extra.forbid):
104
94
 
105
95
  class UserSettingsUpdateStrict(BaseModel, extra=Extra.forbid):
106
96
  slurm_accounts: Optional[list[StrictStr]] = None
107
- cache_dir: Optional[str] = None
108
97
 
109
98
  _slurm_accounts = validator("slurm_accounts", allow_reuse=True)(
110
99
  val_unique_list("slurm_accounts")
111
100
  )
112
-
113
- @validator("cache_dir")
114
- def cache_dir_validator(cls, value):
115
- if value is None:
116
- return value
117
- validate_cmd(value)
118
- return val_absolute_path("cache_dir")(value)
@@ -25,6 +25,7 @@ from .task import TaskReadV2 # noqa F401
25
25
  from .task import TaskUpdateV2 # noqa F401
26
26
  from .task_collection import TaskCollectCustomV2 # noqa F401
27
27
  from .task_collection import TaskCollectPipV2 # noqa F401
28
+ from .task_collection import WheelFile # noqa F401
28
29
  from .task_group import TaskGroupActivityActionV2 # noqa F401
29
30
  from .task_group import TaskGroupActivityStatusV2 # noqa F401
30
31
  from .task_group import TaskGroupActivityV2Read # noqa F401
@@ -12,6 +12,15 @@ from fractal_server.app.schemas.v2 import ManifestV2
12
12
  from fractal_server.string_tools import validate_cmd
13
13
 
14
14
 
15
+ class WheelFile(BaseModel):
16
+ """
17
+ Model for data sent from the endpoint to the background task.
18
+ """
19
+
20
+ filename: str
21
+ contents: bytes
22
+
23
+
15
24
  class TaskCollectPipV2(BaseModel, extra=Extra.forbid):
16
25
  """
17
26
  TaskCollectPipV2 class
@@ -22,14 +31,11 @@ class TaskCollectPipV2(BaseModel, extra=Extra.forbid):
22
31
 
23
32
  Two cases are supported:
24
33
 
25
- 1. `package` is the path of a local wheel file;
26
- 2. `package` is the name of a package that can be installed via `pip`.
27
-
34
+ 1. `package` is the name of a package that can be installed via `pip`.
35
+ 1. `package=None`, and a wheel file is uploaded within the API request.
28
36
 
29
37
  Attributes:
30
- package:
31
- The name of a `pip`-installable package, or the path to a local
32
- wheel file.
38
+ package: The name of a `pip`-installable package, or `None`.
33
39
  package_version: Version of the package
34
40
  package_extras: Package extras to include in the `pip install` command
35
41
  python_version: Python version to install and run the package tasks
@@ -39,12 +45,28 @@ class TaskCollectPipV2(BaseModel, extra=Extra.forbid):
39
45
 
40
46
  """
41
47
 
42
- package: str
48
+ package: Optional[str] = None
43
49
  package_version: Optional[str] = None
44
50
  package_extras: Optional[str] = None
45
51
  python_version: Optional[Literal["3.9", "3.10", "3.11", "3.12"]] = None
46
52
  pinned_package_versions: Optional[dict[str, str]] = None
47
53
 
54
+ @validator("package")
55
+ def package_validator(cls, value: Optional[str]) -> Optional[str]:
56
+ if value is None:
57
+ return value
58
+ value = valstr("package")(value)
59
+ validate_cmd(value, attribute_name="package")
60
+ return value
61
+
62
+ @validator("package_version")
63
+ def package_version_validator(cls, value: Optional[str]) -> Optional[str]:
64
+ if value is None:
65
+ return value
66
+ value = valstr("package_version")(value)
67
+ validate_cmd(value, attribute_name="package_version")
68
+ return value
69
+
48
70
  @validator("pinned_package_versions")
49
71
  def pinned_package_versions_validator(cls, value):
50
72
  if value is None:
@@ -65,36 +87,10 @@ class TaskCollectPipV2(BaseModel, extra=Extra.forbid):
65
87
  validate_cmd(version)
66
88
  return value
67
89
 
68
- @validator("package")
69
- def package_validator(cls, value):
70
- value = valstr("package")(value)
71
- if "/" in value or value.endswith(".whl"):
72
- if not value.endswith(".whl"):
73
- raise ValueError(
74
- "Local-package path must be a wheel file "
75
- f"(given {value})."
76
- )
77
- if not Path(value).is_absolute():
78
- raise ValueError(
79
- f"Local-package path must be absolute: (given {value})."
80
- )
81
- validate_cmd(value, attribute_name="package")
82
- return value
83
-
84
- @validator("package_version")
85
- def package_version_validator(
86
- cls, v: Optional[str], values
87
- ) -> Optional[str]:
88
- v = valstr("package_version")(v)
89
- if values["package"].endswith(".whl"):
90
- raise ValueError(
91
- "Cannot provide package version when package is a wheel file."
92
- )
93
- validate_cmd(v, attribute_name="package_version")
94
- return v
95
-
96
90
  @validator("package_extras")
97
91
  def package_extras_validator(cls, value: Optional[str]) -> Optional[str]:
92
+ if value is None:
93
+ return value
98
94
  value = valstr("package_extras")(value)
99
95
  validate_cmd(value, attribute_name="package_extras")
100
96
  return value
@@ -5,7 +5,6 @@ from typing import Optional
5
5
  from pydantic import BaseModel
6
6
  from pydantic import Extra
7
7
  from pydantic import Field
8
- from pydantic import root_validator
9
8
  from pydantic import validator
10
9
 
11
10
  from .._validators import val_absolute_path
@@ -77,21 +76,6 @@ class TaskGroupCreateV2Strict(TaskGroupCreateV2):
77
76
  version: str
78
77
  python_version: str
79
78
 
80
- @root_validator
81
- def check_wheel_file(cls, values):
82
- origin = values.get("origin")
83
- wheel_path = values.get("wheel_path")
84
- bad_condition_1 = (
85
- origin == TaskGroupV2OriginEnum.WHEELFILE and wheel_path is None
86
- )
87
- bad_condition_2 = (
88
- origin != TaskGroupV2OriginEnum.WHEELFILE
89
- and wheel_path is not None
90
- )
91
- if bad_condition_1 or bad_condition_2:
92
- raise ValueError(f"Cannot have {origin=} and {wheel_path=}.")
93
- return values
94
-
95
79
 
96
80
  class TaskGroupReadV2(BaseModel):
97
81
  id: int
@@ -32,11 +32,11 @@ class SlurmSudoUserSettings(BaseModel):
32
32
 
33
33
  Attributes:
34
34
  slurm_user: User to be impersonated via `sudo -u`.
35
- cache_dir: Folder where `slurm_user` can write.
35
+ project_dir: Folder where `slurm_user` can write.
36
36
  slurm_accounts:
37
37
  List of SLURM accounts, to be used upon Fractal job submission.
38
38
  """
39
39
 
40
40
  slurm_user: str
41
- cache_dir: str
41
+ project_dir: str
42
42
  slurm_accounts: list[str]
fractal_server/config.py CHANGED
@@ -302,8 +302,7 @@ class Settings(BaseSettings):
302
302
  """
303
303
  Logging-level threshold for logging
304
304
 
305
- Only logs of with this level (or higher) will appear in the console logs;
306
- see details [here](../internals/logs/).
305
+ Only logs of with this level (or higher) will appear in the console logs.
307
306
  """
308
307
 
309
308
  FRACTAL_LOCAL_CONFIG_FILE: Optional[Path]
fractal_server/logger.py CHANGED
@@ -66,10 +66,8 @@ def set_logger(
66
66
 
67
67
  * The attribute `Logger.propagate` set to `False`;
68
68
  * One and only one `logging.StreamHandler` handler, with severity level set
69
- to
70
- [`FRACTAL_LOGGING_LEVEL`](../../../../configuration/#fractal_server.config.Settings.FRACTAL_LOGGING_LEVEL)
71
- and formatter set as in the `logger.LOG_FORMAT` variable from the current
72
- module;
69
+ to `FRACTAL_LOGGING_LEVEL` and formatter set as in the `logger.LOG_FORMAT`
70
+ variable from the current module;
73
71
  * One or many `logging.FileHandler` handlers, including one pointint to
74
72
  `log_file_path` (if set); all these handlers have severity level set to
75
73
  `logging.DEBUG`.
@@ -0,0 +1,36 @@
1
+ """Remove UserSettings.cache_dir
2
+
3
+ Revision ID: 316140ff7ee1
4
+ Revises: d256a7379ab8
5
+ Create Date: 2024-12-03 10:15:53.255958
6
+
7
+ """
8
+ import sqlalchemy as sa
9
+ from alembic import op
10
+
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = "316140ff7ee1"
14
+ down_revision = "d256a7379ab8"
15
+ branch_labels = None
16
+ depends_on = None
17
+
18
+
19
+ def upgrade() -> None:
20
+ # ### commands auto generated by Alembic - please adjust! ###
21
+ with op.batch_alter_table("user_settings", schema=None) as batch_op:
22
+ batch_op.drop_column("cache_dir")
23
+
24
+ # ### end Alembic commands ###
25
+
26
+
27
+ def downgrade() -> None:
28
+ # ### commands auto generated by Alembic - please adjust! ###
29
+ with op.batch_alter_table("user_settings", schema=None) as batch_op:
30
+ batch_op.add_column(
31
+ sa.Column(
32
+ "cache_dir", sa.VARCHAR(), autoincrement=False, nullable=True
33
+ )
34
+ )
35
+
36
+ # ### end Alembic commands ###
@@ -366,10 +366,9 @@ class FractalSSH(object):
366
366
  Transfer a file via SSH
367
367
 
368
368
  Args:
369
- local: Local path to file
370
- remote: Target path on remote host
371
- fractal_ssh: FractalSSH connection object with custom lock
372
- logger_name: Name of the logger
369
+ local: Local path to file.
370
+ remote: Target path on remote host.
371
+ lock_timeout: Timeout for lock acquisition (overrides default).
373
372
  """
374
373
  try:
375
374
  self.logger.info(
@@ -407,10 +406,9 @@ class FractalSSH(object):
407
406
  Transfer a file via SSH
408
407
 
409
408
  Args:
410
- local: Local path to file
411
- remote: Target path on remote host
412
- logger_name: Name of the logger
413
- lock_timeout:
409
+ local: Local path to file.
410
+ remote: Target path on remote host.
411
+ lock_timeout: Timeout for lock acquisition (overrides default).
414
412
  """
415
413
  try:
416
414
  prefix = "[fetch_file] "
@@ -499,9 +497,9 @@ class FractalSSH(object):
499
497
  Open a remote file via SFTP and write it.
500
498
 
501
499
  Args:
502
- path: Absolute path
503
- contents: File contents
504
- lock_timeout:
500
+ path: Absolute path of remote file.
501
+ content: Contents to be written to file.
502
+ lock_timeout: Timeout for lock acquisition (overrides default).
505
503
  """
506
504
  self.logger.info(f"START writing to remote file {path}.")
507
505
  actual_lock_timeout = self.default_lock_timeout
@@ -4,6 +4,7 @@ import shutil
4
4
  import time
5
5
  from pathlib import Path
6
6
  from tempfile import TemporaryDirectory
7
+ from typing import Optional
7
8
 
8
9
  from ..utils_database import create_db_tasks_and_update_task_group
9
10
  from ._utils import _customize_and_run_template
@@ -12,8 +13,8 @@ from fractal_server.app.models.v2 import TaskGroupActivityV2
12
13
  from fractal_server.app.models.v2 import TaskGroupV2
13
14
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
14
15
  from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
16
+ from fractal_server.app.schemas.v2 import WheelFile
15
17
  from fractal_server.app.schemas.v2.manifest import ManifestV2
16
- from fractal_server.logger import get_logger
17
18
  from fractal_server.logger import set_logger
18
19
  from fractal_server.tasks.utils import get_log_path
19
20
  from fractal_server.tasks.v2.local._utils import check_task_files_exist
@@ -35,30 +36,19 @@ from fractal_server.utils import get_timestamp
35
36
  LOGGER_NAME = __name__
36
37
 
37
38
 
38
- def _copy_wheel_file_local(task_group: TaskGroupV2) -> str:
39
- logger = get_logger(LOGGER_NAME)
40
- source = task_group.wheel_path
41
- dest = (
42
- Path(task_group.path) / Path(task_group.wheel_path).name
43
- ).as_posix()
44
- logger.debug(f"[_copy_wheel_file] START {source=} {dest=}")
45
- shutil.copy(task_group.wheel_path, task_group.path)
46
- logger.debug(f"[_copy_wheel_file] END {source=} {dest=}")
47
- return dest
48
-
49
-
50
39
  def collect_local(
51
40
  *,
52
41
  task_group_activity_id: int,
53
42
  task_group_id: int,
43
+ wheel_file: Optional[WheelFile] = None,
54
44
  ) -> None:
55
45
  """
56
46
  Collect a task package.
57
47
 
58
- This function is run as a background task, therefore exceptions must be
48
+ This function runs as a background task, therefore exceptions must be
59
49
  handled.
60
50
 
61
- NOTE: by making this function sync, it runs within a thread - due to
51
+ NOTE: since this function is sync, it runs within a thread - due to
62
52
  starlette/fastapi handling of background tasks (see
63
53
  https://github.com/encode/starlette/blob/master/starlette/background.py).
64
54
 
@@ -66,6 +56,7 @@ def collect_local(
66
56
  Arguments:
67
57
  task_group_id:
68
58
  task_group_activity_id:
59
+ wheel_file:
69
60
  """
70
61
 
71
62
  with TemporaryDirectory() as tmpdir:
@@ -76,7 +67,6 @@ def collect_local(
76
67
  )
77
68
 
78
69
  with next(get_sync_db()) as db:
79
-
80
70
  # Get main objects from db
81
71
  activity = db.get(TaskGroupActivityV2, task_group_activity_id)
82
72
  task_group = db.get(TaskGroupV2, task_group_id)
@@ -109,17 +99,22 @@ def collect_local(
109
99
  return
110
100
 
111
101
  try:
112
-
113
102
  # Create task_group.path folder
114
103
  Path(task_group.path).mkdir(parents=True)
115
104
  logger.debug(f"Created {task_group.path}")
116
105
 
117
- # Copy wheel file into task group path
118
- if task_group.wheel_path:
119
- new_wheel_path = _copy_wheel_file_local(
120
- task_group=task_group
106
+ # Write wheel file and set task_group.wheel_path
107
+ if wheel_file is not None:
108
+
109
+ wheel_path = (
110
+ Path(task_group.path) / wheel_file.filename
111
+ ).as_posix()
112
+ logger.debug(
113
+ f"Write wheel-file contents into {wheel_path}"
121
114
  )
122
- task_group.wheel_path = new_wheel_path
115
+ with open(wheel_path, "wb") as f:
116
+ f.write(wheel_file.contents)
117
+ task_group.wheel_path = wheel_path
123
118
  task_group = add_commit_refresh(obj=task_group, db=db)
124
119
 
125
120
  # Prepare replacements for templates
@@ -24,7 +24,7 @@ def _customize_and_run_template(
24
24
  Args:
25
25
  template_filename: Filename of the template file (ends with ".sh").
26
26
  replacements: Dictionary of replacements.
27
- script_dir: Local folder where the script will be placed.
27
+ script_dir_local: Local folder where the script will be placed.
28
28
  prefix: Prefix for the script filename.
29
29
  fractal_ssh: FractalSSH object
30
30
  script_dir_remote: Remote scripts directory
@@ -2,6 +2,7 @@ import logging
2
2
  import time
3
3
  from pathlib import Path
4
4
  from tempfile import TemporaryDirectory
5
+ from typing import Optional
5
6
 
6
7
  from ..utils_background import _prepare_tasks_metadata
7
8
  from ..utils_background import fail_and_cleanup
@@ -11,10 +12,10 @@ from fractal_server.app.models.v2 import TaskGroupActivityV2
11
12
  from fractal_server.app.models.v2 import TaskGroupV2
12
13
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
13
14
  from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
15
+ from fractal_server.app.schemas.v2 import WheelFile
14
16
  from fractal_server.app.schemas.v2.manifest import ManifestV2
15
17
  from fractal_server.logger import set_logger
16
18
  from fractal_server.ssh._fabric import FractalSSH
17
- from fractal_server.tasks.v2.ssh._utils import _copy_wheel_file_ssh
18
19
  from fractal_server.tasks.v2.ssh._utils import _customize_and_run_template
19
20
  from fractal_server.tasks.v2.utils_background import add_commit_refresh
20
21
  from fractal_server.tasks.v2.utils_background import get_current_log
@@ -38,14 +39,15 @@ def collect_ssh(
38
39
  task_group_activity_id: int,
39
40
  fractal_ssh: FractalSSH,
40
41
  tasks_base_dir: str,
42
+ wheel_file: Optional[WheelFile] = None,
41
43
  ) -> None:
42
44
  """
43
45
  Collect a task package over SSH
44
46
 
45
- This function is run as a background task, therefore exceptions must be
47
+ This function runs as a background task, therefore exceptions must be
46
48
  handled.
47
49
 
48
- NOTE: by making this function sync, it runs within a thread - due to
50
+ NOTE: since this function is sync, it runs within a thread - due to
49
51
  starlette/fastapi handling of background tasks (see
50
52
  https://github.com/encode/starlette/blob/master/starlette/background.py).
51
53
 
@@ -57,6 +59,7 @@ def collect_ssh(
57
59
  tasks_base_dir:
58
60
  Only used as a `safe_root` in `remove_dir`, and typically set to
59
61
  `user_settings.ssh_tasks_dir`.
62
+ wheel_file:
60
63
  """
61
64
 
62
65
  # Work within a temporary folder, where also logs will be placed
@@ -116,27 +119,36 @@ def collect_ssh(
116
119
  return
117
120
 
118
121
  try:
119
- script_dir_remote = (
120
- Path(task_group.path) / SCRIPTS_SUBFOLDER
121
- ).as_posix()
122
122
  # Create remote `task_group.path` and `script_dir_remote`
123
123
  # folders (note that because of `parents=True` we are in
124
124
  # the `no error if existing, make parent directories as
125
125
  # needed` scenario for `mkdir`)
126
+ script_dir_remote = (
127
+ Path(task_group.path) / SCRIPTS_SUBFOLDER
128
+ ).as_posix()
126
129
  fractal_ssh.mkdir(folder=task_group.path, parents=True)
127
130
  fractal_ssh.mkdir(folder=script_dir_remote, parents=True)
128
131
 
129
- # Copy wheel file into task group path
130
- if task_group.wheel_path:
131
- new_wheel_path = _copy_wheel_file_ssh(
132
- task_group=task_group,
133
- fractal_ssh=fractal_ssh,
134
- logger_name=LOGGER_NAME,
132
+ # Write wheel file locally and send it to remote path,
133
+ # and set task_group.wheel_path
134
+ if wheel_file is not None:
135
+ wheel_filename = wheel_file.filename
136
+ wheel_path = (
137
+ Path(task_group.path) / wheel_filename
138
+ ).as_posix()
139
+ tmp_wheel_path = (Path(tmpdir) / wheel_filename).as_posix()
140
+ logger.debug(
141
+ f"Write wheel-file contents into {tmp_wheel_path}"
135
142
  )
136
- task_group.wheel_path = new_wheel_path
143
+ with open(tmp_wheel_path, "wb") as f:
144
+ f.write(wheel_file.contents)
145
+ fractal_ssh.send_file(
146
+ local=tmp_wheel_path,
147
+ remote=wheel_path,
148
+ )
149
+ task_group.wheel_path = wheel_path
137
150
  task_group = add_commit_refresh(obj=task_group, db=db)
138
151
 
139
- # Prepare replacements for templates
140
152
  replacements = get_collection_replacements(
141
153
  task_group=task_group,
142
154
  python_bin=get_python_interpreter_v2(
@@ -173,7 +185,6 @@ def collect_ssh(
173
185
  )
174
186
  activity.log = get_current_log(log_file_path)
175
187
  activity = add_commit_refresh(obj=activity, db=db)
176
-
177
188
  # Run script 2
178
189
  stdout = _customize_and_run_template(
179
190
  template_filename="2_pip_install.sh",
@@ -24,9 +24,12 @@ def create_db_tasks_and_update_task_group(
24
24
  Create a `TaskGroupV2` with N `TaskV2`s, and insert them into the database.
25
25
 
26
26
  Arguments:
27
- task_group: ID of an existing TaskGroupV2 object.
28
- task_list: A list of TaskCreateV2 objects to be inserted into the db.
29
- db: A synchronous database session
27
+ task_group_id: ID of an existing `TaskGroupV2` object.
28
+ task_list: List of `TaskCreateV2` objects to be inserted into the db.
29
+ db: Synchronous database session
30
+
31
+ Returns:
32
+ Updated `TaskGroupV2` object.
30
33
  """
31
34
  actual_task_list = [
32
35
  TaskV2(
@@ -8,10 +8,10 @@ def get_python_interpreter_v2(
8
8
  python_version: Literal["3.9", "3.10", "3.11", "3.12"]
9
9
  ) -> str:
10
10
  """
11
- Return the path to the python interpreter
11
+ Return the path to the Python interpreter
12
12
 
13
13
  Args:
14
- version: Python version
14
+ python_version: Python version
15
15
 
16
16
  Raises:
17
17
  ValueError: If the python version requested is not available on the
@@ -19,9 +19,9 @@ def customize_template(
19
19
  Customize a bash-script template and write it to disk.
20
20
 
21
21
  Args:
22
- template_filename:
23
- templates_folder:
24
- replacements:
22
+ template_name: Name of the template that will be customized.
23
+ replacements: List of replacements for template customization.
24
+ script_path: Local path where the customized template will be written.
25
25
  """
26
26
  # Read template
27
27
  template_path = TEMPLATES_DIR / template_name
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: fractal-server
3
- Version: 2.9.1
3
+ Version: 2.10.0a0
4
4
  Summary: Server component of the Fractal analytics platform
5
5
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
6
6
  License: BSD-3-Clause
@@ -1,4 +1,4 @@
1
- fractal_server/__init__.py,sha256=PV3pFNkPgVbTBTGekRDdEwLUrBsGNXOb1hGr6F9R4OE,22
1
+ fractal_server/__init__.py,sha256=Qb19IXoi0fWBhP9l0Xx5EOFNybthKtzgo46qgF8V_s0,25
2
2
  fractal_server/__main__.py,sha256=dEkCfzLLQrIlxsGC-HBfoR-RBMWnJDgNrxYTyzmE9c0,6146
3
3
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
4
4
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -6,8 +6,8 @@ fractal_server/app/db/__init__.py,sha256=wup2wcOkyOh8Vd0Xm76PZn_naxeMqaL4eF8DHHX
6
6
  fractal_server/app/models/__init__.py,sha256=aG7mf1zZbsgzDSp7GHEcZhdjHfW3TGPOLCI8MrvYhPw,500
7
7
  fractal_server/app/models/linkusergroup.py,sha256=LWTUfhH2uAnn_4moK7QdRUIHWtpw-hPZuW-5jClv_OE,610
8
8
  fractal_server/app/models/linkuserproject.py,sha256=eQaourbGRshvlMVlKzLYJKHEjfsW1CbWws9yW4eHXhA,567
9
- fractal_server/app/models/security.py,sha256=2npjgRKBZ7OAnhAXNbYxjtuOsSm1P4kak__qfk2SpeM,3770
10
- fractal_server/app/models/user_settings.py,sha256=3LodhERbRz3ajjmCnZiU1TOitduKu_9Lyv_Rgdnyusw,1350
9
+ fractal_server/app/models/security.py,sha256=kLvarGwG1CxvtbpV2HkkOobzHU5Ia0PHyNzHghKSEx4,3751
10
+ fractal_server/app/models/user_settings.py,sha256=Y-ZV-uZAFLZqXxy8c5_Qeh_F7zQuZDWOgLpU6Zs6iqU,1316
11
11
  fractal_server/app/models/v1/__init__.py,sha256=hUI7dEbPaiZGN0IbHW4RSmSicyvtn_xeuevoX7zvUwI,466
12
12
  fractal_server/app/models/v1/dataset.py,sha256=99GDgt7njx8yYQApkImqp_7bHA5HH3ElvbR6Oyj9kVI,2017
13
13
  fractal_server/app/models/v1/job.py,sha256=QLGXcWdVRHaUHQNDapYYlLpEfw4K7QyD8TmcwhrWw2o,3304
@@ -37,28 +37,28 @@ fractal_server/app/routes/api/v1/__init__.py,sha256=Y2HQdG197J0a7DyQEE2jn53IfxD0
37
37
  fractal_server/app/routes/api/v1/_aux_functions.py,sha256=P9Q48thGH95w0h5cacYoibxqgiiLW4oqZ8rNJ2LIISY,13219
38
38
  fractal_server/app/routes/api/v1/dataset.py,sha256=KVfKdp-bT8eB14kCjTSmpji4a2IPIHxGID8L10h3Wac,17282
39
39
  fractal_server/app/routes/api/v1/job.py,sha256=0jGxvu0xNQnWuov2qnoo9yE7Oat37XbcVn4Ute-UsiE,5370
40
- fractal_server/app/routes/api/v1/project.py,sha256=k-GBDSUyQvt2L1CgOckcn86GcmqRWa_MZ_fzy9ORwqI,15419
40
+ fractal_server/app/routes/api/v1/project.py,sha256=3NsdNXLIsE8QiNgKP1Kp1-B0zYG0Zi5HKBzWA0LjlQg,15551
41
41
  fractal_server/app/routes/api/v1/task.py,sha256=eW89nMCjpD4G6tHXDo2qGBKqWaPirjH6M3hpdJQhfa0,6528
42
42
  fractal_server/app/routes/api/v1/task_collection.py,sha256=5EMh3yhS1Z4x25kp5Iaxalrf7RgJh-XD1nBjrFvgwsg,9072
43
43
  fractal_server/app/routes/api/v1/workflow.py,sha256=2T93DuEnSshaDCue-JPmjuvGCtbk6lt9pFMuPt783t8,11217
44
44
  fractal_server/app/routes/api/v1/workflowtask.py,sha256=OYYConwJbmNULDw5I3T-UbSJKrbbBiAHbbBeVcpoFKQ,5785
45
45
  fractal_server/app/routes/api/v2/__init__.py,sha256=w4c9WzagaVV5d4TWBX5buu5ENk8jf3YftMQYmhavz9Q,2172
46
46
  fractal_server/app/routes/api/v2/_aux_functions.py,sha256=mb4R_qqFxeW0LAis2QJIIfVx8Sydv1jTYaRIMsMxnIk,11720
47
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=fZNglsjv3Smc77olkXV4WzVS9oja5J-ejYdjs5RslAA,6739
47
+ fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=c8eqPXdMhc3nIixX50B1Ka5n7LgbOZm2JbEs7lICQ04,6767
48
48
  fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=PuapLtvSk9yhBAsKNEp1w2oagOMr0YZTo247-CU3hdM,11008
49
49
  fractal_server/app/routes/api/v2/dataset.py,sha256=Y6uZz--YSEGgnPYu05rZ9sr1Ug08bNl2v1h3VeApBe8,9441
50
50
  fractal_server/app/routes/api/v2/images.py,sha256=JR1rR6qEs81nacjriOXAOBQjAbCXF4Ew7M7mkWdxBU0,7920
51
51
  fractal_server/app/routes/api/v2/job.py,sha256=Bga2Kz1OjvDIdxZObWaaXVhNIhC_5JKhKRjEH2_ayEE,5157
52
52
  fractal_server/app/routes/api/v2/project.py,sha256=eWYFJ7F2ZYQcpi-_n-rhPF-Q4gJhzYBsVGYFhHZZXAE,6653
53
53
  fractal_server/app/routes/api/v2/status.py,sha256=6N9DSZ4iFqbZImorWfEAPoyoFUgEruo4Hweqo0x0xXU,6435
54
- fractal_server/app/routes/api/v2/submit.py,sha256=bTL1upaRIpOQNGfSsvNRwh-4LxROTvnzrC9QC9UQI10,8129
54
+ fractal_server/app/routes/api/v2/submit.py,sha256=cQwt0oK8xjHMGA_bQrw4Um8jd_aCvgmWfoqSQDh12hQ,8246
55
55
  fractal_server/app/routes/api/v2/task.py,sha256=K0ik33t7vL8BAK5S7fqyJDNdRK4stGqb_73bSa8tvPE,7159
56
- fractal_server/app/routes/api/v2/task_collection.py,sha256=TIr1IPO15TX6CZIQ_LPc0zFtTltuleDISAdMVaVQxfw,9633
56
+ fractal_server/app/routes/api/v2/task_collection.py,sha256=snX_E3OSBsgjbVwQMgKvV7pLmfNGD0OyqgAsxSGtB5E,12359
57
57
  fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=cctW61-C2QYF2KXluS15lLhZJS_kt30Ca6UGLFO32z0,6207
58
58
  fractal_server/app/routes/api/v2/task_group.py,sha256=4o2N0z7jK7VUVlJZMM4GveCCc4JKxYJx9-PMmsYIlJQ,8256
59
59
  fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=3o9bCC8ubMwffQPPaxQZy-CjH9IB2RkIReIecI6L2_w,9300
60
60
  fractal_server/app/routes/api/v2/workflow.py,sha256=vjCNRzMHaAB4YWbAEWGlELHXDN4GjtE26IkIiB15RGM,8682
61
- fractal_server/app/routes/api/v2/workflow_import.py,sha256=WJST1AZypvOTGUrjhomYVh4R2ow8RoGpuwzNiq81Pzc,10971
61
+ fractal_server/app/routes/api/v2/workflow_import.py,sha256=-7Er3FWGF_1xI2qHFO9gfLVQAok5bojd7mbzQxa9Ofw,10858
62
62
  fractal_server/app/routes/api/v2/workflowtask.py,sha256=ciHTwXXFiFnMF7ZpJ3Xs0q6YfuZrFvIjqndlzAEdZpo,6969
63
63
  fractal_server/app/routes/auth/__init__.py,sha256=fao6CS0WiAjHDTvBzgBVV_bSXFpEAeDBF6Z6q7rRkPc,1658
64
64
  fractal_server/app/routes/auth/_aux_auth.py,sha256=ifkNocTYatBSMYGwiR14qohmvR9SfMldceiEj6uJBrU,4783
@@ -82,12 +82,12 @@ fractal_server/app/runner/exceptions.py,sha256=_qZ_t8O4umAdJ1ikockiF5rDJuxnEskrG
82
82
  fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
83
83
  fractal_server/app/runner/executors/slurm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
84
84
  fractal_server/app/runner/executors/slurm/_batching.py,sha256=3mfeFuYm3UA4EXh4VWuqZTF-dcINECZgTHoPOaOszDo,8840
85
- fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=RkFrp9bltfVxrp5Ei2KuCMEft6q3mBArTvSBvtHA2n4,15682
85
+ fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=P0TDfIFf07h0hIhVNZUcY3t5vgdjptU-2T0uC_ZBEB4,15688
86
86
  fractal_server/app/runner/executors/slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77JRK82g_2hx0iBKTiMghuIo,5852
87
87
  fractal_server/app/runner/executors/slurm/ssh/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
88
88
  fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py,sha256=bKo5Ja0IGxJWpPWyh9dN0AG-PwzTDZzD5LyaEHB3YU4,3742
89
89
  fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py,sha256=rwlqZzoGo4SAb4nSlFjsQJdaCgfM1J6YGcjb8yYxlqc,4506
90
- fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=yRn5v0ZUX_dQdN1MN8gjRBMCXVWZ_PZgcI2wnWXIAO8,54070
90
+ fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=U2-tNE_5ECHFIoXjEvBlaSXKaIf-1IXZlDs0c34mab8,54110
91
91
  fractal_server/app/runner/executors/slurm/sudo/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
92
92
  fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py,sha256=wAgwpVcr6JIslKHOuS0FhRa_6T1KCManyRJqA-fifzw,1909
93
93
  fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py,sha256=z5LlhaiqAb8pHsF1WwdzXN39C5anQmwjo1rSQgtRAYE,4422
@@ -101,7 +101,7 @@ fractal_server/app/runner/set_start_and_last_task_index.py,sha256=-q4zVybAj8ek2X
101
101
  fractal_server/app/runner/shutdown.py,sha256=I_o2iYKJwzku0L3E85ETjrve3QPECygR5xhhsAo5huM,2910
102
102
  fractal_server/app/runner/task_files.py,sha256=sd_MpJ01C8c9QTO8GzGMidFGdlq_hXX_ARDRhd_YMnI,3762
103
103
  fractal_server/app/runner/v1/__init__.py,sha256=VvJFk4agX2X3fQfDcoNmOB2ouNCaQU7dAqaFmpcdP8I,15063
104
- fractal_server/app/runner/v1/_common.py,sha256=EiSfp-PvhtTD3uijSec5CNKxe50ITts2DyGCFcjfVBw,21619
104
+ fractal_server/app/runner/v1/_common.py,sha256=bHM_fU0ubwoxiv0V3uUnb0uVBPpJ_rJaUaQGqACwHS4,21549
105
105
  fractal_server/app/runner/v1/_local/__init__.py,sha256=KlSML4LqF4p1IfhSd8tAkiu3aeDzifeanuNXjATDsYE,6929
106
106
  fractal_server/app/runner/v1/_local/_local_config.py,sha256=hM7SPxR07luXPcXdrWXRpEB2uOyjSSRUdqW3QBKJn9c,3147
107
107
  fractal_server/app/runner/v1/_local/_submit_setup.py,sha256=XyBDPb4IYdKEEnzLYdcYteIHWVWofJxKMmQCyRkn5Bc,1509
@@ -112,7 +112,7 @@ fractal_server/app/runner/v1/_slurm/get_slurm_config.py,sha256=6pQNNx997bLIfLp0g
112
112
  fractal_server/app/runner/v1/common.py,sha256=_L-vjLnWato80VdlB_BFN4G8P4jSM07u-5cnl1T3S34,3294
113
113
  fractal_server/app/runner/v1/handle_failed_job.py,sha256=bHzScC_aIlU3q-bQxGW6rfWV4xbZ2tho_sktjsAs1no,4684
114
114
  fractal_server/app/runner/v2/__init__.py,sha256=4RTlY34bOqgmzqVHXER0-lpnKaG15boMgDyf1L40JWg,17362
115
- fractal_server/app/runner/v2/_local/__init__.py,sha256=KTj14K6jH8fXGUi5P7u5_RqEE1zF4aXtgPxCKzw46iw,5971
115
+ fractal_server/app/runner/v2/_local/__init__.py,sha256=nTQrdPaxsWvUAhgq-1hMq8f5W-LwUlaapyjOdQ7BfQ8,5857
116
116
  fractal_server/app/runner/v2/_local/_local_config.py,sha256=9oi209Dlp35ANfxb_DISqmMKKc6DPaMsmYVWbZLseME,3630
117
117
  fractal_server/app/runner/v2/_local/_submit_setup.py,sha256=MucNOo8Er0F5ZIwH7CnTeXgnFMc6d3pKPkv563QNVi0,1630
118
118
  fractal_server/app/runner/v2/_local/executor.py,sha256=QrJlD77G6q4WohoJQO7XXbvi2RlCUsNvMnPDEZIoAqA,3620
@@ -122,9 +122,9 @@ fractal_server/app/runner/v2/_local_experimental/_submit_setup.py,sha256=we7r-sQ
122
122
  fractal_server/app/runner/v2/_local_experimental/executor.py,sha256=plvEqqdcXOSohYsQoykYlyDwCING7OO5h-4XAZtwdPs,5503
123
123
  fractal_server/app/runner/v2/_slurm_common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
124
124
  fractal_server/app/runner/v2/_slurm_common/get_slurm_config.py,sha256=UdkoFF0HF_TdKbay-d9bjkxT2ltcOE5i8H_FoOu64HU,6202
125
- fractal_server/app/runner/v2/_slurm_ssh/__init__.py,sha256=D0Dnbhnzw0BXwQmjqLmxqpE9oreAtasA-9aOzxC4l_I,4530
125
+ fractal_server/app/runner/v2/_slurm_ssh/__init__.py,sha256=q9XL6q6s77-bSudRY1Vg5_NcHXvDEZZJ8PhcrPG28uE,4358
126
126
  fractal_server/app/runner/v2/_slurm_ssh/_submit_setup.py,sha256=a5_FDPH_yxYmrjAjMRLgh_Y4DSG3mRslCLQodGM3-t4,2838
127
- fractal_server/app/runner/v2/_slurm_sudo/__init__.py,sha256=q2fwiKqtNpXtfs5wUFQjwJxdYqKPPTbCy1ieBhhi-Bw,4316
127
+ fractal_server/app/runner/v2/_slurm_sudo/__init__.py,sha256=cK0MXXnIqRBgKsv37VqHe2poQfrFYyUXDHOl5YXrXrU,4145
128
128
  fractal_server/app/runner/v2/_slurm_sudo/_submit_setup.py,sha256=a5_FDPH_yxYmrjAjMRLgh_Y4DSG3mRslCLQodGM3-t4,2838
129
129
  fractal_server/app/runner/v2/deduplicate_list.py,sha256=-imwO7OB7ATADEnqVbTElUwoY0YIJCTf_SbWJNN9OZg,639
130
130
  fractal_server/app/runner/v2/handle_failed_job.py,sha256=fipRJT5Y8UY0US4bXUX-4ORTAQ1AetZcCAOVCjDO3_c,5202
@@ -138,7 +138,7 @@ fractal_server/app/schemas/__init__.py,sha256=stURAU_t3AOBaH0HSUbV-GKhlPKngnnIMo
138
138
  fractal_server/app/schemas/_validators.py,sha256=T5EswIJAJRvawfzqWtPcN2INAfiBXyE4m0iwQm4ht-0,3149
139
139
  fractal_server/app/schemas/user.py,sha256=icjox9gK_invW44Nh_L4CvqfRa92qghyQhmevyg09nQ,2243
140
140
  fractal_server/app/schemas/user_group.py,sha256=t30Kd07PY43G_AqFDb8vjdInTeLeU9WvFZDx8fVLPSI,1750
141
- fractal_server/app/schemas/user_settings.py,sha256=TalISeEfCrtN8LgqbLx1Q8ZPoeiZnbksg5NYAVzkIqY,3527
141
+ fractal_server/app/schemas/user_settings.py,sha256=re7ZFS8BLjR9MdIoZNRt2DNPc7znCgDpEYFKr8ZsAZg,2980
142
142
  fractal_server/app/schemas/v1/__init__.py,sha256=CrBGgBhoemCvmZ70ZUchM-jfVAICnoa7AjZBAtL2UB0,1852
143
143
  fractal_server/app/schemas/v1/applyworkflow.py,sha256=dYArxQAOBdUIEXX_Ejz8b9fBhEYu1nMm6b_Z6_P6TgA,4052
144
144
  fractal_server/app/schemas/v1/dataset.py,sha256=DWFCxZjApcKt2M6UJMK0tmejXwUT09vjUULf2D7Y-f0,3293
@@ -149,7 +149,7 @@ fractal_server/app/schemas/v1/state.py,sha256=tBXzp_qW2TNNNPBo-AWEaffEU-1GkMBtUo
149
149
  fractal_server/app/schemas/v1/task.py,sha256=7BxOZ_qoRQ8n3YbQpDvB7VMcxB5fSYQmR5RLIWhuJ5U,3704
150
150
  fractal_server/app/schemas/v1/task_collection.py,sha256=uvq9bcMaGD_qHsh7YtcpoSAkVAbw12eY4DocIO3MKOg,3057
151
151
  fractal_server/app/schemas/v1/workflow.py,sha256=oRKamLSuAgrTcv3gMMxGcotDloLL2c3NNgPA39UEmmM,4467
152
- fractal_server/app/schemas/v2/__init__.py,sha256=jAmAxPulME4hFnQJXMTxwoMZsDjZp9jjb-m__OByiXo,2505
152
+ fractal_server/app/schemas/v2/__init__.py,sha256=ILWYZu_PfVVuZ8-qMRAuRuzBhne6nhS6sUzsigLcl-E,2557
153
153
  fractal_server/app/schemas/v2/dataset.py,sha256=zRlcO0wDZahTW1PINdVEuARZ7GZUuEqqop7UdE3-5do,2470
154
154
  fractal_server/app/schemas/v2/dumps.py,sha256=s6dg-pHZFui6t2Ktm0SMxjKDN-v-ZqBHz9iTsBQF3eU,1712
155
155
  fractal_server/app/schemas/v2/job.py,sha256=42V-bFfMvysRplwTKGsL_WshAVsWSM6yjFqypxwrY3k,3020
@@ -157,20 +157,20 @@ fractal_server/app/schemas/v2/manifest.py,sha256=Uqtd7DbyOkf9bxBOKkU7Sv7nToBIFGU
157
157
  fractal_server/app/schemas/v2/project.py,sha256=ABv9LSLVCq1QYthEhBvZOTn_4DFEC-7cH28tFGFdM7I,589
158
158
  fractal_server/app/schemas/v2/status.py,sha256=SQaUpQkjFq5c5k5J4rOjNhuQaDOEg8lksPhkKmPU5VU,332
159
159
  fractal_server/app/schemas/v2/task.py,sha256=FFAbYwDlqowB8gVMdjFVPVHvAM0T89PYLixUth49xfQ,6870
160
- fractal_server/app/schemas/v2/task_collection.py,sha256=yHpCRxoj6tKqCiQfUjaTj8SfCn1ChD_P6okfEOzyUDE,6518
161
- fractal_server/app/schemas/v2/task_group.py,sha256=fSjdLbClrpmrPj5hFZMu9DoJW4Y33EnbOh0HjMBsGVc,3784
160
+ fractal_server/app/schemas/v2/task_collection.py,sha256=9c_yyFcVBXdAZpQQniy1bROhYnQT7G1BflOpMY1joPE,6250
161
+ fractal_server/app/schemas/v2/task_group.py,sha256=EPQ1WHjIA8WDrpsTfvfRESjwUVzu6jKiaKZx45b36N4,3215
162
162
  fractal_server/app/schemas/v2/workflow.py,sha256=-KWvXnbHBFA3pj5n7mfSyLKJQSqkJmoziIEe7mpLl3M,1875
163
163
  fractal_server/app/schemas/v2/workflowtask.py,sha256=vDdMktYbHeYBgB5OuWSv6wRPRXWqvetkeqQ7IC5YtfA,5751
164
164
  fractal_server/app/security/__init__.py,sha256=MlWVrLFPj9M2Gug-k8yATM-Cw066RugVU4KK6kMRbnQ,13019
165
- fractal_server/app/user_settings.py,sha256=aZgQ3i0JkHfgwLGW1ee6Gzr1ae3IioFfJKKSsSS8Svk,1312
166
- fractal_server/config.py,sha256=Bk6EFKnU07sjgThf2NVEqrFAx9F4s0BfCvDKtWHzJTc,23217
165
+ fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
166
+ fractal_server/config.py,sha256=wRWJqyEeH4j2puH-fGlCYKLoKFh9pzRsQkS6q1VtO9M,23173
167
167
  fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
168
168
  fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
169
169
  fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
170
170
  fractal_server/images/__init__.py,sha256=xO6jTLE4EZKO6cTDdJsBmK9cdeh9hFTaSbSuWgQg7y4,196
171
171
  fractal_server/images/models.py,sha256=UlWazUOFQtpS3pZuROjcJXviG_Ai453jqUDHdzuvD5w,4170
172
172
  fractal_server/images/tools.py,sha256=gxeniYy4Z-cp_ToK2LHPJUTVVUUrdpogYdcBUvBuLiY,2209
173
- fractal_server/logger.py,sha256=56wfka6fHaa3Rx5qO009nEs_y8gx5wZ2NUNZZ1I-uvc,5130
173
+ fractal_server/logger.py,sha256=zwg_AjIHkNP0ruciXjm5lI5UFP3n6tMHullsM9lDjz4,5039
174
174
  fractal_server/main.py,sha256=gStLT9Du5QMpc9SyvRvtKU21EKwp-dG4HL3zGHzE06A,4908
175
175
  fractal_server/migrations/README,sha256=4rQvyDfqodGhpJw74VYijRmgFP49ji5chyEemWGHsuw,59
176
176
  fractal_server/migrations/env.py,sha256=9t_OeKVlhM8WRcukmTrLbWNup-imiBGP_9xNgwCbtpI,2730
@@ -179,6 +179,7 @@ fractal_server/migrations/script.py.mako,sha256=oMXw9LC3zRbinWWPPDgeZ4z9FJrV2zhR
179
179
  fractal_server/migrations/versions/034a469ec2eb_task_groups.py,sha256=vrPhC8hfFu1c4HmLHNZyCuqEfecFD8-bWc49bXMNes0,6199
180
180
  fractal_server/migrations/versions/091b01f51f88_add_usergroup_and_linkusergroup_table.py,sha256=-BSS9AFTPcu3gYC-sYbawSy4MWQQx8TfMb5BW5EBKmQ,1450
181
181
  fractal_server/migrations/versions/19eca0dd47a9_user_settings_project_dir.py,sha256=Q1Gj1cJ0UrdLBJ5AXfFK9QpxTtmcv-4Z3NEGDnxOme4,961
182
+ fractal_server/migrations/versions/316140ff7ee1_remove_usersettings_cache_dir.py,sha256=lANgTox0rz459_yo1Rw7fGCT1qw5sUCUXTLUMc_Bzf8,911
182
183
  fractal_server/migrations/versions/4c308bcaea2b_add_task_args_schema_and_task_args_.py,sha256=-wHe-fOffmYeAm0JXVl_lxZ7hhDkaEVqxgxpHkb_uL8,954
183
184
  fractal_server/migrations/versions/4cedeb448a53_workflowtask_foreign_keys_not_nullables.py,sha256=Mob8McGYAcmgvrseyyYOa54E6Gsgr-4SiGdC-r9O4_A,1157
184
185
  fractal_server/migrations/versions/501961cfcd85_remove_link_between_v1_and_v2_tasks_.py,sha256=5ROUgcoZOdjf8kMt6cxuvPhzHmV6xaCxvZEbhUEyZM4,3271
@@ -203,7 +204,7 @@ fractal_server/migrations/versions/efa89c30e0a4_add_project_timestamp_created.py
203
204
  fractal_server/migrations/versions/f384e1c0cf5d_drop_task_default_args_columns.py,sha256=9BwqUS9Gf7UW_KjrzHbtViC880qhD452KAytkHWWZyk,746
204
205
  fractal_server/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
205
206
  fractal_server/ssh/__init__.py,sha256=sVUmzxf7_DuXG1xoLQ1_00fo5NPhi2LJipSmU5EAkPs,124
206
- fractal_server/ssh/_fabric.py,sha256=56ud2_kkIkdThADjls5EEJ3leFwDS4bxYpyCb0qI578,22815
207
+ fractal_server/ssh/_fabric.py,sha256=lNy4IX1I4We6VoWa4Bz4fUPuApLMSoejpyE6I3jDZeM,22869
207
208
  fractal_server/string_tools.py,sha256=XtMNsr5R7GmgzmFi68zkKMedHs8vjGoVMMCXqWhIk9k,2568
208
209
  fractal_server/syringe.py,sha256=3qSMW3YaMKKnLdgnooAINOPxnCOxP7y2jeAQYB21Gdo,2786
209
210
  fractal_server/tasks/__init__.py,sha256=kadmVUoIghl8s190_Tt-8f-WBqMi8u8oU4Pvw39NHE8,23
@@ -217,12 +218,12 @@ fractal_server/tasks/v1/utils.py,sha256=HYFyNAyZofmf--mVgdwGC5TJpGShIWIDaS01yRr4
217
218
  fractal_server/tasks/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
218
219
  fractal_server/tasks/v2/local/__init__.py,sha256=9RVItnS7OyLsJOuJjWMCicaky4ASUPQEYD4SzDs0hOE,141
219
220
  fractal_server/tasks/v2/local/_utils.py,sha256=EvhmVwYjqaNyDCUMEsTWYOUXLgEwR1xr6bu32apCEI8,2491
220
- fractal_server/tasks/v2/local/collect.py,sha256=BbXSgxExPUxFxcmBs3ejwWzRae-sQgfbk3zZkAQg77Y,12190
221
+ fractal_server/tasks/v2/local/collect.py,sha256=JuMplfREqrPvVEGlT5kJhcmZXC_iYlwvNlkgFrCaCC0,12107
221
222
  fractal_server/tasks/v2/local/deactivate.py,sha256=XR1nvJY3mKCRqwPwV79rVaQmtb3J83KdmJKjTOHD-cU,9250
222
223
  fractal_server/tasks/v2/local/reactivate.py,sha256=R3rArAzUpMGf6xa3dGVwwXHW9WVDi5ia28AFisZsqNc,6112
223
224
  fractal_server/tasks/v2/ssh/__init__.py,sha256=aSQbVi6Ummt9QzcSLWNmSqYjfdxrn9ROmqgH6bDpI7k,135
224
- fractal_server/tasks/v2/ssh/_utils.py,sha256=2E-F_862zM6FZA-im-E8t8kjptWRIhBj1IDHC6QD1H8,2818
225
- fractal_server/tasks/v2/ssh/collect.py,sha256=FkTfyhdwAp4qa4W_dqjT0CmuDMFuCBSOYjg_y1Kq2Bs,12939
225
+ fractal_server/tasks/v2/ssh/_utils.py,sha256=LjaEYVUJDChilu3YuhxuGWYRNnVJ_zqNE9SDHdRTIHY,2824
226
+ fractal_server/tasks/v2/ssh/collect.py,sha256=2XXEPpl4LS22A75v_k4Bd46k46tmnLNZfceHyPi3kXo,13457
226
227
  fractal_server/tasks/v2/ssh/deactivate.py,sha256=Ffk_UuQSBUBNBCiviuKNhEUGyZPQa4_erJKFdwgMcE8,10616
227
228
  fractal_server/tasks/v2/ssh/reactivate.py,sha256=jdO8iyzavzSVPcOpIZrYSEkGPYTvz5XJ5h_5-nz9yzA,7896
228
229
  fractal_server/tasks/v2/templates/1_create_venv.sh,sha256=PK0jdHKtQpda1zULebBaVPORt4t6V17wa4N1ohcj5ac,548
@@ -232,15 +233,15 @@ fractal_server/tasks/v2/templates/4_pip_show.sh,sha256=84NGHlg6JIbrQktgGKyfGsggP
232
233
  fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh,sha256=q-6ZUvA6w6FDVEoSd9O63LaJ9tKZc7qAFH72SGPrd_k,284
233
234
  fractal_server/tasks/v2/templates/6_pip_install_from_freeze.sh,sha256=n9C8w76YraLbeTe7NhuLzvAQiJCm_akL3Mc3EMfxrHo,1007
234
235
  fractal_server/tasks/v2/utils_background.py,sha256=tikXhggqxdU7EnKdx2co3UwinlDazEjfOPQOXtO58zs,4240
235
- fractal_server/tasks/v2/utils_database.py,sha256=6r56yyFPnEBrXl6ncmO6D76znzISQCFZqCYcD-Ummd4,1213
236
+ fractal_server/tasks/v2/utils_database.py,sha256=g5m3sNPZKQ3AjflhPURDlAppQcIS5T1A8a1macdswBA,1268
236
237
  fractal_server/tasks/v2/utils_package_names.py,sha256=RDg__xrvQs4ieeVzmVdMcEh95vGQYrv9Hfal-5EDBM8,2393
237
- fractal_server/tasks/v2/utils_python_interpreter.py,sha256=-EWh3Y3VqHLDOWUO_wG_wknqmGqKAD0O2KTLhNjrZaI,948
238
- fractal_server/tasks/v2/utils_templates.py,sha256=C5WLuY3uGG2s53OEL-__H35-fmSlguwZx836BPFHBpE,2732
238
+ fractal_server/tasks/v2/utils_python_interpreter.py,sha256=5_wrlrTqXyo1YuLZvAW9hrSoh5MyLOzdPVUlUwM7uDQ,955
239
+ fractal_server/tasks/v2/utils_templates.py,sha256=MS8zu24qimJSktZaHruPxkwIl81ZoUnIVGtnMHS4Y3o,2876
239
240
  fractal_server/urls.py,sha256=5o_qq7PzKKbwq12NHSQZDmDitn5RAOeQ4xufu-2v9Zk,448
240
241
  fractal_server/utils.py,sha256=utvmBx8K9I8hRWFquxna2pBaOqe0JifDL_NVPmihEJI,3525
241
242
  fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
242
- fractal_server-2.9.1.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
243
- fractal_server-2.9.1.dist-info/METADATA,sha256=2XOrXmdvOE1zcrKJoCr4VemPcRk3PpZJOUg_0yXxeDc,4543
244
- fractal_server-2.9.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
245
- fractal_server-2.9.1.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
246
- fractal_server-2.9.1.dist-info/RECORD,,
243
+ fractal_server-2.10.0a0.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
244
+ fractal_server-2.10.0a0.dist-info/METADATA,sha256=G-j04Q-B4uv3h77NOrY7lh-IEVFO_7AzmV3Lp3zqw8w,4546
245
+ fractal_server-2.10.0a0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
246
+ fractal_server-2.10.0a0.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
247
+ fractal_server-2.10.0a0.dist-info/RECORD,,