fractal-server 2.9.0a4__py3-none-any.whl → 2.9.0a5__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
@@ -1 +1 @@
- __VERSION__ = "2.9.0a4"
+ __VERSION__ = "2.9.0a5"
@@ -49,8 +49,8 @@ class TaskGroupV2(SQLModel, table=True):
  sa_column=Column(DateTime(timezone=True), nullable=False),
  )
  timestamp_last_used: Optional[datetime] = Field(
- default=None,
- sa_column=Column(DateTime(timezone=True), nullable=True),
+ default_factory=get_timestamp,
+ sa_column=Column(DateTime(timezone=True), nullable=False),
  )

  @property
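Note: `timestamp_last_used` is now non-nullable and populated via `default_factory=get_timestamp`. The sketch below is an illustrative stand-in for what such a default factory presumably looks like (a timezone-aware UTC "now"); it is not the package's own `fractal_server.utils.get_timestamp`.

    from datetime import datetime, timezone

    def get_timestamp() -> datetime:
        # Hypothetical stand-in: a timezone-aware UTC "now", suitable as a
        # default_factory for a non-nullable DateTime(timezone=True) column.
        return datetime.now(tz=timezone.utc)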
@@ -33,7 +33,7 @@ from ..aux._job import _write_shutdown_file
  from ..aux._runner import _check_shutdown_is_supported
  from fractal_server.app.models import UserOAuth
  from fractal_server.app.routes.auth import current_active_superuser
- from fractal_server.app.routes.aux._timestamp import _convert_to_db_timestamp
+ from fractal_server.app.routes.aux import _raise_if_naive_datetime

  router_admin_v1 = APIRouter()

@@ -54,6 +54,7 @@ async def view_project(
  id: If not `None`, select a given `project.id`.
  user_id: If not `None`, select a given `project.user_id`.
  """
+ _raise_if_naive_datetime(timestamp_created_min, timestamp_created_max)

  stm = select(Project)

@@ -63,10 +64,8 @@ async def view_project(
  if user_id is not None:
  stm = stm.where(Project.user_list.any(UserOAuth.id == user_id))
  if timestamp_created_min is not None:
- timestamp_created_min = _convert_to_db_timestamp(timestamp_created_min)
  stm = stm.where(Project.timestamp_created >= timestamp_created_min)
  if timestamp_created_max is not None:
- timestamp_created_max = _convert_to_db_timestamp(timestamp_created_max)
  stm = stm.where(Project.timestamp_created <= timestamp_created_max)

  res = await db.execute(stm)
@@ -96,6 +95,8 @@ async def view_workflow(
  name_contains: If not `None`, select workflows such that their
  `name` attribute contains `name_contains` (case-insensitive).
  """
+ _raise_if_naive_datetime(timestamp_created_min, timestamp_created_max)
+
  stm = select(Workflow)

  if user_id is not None:
@@ -112,10 +113,8 @@ async def view_workflow(
  func.lower(Workflow.name).contains(name_contains.lower())
  )
  if timestamp_created_min is not None:
- timestamp_created_min = _convert_to_db_timestamp(timestamp_created_min)
  stm = stm.where(Workflow.timestamp_created >= timestamp_created_min)
  if timestamp_created_max is not None:
- timestamp_created_max = _convert_to_db_timestamp(timestamp_created_max)
  stm = stm.where(Workflow.timestamp_created <= timestamp_created_max)

  res = await db.execute(stm)
@@ -147,6 +146,8 @@ async def view_dataset(
  `name` attribute contains `name_contains` (case-insensitive).
  type: If not `None`, select a given `dataset.type`.
  """
+ _raise_if_naive_datetime(timestamp_created_min, timestamp_created_max)
+
  stm = select(Dataset)

  if user_id is not None:
@@ -165,10 +166,8 @@ async def view_dataset(
  if type is not None:
  stm = stm.where(Dataset.type == type)
  if timestamp_created_min is not None:
- timestamp_created_min = _convert_to_db_timestamp(timestamp_created_min)
  stm = stm.where(Dataset.timestamp_created >= timestamp_created_min)
  if timestamp_created_max is not None:
- timestamp_created_max = _convert_to_db_timestamp(timestamp_created_max)
  stm = stm.where(Dataset.timestamp_created <= timestamp_created_max)

  res = await db.execute(stm)
@@ -218,6 +217,13 @@ async def view_job(
  log: If `True`, include `job.log`, if `False`
  `job.log` is set to `None`.
  """
+ _raise_if_naive_datetime(
+ start_timestamp_min,
+ start_timestamp_max,
+ end_timestamp_min,
+ end_timestamp_max,
+ )
+
  stm = select(ApplyWorkflow)

  if id is not None:
@@ -237,16 +243,12 @@ async def view_job(
  if status is not None:
  stm = stm.where(ApplyWorkflow.status == status)
  if start_timestamp_min is not None:
- start_timestamp_min = _convert_to_db_timestamp(start_timestamp_min)
  stm = stm.where(ApplyWorkflow.start_timestamp >= start_timestamp_min)
  if start_timestamp_max is not None:
- start_timestamp_max = _convert_to_db_timestamp(start_timestamp_max)
  stm = stm.where(ApplyWorkflow.start_timestamp <= start_timestamp_max)
  if end_timestamp_min is not None:
- end_timestamp_min = _convert_to_db_timestamp(end_timestamp_min)
  stm = stm.where(ApplyWorkflow.end_timestamp >= end_timestamp_min)
  if end_timestamp_max is not None:
- end_timestamp_max = _convert_to_db_timestamp(end_timestamp_max)
  stm = stm.where(ApplyWorkflow.end_timestamp <= end_timestamp_max)

  res = await db.execute(stm)
@@ -16,9 +16,9 @@ from fractal_server.app.models import UserOAuth
  from fractal_server.app.models.v2 import JobV2
  from fractal_server.app.models.v2 import ProjectV2
  from fractal_server.app.routes.auth import current_active_superuser
+ from fractal_server.app.routes.aux import _raise_if_naive_datetime
  from fractal_server.app.routes.aux._job import _write_shutdown_file
  from fractal_server.app.routes.aux._runner import _check_shutdown_is_supported
- from fractal_server.app.routes.aux._timestamp import _convert_to_db_timestamp
  from fractal_server.app.runner.filenames import WORKFLOW_LOG_FILENAME
  from fractal_server.app.schemas.v2 import JobReadV2
  from fractal_server.app.schemas.v2 import JobStatusTypeV2
@@ -66,6 +66,14 @@ async def view_job(
  log: If `True`, include `job.log`, if `False`
  `job.log` is set to `None`.
  """
+
+ _raise_if_naive_datetime(
+ start_timestamp_min,
+ start_timestamp_max,
+ end_timestamp_min,
+ end_timestamp_max,
+ )
+
  stm = select(JobV2)

  if id is not None:
@@ -83,16 +91,16 @@ async def view_job(
  if status is not None:
  stm = stm.where(JobV2.status == status)
  if start_timestamp_min is not None:
- start_timestamp_min = _convert_to_db_timestamp(start_timestamp_min)
+ start_timestamp_min = start_timestamp_min
  stm = stm.where(JobV2.start_timestamp >= start_timestamp_min)
  if start_timestamp_max is not None:
- start_timestamp_max = _convert_to_db_timestamp(start_timestamp_max)
+ start_timestamp_max = start_timestamp_max
  stm = stm.where(JobV2.start_timestamp <= start_timestamp_max)
  if end_timestamp_min is not None:
- end_timestamp_min = _convert_to_db_timestamp(end_timestamp_min)
+ end_timestamp_min = end_timestamp_min
  stm = stm.where(JobV2.end_timestamp >= end_timestamp_min)
  if end_timestamp_max is not None:
- end_timestamp_max = _convert_to_db_timestamp(end_timestamp_max)
+ end_timestamp_max = end_timestamp_max
  stm = stm.where(JobV2.end_timestamp <= end_timestamp_max)

  res = await db.execute(stm)
@@ -20,6 +20,7 @@ from fractal_server.app.routes.auth import current_active_superuser
  from fractal_server.app.routes.auth._aux_auth import (
  _verify_user_belongs_to_group,
  )
+ from fractal_server.app.routes.aux import _raise_if_naive_datetime
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
  from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
  from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read
@@ -46,6 +47,8 @@ async def get_task_group_activity_list(
  db: AsyncSession = Depends(get_async_db),
  ) -> list[TaskGroupActivityV2Read]:

+ _raise_if_naive_datetime(timestamp_started_min)
+
  stm = select(TaskGroupActivityV2)
  if task_group_activity_id is not None:
  stm = stm.where(TaskGroupActivityV2.id == task_group_activity_id)
@@ -93,16 +96,26 @@ async def query_task_group_list(
  active: Optional[bool] = None,
  pkg_name: Optional[str] = None,
  origin: Optional[TaskGroupV2OriginEnum] = None,
+ timestamp_last_used_min: Optional[datetime] = None,
+ timestamp_last_used_max: Optional[datetime] = None,
  user: UserOAuth = Depends(current_active_superuser),
  db: AsyncSession = Depends(get_async_db),
  ) -> list[TaskGroupReadV2]:

  stm = select(TaskGroupV2)

+ _raise_if_naive_datetime(
+ timestamp_last_used_max,
+ timestamp_last_used_min,
+ )
+
  if user_group_id is not None and private is True:
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=f"Cannot set `user_group_id` with {private=}",
+ detail=(
+ "Cannot get task groups with both "
+ f"{user_group_id=} and {private=}."
+ ),
  )
  if user_id is not None:
  stm = stm.where(TaskGroupV2.user_id == user_id)
@@ -122,6 +135,14 @@ async def query_task_group_list(
  stm = stm.where(TaskGroupV2.origin == origin)
  if pkg_name is not None:
  stm = stm.where(TaskGroupV2.pkg_name.icontains(pkg_name))
+ if timestamp_last_used_min is not None:
+ stm = stm.where(
+ TaskGroupV2.timestamp_last_used >= timestamp_last_used_min
+ )
+ if timestamp_last_used_max is not None:
+ stm = stm.where(
+ TaskGroupV2.timestamp_last_used <= timestamp_last_used_max
+ )

  res = await db.execute(stm)
  task_groups_list = res.scalars().all()
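With the new `timestamp_last_used_min` / `timestamp_last_used_max` query parameters and the naive-datetime guard above, clients must pass timezone-aware timestamps. A minimal client sketch (base URL, route path, and missing authentication are assumptions, not taken from this diff):

    from datetime import datetime, timezone
    import httpx

    # Hypothetical admin query: task groups last used since 2024-01-01 (UTC).
    params = {
        "timestamp_last_used_min": datetime(2024, 1, 1, tzinfo=timezone.utc).isoformat(),
        "pkg_name": "fractal-tasks-core",
    }
    response = httpx.get("http://localhost:8000/admin/v2/task-group/", params=params)
    print(response.status_code, response.json())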
@@ -13,6 +13,9 @@ from fractal_server.app.models.v2 import TaskGroupActivityV2
  from fractal_server.app.routes.api.v2._aux_functions_task_lifecycle import (
  check_no_ongoing_activity,
  )
+ from fractal_server.app.routes.api.v2._aux_functions_task_lifecycle import (
+ check_no_submitted_job,
+ )
  from fractal_server.app.routes.api.v2._aux_functions_tasks import (
  _get_task_group_or_404,
  )
@@ -58,9 +61,6 @@ async def deactivate_task_group(
  task_group_id=task_group_id, db=db
  )

- # Check no other activity is ongoing
- await check_no_ongoing_activity(task_group_id=task_group_id, db=db)
-
  # Check that task-group is active
  if not task_group.active:
  raise HTTPException(
@@ -70,6 +70,12 @@ async def deactivate_task_group(
  ),
  )

+ # Check no other activity is ongoing
+ await check_no_ongoing_activity(task_group_id=task_group_id, db=db)
+
+ # Check no submitted jobs use tasks from this task group
+ await check_no_submitted_job(task_group_id=task_group.id, db=db)
+
  # Shortcut for task-group with origin="other"
  if task_group.origin == TaskGroupV2OriginEnum.OTHER:
  task_group.active = False
@@ -178,6 +184,9 @@ async def reactivate_task_group(
  # Check no other activity is ongoing
  await check_no_ongoing_activity(task_group_id=task_group_id, db=db)

+ # Check no submitted jobs use tasks from this task group
+ await check_no_submitted_job(task_group_id=task_group.id, db=db)
+
  # Shortcut for task-group with origin="other"
  if task_group.origin == TaskGroupV2OriginEnum.OTHER:
  task_group.active = True
@@ -4,10 +4,16 @@ from fastapi import HTTPException
  from fastapi import status
  from httpx import AsyncClient
  from httpx import TimeoutException
+ from sqlmodel import func
  from sqlmodel import select

  from fractal_server.app.db import AsyncSession
+ from fractal_server.app.models.v2 import JobV2
  from fractal_server.app.models.v2 import TaskGroupActivityV2
+ from fractal_server.app.models.v2 import TaskV2
+ from fractal_server.app.models.v2 import WorkflowTaskV2
+ from fractal_server.app.models.v2 import WorkflowV2
+ from fractal_server.app.schemas.v2 import JobStatusTypeV2
  from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
  from fractal_server.logger import set_logger

@@ -165,3 +171,37 @@ async def check_no_ongoing_activity(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
  detail=msg,
  )
+
+
+ async def check_no_submitted_job(
+ *,
+ task_group_id: int,
+ db: AsyncSession,
+ ) -> None:
+ """
+ Find submitted jobs which include tasks from a given task group.
+
+ Arguments:
+ task_id_list: List of TaskV2 IDs
+ db: Database session
+ """
+ stm = (
+ select(func.count(JobV2.id))
+ .join(WorkflowV2, JobV2.workflow_id == WorkflowV2.id)
+ .join(WorkflowTaskV2, WorkflowTaskV2.workflow_id == WorkflowV2.id)
+ .join(TaskV2, WorkflowTaskV2.task_id == TaskV2.id)
+ .where(WorkflowTaskV2.order >= JobV2.first_task_index)
+ .where(WorkflowTaskV2.order <= JobV2.last_task_index)
+ .where(JobV2.status == JobStatusTypeV2.SUBMITTED)
+ .where(TaskV2.taskgroupv2_id == task_group_id)
+ )
+ res = await db.execute(stm)
+ num_submitted_jobs = res.scalar()
+ if num_submitted_jobs > 0:
+ raise HTTPException(
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+ detail=(
+ f"Cannot act on task group because {num_submitted_jobs} "
+ "submitted jobs use its tasks."
+ ),
+ )
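The query above counts submitted jobs whose task-index range covers at least one workflow task belonging to the task group. The toy snippet below restates that range condition in plain Python (data and names are illustrative only, not the package's models):

    # A job "uses" a task group only if it is submitted and at least one
    # workflow-task position from that group falls inside the job's
    # [first_task_index, last_task_index] range.
    jobs = [
        {"id": 1, "status": "submitted", "first_task_index": 0, "last_task_index": 1},
        {"id": 2, "status": "done", "first_task_index": 0, "last_task_index": 2},
    ]
    group_task_positions = [2]  # workflow-task order values using the group

    def blocks_lifecycle(job: dict) -> bool:
        return job["status"] == "submitted" and any(
            job["first_task_index"] <= pos <= job["last_task_index"]
            for pos in group_task_positions
        )

    print(sum(blocks_lifecycle(job) for job in jobs))  # 0: lifecycle action allowed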
@@ -15,7 +15,6 @@ from sqlmodel import select
  from .....config import get_settings
  from .....logger import set_logger
  from .....syringe import Inject
- from .....utils import get_timestamp
  from ....db import AsyncSession
  from ....db import get_async_db
  from ....models.v2 import JobV2
@@ -62,8 +61,6 @@ async def apply_workflow(
  db: AsyncSession = Depends(get_async_db),
  ) -> Optional[JobReadV2]:

- now = get_timestamp()
-
  # Remove non-submitted V2 jobs from the app state when the list grows
  # beyond a threshold
  settings = Inject(get_settings)
@@ -194,12 +191,12 @@ async def apply_workflow(
  )
  used_task_groups = res.scalars().all()
  for used_task_group in used_task_groups:
- used_task_group.timestamp_last_used = now
+ used_task_group.timestamp_last_used = job.start_timestamp
  db.add(used_task_group)
  await db.commit()

  # Define server-side job directory
- timestamp_string = now.strftime("%Y%m%d_%H%M%S")
+ timestamp_string = job.start_timestamp.strftime("%Y%m%d_%H%M%S")
  WORKFLOW_DIR_LOCAL = settings.FRACTAL_RUNNER_WORKING_BASE_DIR / (
  f"proj_v2_{project_id:07d}_wf_{workflow_id:07d}_job_{job.id:07d}"
  f"_{timestamp_string}"
@@ -21,7 +21,7 @@ from ....models.v2 import TaskGroupV2
  from ....schemas.v2 import TaskCollectPipV2
  from ....schemas.v2 import TaskGroupActivityStatusV2
  from ....schemas.v2 import TaskGroupActivityV2Read
- from ....schemas.v2 import TaskGroupCreateV2
+ from ....schemas.v2 import TaskGroupCreateV2Strict
  from ...aux.validate_user_settings import validate_user_settings
  from ._aux_functions_task_lifecycle import get_package_version_from_pypi
  from ._aux_functions_tasks import _get_valid_user_group_id
@@ -164,7 +164,7 @@ async def collect_tasks_pip(

  # Validate TaskGroupV2 attributes
  try:
- TaskGroupCreateV2(**task_group_attrs)
+ TaskGroupCreateV2Strict(**task_group_attrs)
  except ValidationError as e:
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
@@ -23,6 +23,7 @@ from fractal_server.app.routes.auth import current_active_user
  from fractal_server.app.routes.auth._aux_auth import (
  _verify_user_belongs_to_group,
  )
+ from fractal_server.app.routes.aux import _raise_if_naive_datetime
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
  from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
  from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read
@@ -47,6 +48,8 @@ async def get_task_group_activity_list(
  db: AsyncSession = Depends(get_async_db),
  ) -> list[TaskGroupActivityV2Read]:

+ _raise_if_naive_datetime(timestamp_started_min)
+
  stm = select(TaskGroupActivityV2).where(
  TaskGroupActivityV2.user_id == user.id
  )
@@ -8,6 +8,7 @@ from fastapi import status

  from ...aux.validate_user_settings import validate_user_settings
  from ._aux_functions_task_lifecycle import check_no_ongoing_activity
+ from ._aux_functions_task_lifecycle import check_no_submitted_job
  from ._aux_functions_tasks import _get_task_group_full_access
  from fractal_server.app.db import AsyncSession
  from fractal_server.app.db import get_async_db
@@ -59,6 +60,9 @@ async def deactivate_task_group(
  # Check no other activity is ongoing
  await check_no_ongoing_activity(task_group_id=task_group_id, db=db)

+ # Check no submitted jobs use tasks from this task group
+ await check_no_submitted_job(task_group_id=task_group.id, db=db)
+
  # Check that task-group is active
  if not task_group.active:
  raise HTTPException(
@@ -181,6 +185,9 @@ async def reactivate_task_group(
  # Check no other activity is ongoing
  await check_no_ongoing_activity(task_group_id=task_group_id, db=db)

+ # Check no submitted jobs use tasks from this task group
+ await check_no_submitted_job(task_group_id=task_group.id, db=db)
+
  # Shortcut for task-group with origin="other"
  if task_group.origin == TaskGroupV2OriginEnum.OTHER:
  task_group.active = True
@@ -0,0 +1,20 @@
+ from datetime import datetime
+ from typing import Optional
+
+ from fastapi import HTTPException
+ from fastapi import status
+
+
+ def _raise_if_naive_datetime(*timestamps: tuple[Optional[datetime]]) -> None:
+ """
+ Raise 422 if any not-null argument is a naive `datetime` object:
+ https://docs.python.org/3/library/datetime.html#determining-if-an-object-is-aware-or-naive
+ """
+ for timestamp in filter(None, timestamps):
+ if (timestamp.tzinfo is None) or (
+ timestamp.tzinfo.utcoffset(timestamp) is None
+ ):
+ raise HTTPException(
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+ detail=f"{timestamp=} is naive. You must provide a timezone.",
+ )
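For reference, the aware/naive distinction enforced by this new helper follows the linked Python docs; a standalone sketch (not part of the package):

    from datetime import datetime, timezone

    aware = datetime(2024, 11, 1, 12, 0, tzinfo=timezone.utc)  # passes the check
    naive = datetime(2024, 11, 1, 12, 0)                       # would trigger a 422

    for ts in (aware, naive):
        is_naive = ts.tzinfo is None or ts.tzinfo.utcoffset(ts) is None
        print(ts.isoformat(), "naive" if is_naive else "aware")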
@@ -26,7 +26,6 @@ from typing import Sequence

  import cloudpickle
  from cfut import SlurmExecutor
- from paramiko.ssh_exception import NoValidConnectionsError

  from ....filenames import SHUTDOWN_FILENAME
  from ....task_files import get_task_file_paths
@@ -409,15 +408,7 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
  args=fun_args,
  kwargs=fun_kwargs,
  )
- try:
- self._put_subfolder_sftp(jobs=[job])
- except NoValidConnectionsError as e:
- logger.error("NoValidConnectionError")
- logger.error(f"{str(e)=}")
- logger.error(f"{e.errors=}")
- for err in e.errors:
- logger.error(f"{str(err)}")
- raise e
+ self._put_subfolder_sftp(jobs=[job])
  future, job_id_str = self._submit_job(job)
  self.wait_thread.wait(job_id=job_id_str)
  return future
@@ -559,16 +550,7 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
  current_component_index += batch_size
  logger.debug("[map] Job preparation - END")

- try:
- self._put_subfolder_sftp(jobs=jobs_to_submit)
- except NoValidConnectionsError as e:
- logger.error("NoValidConnectionError")
- logger.error(f"{str(e)=}")
- logger.error(f"{e.errors=}")
- for err in e.errors:
- logger.error(f"{str(err)}")
-
- raise e
+ self._put_subfolder_sftp(jobs=jobs_to_submit)

  # Construct list of futures (one per SLURM job, i.e. one per batch)
  # FIXME SSH: we may create a single `_submit_many_jobs` method to
@@ -1073,16 +1055,7 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
  self.jobs_empty_cond.notify_all()

  # Fetch subfolder from remote host
- try:
- self._get_subfolder_sftp(jobs=jobs)
- except NoValidConnectionsError as e:
- logger.error("NoValidConnectionError")
- logger.error(f"{str(e)=}")
- logger.error(f"{e.errors=}")
- for err in e.errors:
- logger.error(f"{str(err)}")
-
- raise e
+ self._get_subfolder_sftp(jobs=jobs)

  # First round of checking whether all output files exist
  missing_out_paths = []
@@ -29,6 +29,7 @@ from .task_group import TaskGroupActivityActionV2 # noqa F401
  from .task_group import TaskGroupActivityStatusV2 # noqa F401
  from .task_group import TaskGroupActivityV2Read # noqa F401
  from .task_group import TaskGroupCreateV2 # noqa F401
+ from .task_group import TaskGroupCreateV2Strict # noqa F401
  from .task_group import TaskGroupReadV2 # noqa F401
  from .task_group import TaskGroupUpdateV2 # noqa F401
  from .task_group import TaskGroupV2OriginEnum # noqa F401
@@ -5,6 +5,7 @@ from typing import Optional
  from pydantic import BaseModel
  from pydantic import Extra
  from pydantic import Field
+ from pydantic import root_validator
  from pydantic import validator

  from .._validators import val_absolute_path
@@ -66,6 +67,32 @@ class TaskGroupCreateV2(BaseModel, extra=Extra.forbid):
  )


+ class TaskGroupCreateV2Strict(TaskGroupCreateV2):
+ """
+ A strict version of TaskGroupCreateV2, to be used for task collection.
+ """
+
+ path: str
+ venv_path: str
+ version: str
+ python_version: str
+
+ @root_validator
+ def check_wheel_file(cls, values):
+ origin = values.get("origin")
+ wheel_path = values.get("wheel_path")
+ bad_condition_1 = (
+ origin == TaskGroupV2OriginEnum.WHEELFILE and wheel_path is None
+ )
+ bad_condition_2 = (
+ origin != TaskGroupV2OriginEnum.WHEELFILE
+ and wheel_path is not None
+ )
+ if bad_condition_1 or bad_condition_2:
+ raise ValueError(f"Cannot have {origin=} and {wheel_path=}.")
+ return values
+
+
  class TaskGroupReadV2(BaseModel):
  id: int
  task_list: list[TaskReadV2]
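The root validator makes `wheel_path` and the wheel-file origin mutually required: `wheel_path` must be set when the origin is a wheel file, and unset otherwise. A standalone restatement of that rule (the enum values below are illustrative, not the package's own `TaskGroupV2OriginEnum`):

    from enum import Enum
    from typing import Optional

    class Origin(str, Enum):
        WHEELFILE = "wheel-file"   # illustrative value
        PYPI = "pypi"              # illustrative value

    def check_wheel_file(origin: Origin, wheel_path: Optional[str]) -> None:
        # Bad if origin is WHEELFILE without a wheel_path, or if a wheel_path
        # is given for any other origin (same two conditions as the validator).
        if (origin == Origin.WHEELFILE) != (wheel_path is not None):
            raise ValueError(f"Cannot have {origin=} and {wheel_path=}.")

    check_wheel_file(Origin.WHEELFILE, "/somewhere/pkg-1.0-py3-none-any.whl")  # OK
    check_wheel_file(Origin.PYPI, None)                                        # OK
    # check_wheel_file(Origin.PYPI, "/somewhere/pkg.whl")  # raises ValueError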
@@ -89,7 +116,7 @@ class TaskGroupReadV2(BaseModel):

  active: bool
  timestamp_created: datetime
- timestamp_last_used: Optional[datetime] = None
+ timestamp_last_used: datetime


  class TaskGroupUpdateV2(BaseModel, extra=Extra.forbid):
@@ -71,7 +71,7 @@ def upgrade() -> None:
  sa.Column(
  "timestamp_last_used",
  sa.DateTime(timezone=True),
- nullable=True,
+ nullable=False,
  )
  )

@@ -11,7 +11,6 @@ from typing import Optional

  import paramiko.sftp_client
  from fabric import Connection
- from fabric import Result
  from invoke import UnexpectedExit
  from paramiko.ssh_exception import NoValidConnectionsError

@@ -116,50 +115,34 @@ class FractalSSH(object):
  def logger(self) -> logging.Logger:
  return get_logger(self.logger_name)

- def _put(
- self,
- *,
- local: str,
- remote: str,
- label: str,
- lock_timeout: Optional[float] = None,
- ) -> Result:
- """
- Transfer a local file to a remote path, via SFTP.
+ def log_and_raise(self, *, e: Exception, message: str) -> None:
  """
- actual_lock_timeout = self.default_lock_timeout
- if lock_timeout is not None:
- actual_lock_timeout = lock_timeout
- with _acquire_lock_with_timeout(
- lock=self._lock,
- label=label,
- timeout=actual_lock_timeout,
- ):
- return self._sftp_unsafe().put(local, remote)
+ Log and re-raise an exception from a FractalSSH method.

- def _get(
- self,
- *,
- local: str,
- remote: str,
- label: str,
- lock_timeout: Optional[float] = None,
- ) -> Result:
- actual_lock_timeout = self.default_lock_timeout
- if lock_timeout is not None:
- actual_lock_timeout = lock_timeout
- with _acquire_lock_with_timeout(
- lock=self._lock,
- label=label,
- timeout=actual_lock_timeout,
- ):
- return self._sftp_unsafe().get(
- remote,
- local,
- prefetch=self.sftp_get_prefetch,
- max_concurrent_prefetch_requests=self.sftp_get_max_requests,
+ Arguments:
+ message: Additional message to be logged.
+ e: Original exception
+ """
+ try:
+ self.logger.error(message)
+ self.logger.error(f"Original Error {type(e)} : \n{str(e)}")
+ # Handle the specific case of `NoValidConnectionsError`s from
+ # paramiko, which store relevant information in the `errors`
+ # attribute
+ if hasattr(e, "errors"):
+ self.logger.error(f"{type(e)=}")
+ for err in e.errors:
+ self.logger.error(f"{err}")
+ except Exception as exception:
+ # Handle unexpected cases, e.g. (1) `e` has no `type`, or
+ # (2) `errors` is not iterable.
+ self.logger.error(
+ "Unexpected Error while handling exception above: "
+ f"{str(exception)}"
  )

+ raise e
+
  def _run(
  self, *args, label: str, lock_timeout: Optional[float] = None, **kwargs
  ) -> Any:
@@ -187,8 +170,17 @@ class FractalSSH(object):
  label="read_remote_json_file",
  timeout=self.default_lock_timeout,
  ):
- with self._sftp_unsafe().open(filepath, "r") as f:
- data = json.load(f)
+
+ try:
+ with self._sftp_unsafe().open(filepath, "r") as f:
+ data = json.load(f)
+ except Exception as e:
+ self.log_and_raise(
+ e=e,
+ message=(
+ f"Error in `read_remote_json_file`, for {filepath=}."
+ ),
+ )
  self.logger.info(f"END reading remote JSON file {filepath}.")
  return data

@@ -380,21 +372,29 @@ class FractalSSH(object):
  logger_name: Name of the logger
  """
  try:
- prefix = "[send_file]"
- self.logger.info(f"{prefix} START transfer of '{local}' over SSH.")
- self._put(
- local=local,
- remote=remote,
- lock_timeout=lock_timeout,
+ self.logger.info(
+ f"[send_file] START transfer of '{local}' over SSH."
+ )
+ actual_lock_timeout = self.default_lock_timeout
+ if lock_timeout is not None:
+ actual_lock_timeout = lock_timeout
+ with _acquire_lock_with_timeout(
+ lock=self._lock,
  label=f"send_file {local=} {remote=}",
+ timeout=actual_lock_timeout,
+ ):
+ self._sftp_unsafe().put(local, remote)
+ self.logger.info(
+ f"[send_file] END transfer of '{local}' over SSH."
  )
- self.logger.info(f"{prefix} END transfer of '{local}' over SSH.")
  except Exception as e:
- self.logger.error(
- f"Transferring {local=} to {remote=} over SSH failed.\n"
- f"Original Error:\n{str(e)}."
+ self.log_and_raise(
+ e=e,
+ message=(
+ "Error in `send_file`, while "
+ f"transferring {local=} to {remote=}."
+ ),
  )
- raise e

  def fetch_file(
  self,
@@ -415,19 +415,29 @@ class FractalSSH(object):
  try:
  prefix = "[fetch_file] "
  self.logger.info(f"{prefix} START fetching '{remote}' over SSH.")
- self._get(
- local=local,
- remote=remote,
- lock_timeout=lock_timeout,
+ actual_lock_timeout = self.default_lock_timeout
+ if lock_timeout is not None:
+ actual_lock_timeout = lock_timeout
+ with _acquire_lock_with_timeout(
+ lock=self._lock,
  label=f"fetch_file {local=} {remote=}",
- )
+ timeout=actual_lock_timeout,
+ ):
+ self._sftp_unsafe().get(
+ remote,
+ local,
+ prefetch=self.sftp_get_prefetch,
+ max_concurrent_prefetch_requests=self.sftp_get_max_requests, # noqa E501
+ )
  self.logger.info(f"{prefix} END fetching '{remote}' over SSH.")
  except Exception as e:
- self.logger.error(
- f"Transferring {remote=} to {local=} over SSH failed.\n"
- f"Original Error:\n{str(e)}."
+ self.log_and_raise(
+ e=e,
+ message=(
+ "Error in `fetch_file`, while "
+ f"Transferring {remote=} to {local=}."
+ ),
  )
- raise e

  def mkdir(self, *, folder: str, parents: bool = True) -> None:
  """
@@ -502,8 +512,14 @@ class FractalSSH(object):
  label=f"write_remote_file {path=}",
  timeout=actual_lock_timeout,
  ):
- with self._sftp_unsafe().open(filename=path, mode="w") as f:
- f.write(content)
+ try:
+ with self._sftp_unsafe().open(filename=path, mode="w") as f:
+ f.write(content)
+ except Exception as e:
+ self.log_and_raise(
+ e=e, message=f"Error in `write_remote_file`, for {path=}."
+ )
+
  self.logger.info(f"END writing to remote file {path}.")

  def remote_exists(self, path: str) -> bool:
@@ -523,6 +539,10 @@ class FractalSSH(object):
  except FileNotFoundError:
  self.logger.info(f"END remote_file_exists {path} / False")
  return False
+ except Exception as e:
+ self.log_and_raise(
+ e=e, message=f"Error in `remote_exists`, for {path=}."
+ )


  class FractalSSHList(object):
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fractal-server
- Version: 2.9.0a4
+ Version: 2.9.0a5
  Summary: Server component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause
@@ -1,4 +1,4 @@
- fractal_server/__init__.py,sha256=Y1rSgqglVGXD4u5k97z-tPhXn-XoXu0yguIqzQ4j9Os,24
+ fractal_server/__init__.py,sha256=g0C5KyC4Y2BjlLX8ayJEPjFY-PyIFuDQ0SsIkovQNGo,24
  fractal_server/__main__.py,sha256=dEkCfzLLQrIlxsGC-HBfoR-RBMWnJDgNrxYTyzmE9c0,6146
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -20,18 +20,18 @@ fractal_server/app/models/v2/dataset.py,sha256=-7sxHEw4IIAvF_uSan7tA3o8hvoakBkQ0
  fractal_server/app/models/v2/job.py,sha256=ypJmN-qspkKBGhBG7Mt-HypSQqcQ2EmB4Bzzb2-y550,1535
  fractal_server/app/models/v2/project.py,sha256=rAHoh5KfYwIaW7rTX0_O0jvWmxEvfo1BafvmcXuSSRk,786
  fractal_server/app/models/v2/task.py,sha256=jebD28Pz8tGcsWCItxj6uKjcD8BMMnnU8dqYhvhEB6c,1520
- fractal_server/app/models/v2/task_group.py,sha256=Lxtwxk6pfffM8IehqkFvhDuecSXnXqTWeUlf7Y78oss,3254
+ fractal_server/app/models/v2/task_group.py,sha256=5ZN3LihhSYiY0Y4zIOzuT9KTpb-QLljjo1ijn_Q4rEw,3272
  fractal_server/app/models/v2/workflow.py,sha256=YBgFGCziUgU0aJ5EM3Svu9W2c46AewZO9VBlFCHiSps,1069
  fractal_server/app/models/v2/workflowtask.py,sha256=iDuJYk8kp4PNqGmbKRtGI7y-QsbjkNd_gDsbMzL4i-g,1274
  fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- fractal_server/app/routes/admin/v1.py,sha256=5YxfWz2QK2spUKomnAteaMsWF9Ql-oHwMs1jhSjHznk,13319
+ fractal_server/app/routes/admin/v1.py,sha256=ggJZMeKhRijfVe2h2VzfIcpR15FqkKImANhkTXl0mSk,12908
  fractal_server/app/routes/admin/v2/__init__.py,sha256=KYrw0COmmMuIMp7c6YcYRXah4tEYplCWeROnPK1VTeg,681
- fractal_server/app/routes/admin/v2/job.py,sha256=DbkPmUk7V2iEGIlKa_xgC07cc3G8oNsONGDZ_aKq5zE,7589
+ fractal_server/app/routes/admin/v2/job.py,sha256=cbkFIRIIXaWmNsUFI7RAu8HpQ0mWn_bgoxtvWZxr-IA,7624
  fractal_server/app/routes/admin/v2/project.py,sha256=luy-yiGX1JYTdPm1hpIdDUUqPm8xHuipLy9k2X6zu74,1223
  fractal_server/app/routes/admin/v2/task.py,sha256=Y0eujBgGhVapNXfW9azDxw4EBzLmEmCdh70y1RNQcb0,3895
- fractal_server/app/routes/admin/v2/task_group.py,sha256=ldMQ8OIUKEhr1a_BXiwgh4K3d1T299ZaDNJjbn_zAxc,7411
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=vDaTRX8AD8ESd-2CmT2rJPDDl4WxazxvSMdaUqcUPOY,9113
+ fractal_server/app/routes/admin/v2/task_group.py,sha256=DncrOAB4q-v3BAmxg35m4EohleriW_FLGE5gpW_Or08,8120
+ fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=bGD5cbZI7t6t-KQuY9YQ5iIksBT68Qbz2kMkVqrGaqc,9482
  fractal_server/app/routes/api/__init__.py,sha256=2IDheFi0OFdsUg7nbUiyahqybvpgXqeHUXIL2QtWrQQ,641
  fractal_server/app/routes/api/v1/__init__.py,sha256=Y2HQdG197J0a7DyQEE2jn53IfxD0EHGhzK1I2JZuEck,958
  fractal_server/app/routes/api/v1/_aux_functions.py,sha256=P9Q48thGH95w0h5cacYoibxqgiiLW4oqZ8rNJ2LIISY,13219
@@ -44,19 +44,19 @@ fractal_server/app/routes/api/v1/workflow.py,sha256=2T93DuEnSshaDCue-JPmjuvGCtbk
  fractal_server/app/routes/api/v1/workflowtask.py,sha256=OYYConwJbmNULDw5I3T-UbSJKrbbBiAHbbBeVcpoFKQ,5785
  fractal_server/app/routes/api/v2/__init__.py,sha256=w4c9WzagaVV5d4TWBX5buu5ENk8jf3YftMQYmhavz9Q,2172
  fractal_server/app/routes/api/v2/_aux_functions.py,sha256=mb4R_qqFxeW0LAis2QJIIfVx8Sydv1jTYaRIMsMxnIk,11720
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=04-C7R8aWiXN9-rSTnZIqz7CAGcLm0y9bjIgJ2lHGZw,5332
+ fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=fZNglsjv3Smc77olkXV4WzVS9oja5J-ejYdjs5RslAA,6739
  fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=PuapLtvSk9yhBAsKNEp1w2oagOMr0YZTo247-CU3hdM,11008
  fractal_server/app/routes/api/v2/dataset.py,sha256=Y6uZz--YSEGgnPYu05rZ9sr1Ug08bNl2v1h3VeApBe8,9441
  fractal_server/app/routes/api/v2/images.py,sha256=JR1rR6qEs81nacjriOXAOBQjAbCXF4Ew7M7mkWdxBU0,7920
  fractal_server/app/routes/api/v2/job.py,sha256=Bga2Kz1OjvDIdxZObWaaXVhNIhC_5JKhKRjEH2_ayEE,5157
  fractal_server/app/routes/api/v2/project.py,sha256=eWYFJ7F2ZYQcpi-_n-rhPF-Q4gJhzYBsVGYFhHZZXAE,6653
  fractal_server/app/routes/api/v2/status.py,sha256=6N9DSZ4iFqbZImorWfEAPoyoFUgEruo4Hweqo0x0xXU,6435
- fractal_server/app/routes/api/v2/submit.py,sha256=jqYix7X6dUJn8w6RWasu1lOFf7T_CcXQlpVY38njE24,8688
+ fractal_server/app/routes/api/v2/submit.py,sha256=72q3HfSKeR-cgNX8CRuqZI6Xd4hFmjlTYDhZvwMZbCo,8656
  fractal_server/app/routes/api/v2/task.py,sha256=K0ik33t7vL8BAK5S7fqyJDNdRK4stGqb_73bSa8tvPE,7159
- fractal_server/app/routes/api/v2/task_collection.py,sha256=_K7c-83jpmaLz1gXM_MbndskkmV6oo_ivg0mWP6hfvs,9621
+ fractal_server/app/routes/api/v2/task_collection.py,sha256=TIr1IPO15TX6CZIQ_LPc0zFtTltuleDISAdMVaVQxfw,9633
  fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=cctW61-C2QYF2KXluS15lLhZJS_kt30Ca6UGLFO32z0,6207
- fractal_server/app/routes/api/v2/task_group.py,sha256=Ove7Vr3p8GKtskHANAZh8TWwOw8dfbFCN2UQFv6DhqM,8136
- fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=SPUB-QHvADaJm3zyuPIyCg-TqbHujwiwr7RauvYGXJ0,8972
+ fractal_server/app/routes/api/v2/task_group.py,sha256=4o2N0z7jK7VUVlJZMM4GveCCc4JKxYJx9-PMmsYIlJQ,8256
+ fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=3o9bCC8ubMwffQPPaxQZy-CjH9IB2RkIReIecI6L2_w,9300
  fractal_server/app/routes/api/v2/workflow.py,sha256=vjCNRzMHaAB4YWbAEWGlELHXDN4GjtE26IkIiB15RGM,8682
  fractal_server/app/routes/api/v2/workflow_import.py,sha256=WJST1AZypvOTGUrjhomYVh4R2ow8RoGpuwzNiq81Pzc,10971
  fractal_server/app/routes/api/v2/workflowtask.py,sha256=ciHTwXXFiFnMF7ZpJ3Xs0q6YfuZrFvIjqndlzAEdZpo,6969
@@ -69,10 +69,9 @@ fractal_server/app/routes/auth/oauth.py,sha256=AnFHbjqL2AgBX3eksI931xD6RTtmbciHB
  fractal_server/app/routes/auth/register.py,sha256=DlHq79iOvGd_gt2v9uwtsqIKeO6i_GKaW59VIkllPqY,587
  fractal_server/app/routes/auth/router.py,sha256=tzJrygXFZlmV_uWelVqTOJMEH-3Fr7ydwlgx1LxRjxY,527
  fractal_server/app/routes/auth/users.py,sha256=FzKNoB-wD32AkVOj1Vi29lGGyOl8NSMCRL9tEhxqpJk,8403
- fractal_server/app/routes/aux/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ fractal_server/app/routes/aux/__init__.py,sha256=LR4bR7RunHAK6jc9IR2bReQd-BdXADdnDccXI4uGeGY,731
  fractal_server/app/routes/aux/_job.py,sha256=q-RCiW17yXnZKAC_0La52RLvhqhxuvbgQJ2MlGXOj8A,702
  fractal_server/app/routes/aux/_runner.py,sha256=FdCVla5DxGAZ__aB7Z8dEJzD_RIeh5tftjrPyqkr8N8,895
- fractal_server/app/routes/aux/_timestamp.py,sha256=MZenRoLfVSnYnL2Vkd8AsJJ9_mV8yDB6u7OumUFjFMM,530
  fractal_server/app/routes/aux/validate_user_settings.py,sha256=Y8eubau0julkwVYB5nA83nDtxh_7RU9Iq0zAhb_dXLA,2351
  fractal_server/app/runner/.gitignore,sha256=ytzN_oyHWXrGU7iFAtoHSTUbM6Rn6kG0Zkddg0xZk6s,16
  fractal_server/app/runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -88,7 +87,7 @@ fractal_server/app/runner/executors/slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77
  fractal_server/app/runner/executors/slurm/ssh/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
  fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py,sha256=bKo5Ja0IGxJWpPWyh9dN0AG-PwzTDZzD5LyaEHB3YU4,3742
  fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py,sha256=rwlqZzoGo4SAb4nSlFjsQJdaCgfM1J6YGcjb8yYxlqc,4506
- fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=si_RHAMnXwQorQ_gWeZ_hQ_cNQbbAuYPjg7nwFQoPVg,58709
+ fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=Z0PL5Ch1Ute1d49_1q2wQbTpw-bVTBagtsR4Ah-gfDo,57746
  fractal_server/app/runner/executors/slurm/sudo/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
  fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py,sha256=wAgwpVcr6JIslKHOuS0FhRa_6T1KCManyRJqA-fifzw,1909
  fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py,sha256=z5LlhaiqAb8pHsF1WwdzXN39C5anQmwjo1rSQgtRAYE,4422
@@ -149,7 +148,7 @@ fractal_server/app/schemas/v1/state.py,sha256=tBXzp_qW2TNNNPBo-AWEaffEU-1GkMBtUo
  fractal_server/app/schemas/v1/task.py,sha256=7BxOZ_qoRQ8n3YbQpDvB7VMcxB5fSYQmR5RLIWhuJ5U,3704
  fractal_server/app/schemas/v1/task_collection.py,sha256=uvq9bcMaGD_qHsh7YtcpoSAkVAbw12eY4DocIO3MKOg,3057
  fractal_server/app/schemas/v1/workflow.py,sha256=oRKamLSuAgrTcv3gMMxGcotDloLL2c3NNgPA39UEmmM,4467
- fractal_server/app/schemas/v2/__init__.py,sha256=dzDsAzLLMgyQBa3b64n71K1u83tbBn9_Oloaqqv1tjA,2444
+ fractal_server/app/schemas/v2/__init__.py,sha256=jAmAxPulME4hFnQJXMTxwoMZsDjZp9jjb-m__OByiXo,2505
  fractal_server/app/schemas/v2/dataset.py,sha256=zRlcO0wDZahTW1PINdVEuARZ7GZUuEqqop7UdE3-5do,2470
  fractal_server/app/schemas/v2/dumps.py,sha256=s6dg-pHZFui6t2Ktm0SMxjKDN-v-ZqBHz9iTsBQF3eU,1712
  fractal_server/app/schemas/v2/job.py,sha256=42V-bFfMvysRplwTKGsL_WshAVsWSM6yjFqypxwrY3k,3020
@@ -158,7 +157,7 @@ fractal_server/app/schemas/v2/project.py,sha256=ABv9LSLVCq1QYthEhBvZOTn_4DFEC-7c
  fractal_server/app/schemas/v2/status.py,sha256=SQaUpQkjFq5c5k5J4rOjNhuQaDOEg8lksPhkKmPU5VU,332
  fractal_server/app/schemas/v2/task.py,sha256=FFAbYwDlqowB8gVMdjFVPVHvAM0T89PYLixUth49xfQ,6870
  fractal_server/app/schemas/v2/task_collection.py,sha256=yHpCRxoj6tKqCiQfUjaTj8SfCn1ChD_P6okfEOzyUDE,6518
- fractal_server/app/schemas/v2/task_group.py,sha256=e4NwFuOmiO0afoZLVsI_XHIpD_o_QWDpzlI7ZoNAYwo,3014
+ fractal_server/app/schemas/v2/task_group.py,sha256=fSjdLbClrpmrPj5hFZMu9DoJW4Y33EnbOh0HjMBsGVc,3784
  fractal_server/app/schemas/v2/workflow.py,sha256=-KWvXnbHBFA3pj5n7mfSyLKJQSqkJmoziIEe7mpLl3M,1875
  fractal_server/app/schemas/v2/workflowtask.py,sha256=vDdMktYbHeYBgB5OuWSv6wRPRXWqvetkeqQ7IC5YtfA,5751
  fractal_server/app/security/__init__.py,sha256=8Xd4GxumZgvxEH1Vli3ULehwdesEPiaAbtffJvAEgNo,12509
@@ -179,7 +178,7 @@ fractal_server/migrations/script.py.mako,sha256=oMXw9LC3zRbinWWPPDgeZ4z9FJrV2zhR
  fractal_server/migrations/versions/034a469ec2eb_task_groups.py,sha256=vrPhC8hfFu1c4HmLHNZyCuqEfecFD8-bWc49bXMNes0,6199
  fractal_server/migrations/versions/091b01f51f88_add_usergroup_and_linkusergroup_table.py,sha256=-BSS9AFTPcu3gYC-sYbawSy4MWQQx8TfMb5BW5EBKmQ,1450
  fractal_server/migrations/versions/19eca0dd47a9_user_settings_project_dir.py,sha256=Q1Gj1cJ0UrdLBJ5AXfFK9QpxTtmcv-4Z3NEGDnxOme4,961
- fractal_server/migrations/versions/3082479ac4ea_taskgroup_activity_and_venv_info_to_.py,sha256=_wy9Yu2YfhpBffUsboAG9junlQ2jRR7qui9wbLvXp7w,3653
+ fractal_server/migrations/versions/3082479ac4ea_taskgroup_activity_and_venv_info_to_.py,sha256=CPGP6ceJOab_2MzxSlxrH7hGxRoXw4d7BTcASL57pMc,3654
  fractal_server/migrations/versions/4c308bcaea2b_add_task_args_schema_and_task_args_.py,sha256=-wHe-fOffmYeAm0JXVl_lxZ7hhDkaEVqxgxpHkb_uL8,954
  fractal_server/migrations/versions/4cedeb448a53_workflowtask_foreign_keys_not_nullables.py,sha256=Mob8McGYAcmgvrseyyYOa54E6Gsgr-4SiGdC-r9O4_A,1157
  fractal_server/migrations/versions/501961cfcd85_remove_link_between_v1_and_v2_tasks_.py,sha256=5ROUgcoZOdjf8kMt6cxuvPhzHmV6xaCxvZEbhUEyZM4,3271
@@ -203,7 +202,7 @@ fractal_server/migrations/versions/efa89c30e0a4_add_project_timestamp_created.py
  fractal_server/migrations/versions/f384e1c0cf5d_drop_task_default_args_columns.py,sha256=9BwqUS9Gf7UW_KjrzHbtViC880qhD452KAytkHWWZyk,746
  fractal_server/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/ssh/__init__.py,sha256=sVUmzxf7_DuXG1xoLQ1_00fo5NPhi2LJipSmU5EAkPs,124
- fractal_server/ssh/_fabric.py,sha256=O1Dxl6xlg9pvqdKyKqy18mYQlslJoJHapXtDMCzZcBA,21711
+ fractal_server/ssh/_fabric.py,sha256=56ud2_kkIkdThADjls5EEJ3leFwDS4bxYpyCb0qI578,22815
  fractal_server/string_tools.py,sha256=XtMNsr5R7GmgzmFi68zkKMedHs8vjGoVMMCXqWhIk9k,2568
  fractal_server/syringe.py,sha256=3qSMW3YaMKKnLdgnooAINOPxnCOxP7y2jeAQYB21Gdo,2786
  fractal_server/tasks/__init__.py,sha256=kadmVUoIghl8s190_Tt-8f-WBqMi8u8oU4Pvw39NHE8,23
@@ -239,8 +238,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=C5WLuY3uGG2s53OEL-__H35-fmSlgu
  fractal_server/urls.py,sha256=5o_qq7PzKKbwq12NHSQZDmDitn5RAOeQ4xufu-2v9Zk,448
  fractal_server/utils.py,sha256=utvmBx8K9I8hRWFquxna2pBaOqe0JifDL_NVPmihEJI,3525
  fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
- fractal_server-2.9.0a4.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.9.0a4.dist-info/METADATA,sha256=9-4cLdLtEDXeXgb_z6i_czY2VnfpFxlaHGe54WU5YGY,4585
- fractal_server-2.9.0a4.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- fractal_server-2.9.0a4.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-2.9.0a4.dist-info/RECORD,,
+ fractal_server-2.9.0a5.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.9.0a5.dist-info/METADATA,sha256=t0iiNYh719Cy3DbAgI3WK7_Sc8SV7RmCcfcCfVOIbco,4585
+ fractal_server-2.9.0a5.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ fractal_server-2.9.0a5.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-2.9.0a5.dist-info/RECORD,,
@@ -1,18 +0,0 @@
- from datetime import datetime
- from datetime import timezone
-
- from fastapi import HTTPException
- from fastapi import status
-
-
- def _convert_to_db_timestamp(dt: datetime) -> datetime:
- """
- This function takes a timezone-aware datetime and converts it to UTC.
- """
- if dt.tzinfo is None:
- raise HTTPException(
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=f"The timestamp provided has no timezone information: {dt}",
- )
- _dt = dt.astimezone(timezone.utc)
- return _dt