fractal-server 2.9.0a0__py3-none-any.whl → 2.9.0a2__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package versions as they appear in their public registries.
@@ -1 +1 @@
- __VERSION__ = "2.9.0a0"
+ __VERSION__ = "2.9.0a2"
@@ -2,17 +2,14 @@
  `db` module, loosely adapted from
  https://testdriven.io/blog/fastapi-sqlmodel/#async-sqlmodel
  """
- import sqlite3
  from typing import AsyncGenerator
  from typing import Generator

  from sqlalchemy import create_engine
- from sqlalchemy import event
  from sqlalchemy.ext.asyncio import AsyncSession
  from sqlalchemy.ext.asyncio import create_async_engine
  from sqlalchemy.orm import Session as DBSyncSession
  from sqlalchemy.orm import sessionmaker
- from sqlalchemy.pool import StaticPool

  from ...config import get_settings
  from ...logger import set_logger
@@ -21,14 +18,6 @@ from ...syringe import Inject

  logger = set_logger(__name__)

- SQLITE_WARNING_MESSAGE = (
- "SQLite is supported (supported version >=3.37, "
- f"current {sqlite3.sqlite_version=}) "
- "but discouraged in production. "
- "Given its partial support for ForeignKey constraints, "
- "database consistency cannot be guaranteed."
- )
-

  class DB:
  """
@@ -56,14 +45,7 @@ class DB:
  settings = Inject(get_settings)
  settings.check_db()

- if settings.DB_ENGINE == "sqlite":
- logger.warning(SQLITE_WARNING_MESSAGE)
- # Set some sqlite-specific options
- engine_kwargs_async = dict(poolclass=StaticPool)
- else:
- engine_kwargs_async = {
- "pool_pre_ping": True,
- }
+ engine_kwargs_async = {"pool_pre_ping": True}

  cls._engine_async = create_async_engine(
  settings.DATABASE_ASYNC_URL,
@@ -83,15 +65,7 @@
  settings = Inject(get_settings)
  settings.check_db()

- if settings.DB_ENGINE == "sqlite":
- logger.warning(SQLITE_WARNING_MESSAGE)
- # Set some sqlite-specific options
- engine_kwargs_sync = dict(
- poolclass=StaticPool,
- connect_args={"check_same_thread": False},
- )
- else:
- engine_kwargs_sync = {}
+ engine_kwargs_sync = {}

  cls._engine_sync = create_engine(
  settings.DATABASE_SYNC_URL,
@@ -107,13 +81,6 @@ class DB:
  future=True,
  )

- @event.listens_for(cls._engine_sync, "connect")
- def set_sqlite_pragma(dbapi_connection, connection_record):
- if settings.DB_ENGINE == "sqlite":
- cursor = dbapi_connection.cursor()
- cursor.execute("PRAGMA journal_mode=WAL")
- cursor.close()
-
  @classmethod
  async def get_async_db(cls) -> AsyncGenerator[AsyncSession, None]:
  """
@@ -48,6 +48,10 @@ class TaskGroupV2(SQLModel, table=True):
  default_factory=get_timestamp,
  sa_column=Column(DateTime(timezone=True), nullable=False),
  )
+ timestamp_last_used: Optional[datetime] = Field(
+ default=None,
+ sa_column=Column(DateTime(timezone=True), nullable=True),
+ )

  @property
  def pip_install_string(self) -> str:
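The new nullable `timestamp_last_used` column records when a task group was last used (the submit route further below stamps it at job submission). A hypothetical maintenance query that such a column enables, shown only as a sketch (the 90-day cutoff and the query itself are not part of this release):

```python
from datetime import datetime, timedelta, timezone

from sqlalchemy import or_
from sqlmodel import select

from fractal_server.app.models.v2 import TaskGroupV2

# Hypothetical cutoff: task groups never used, or not used for 90 days
cutoff = datetime.now(timezone.utc) - timedelta(days=90)
stale_task_groups_stm = select(TaskGroupV2).where(
    or_(
        TaskGroupV2.timestamp_last_used.is_(None),
        TaskGroupV2.timestamp_last_used < cutoff,
    )
)
```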
@@ -221,27 +221,30 @@ async def _get_valid_user_group_id(


  async def _get_collection_task_group_activity_status_message(
- task_group: TaskGroupV2, db: AsyncSession
+ task_group_id: int,
+ db: AsyncSession,
  ) -> str:
+
  res = await db.execute(
- select(TaskGroupActivityV2).where(
- TaskGroupActivityV2.taskgroupv2_id == task_group.id
- and TaskGroupActivityV2.action == TaskGroupActivityActionV2.COLLECT
- )
+ select(TaskGroupActivityV2)
+ .where(TaskGroupActivityV2.taskgroupv2_id == task_group_id)
+ .where(TaskGroupActivityV2.action == TaskGroupActivityActionV2.COLLECT)
  )
  task_group_activity_list = res.scalars().all()
  if len(task_group_activity_list) > 1:
  msg = (
- "Expected one TaskGroupActivityV2 associated to TaskGroup "
- f"{task_group.id}, found {len(task_group_activity_list)} "
- f"(IDs: {[tga.id for tga in task_group_activity_list]}).\n"
+ "\nWarning: "
+ "Expected only one TaskGroupActivityV2 associated to TaskGroup "
+ f"{task_group_id}, found {len(task_group_activity_list)} "
+ f"(IDs: {[tga.id for tga in task_group_activity_list]})."
  "Warning: this should have not happened, please contact an admin."
  )
  elif len(task_group_activity_list) == 1:
  msg = (
- "\nThere exists a task-group activity"
- f"(ID={task_group_activity_list[0].id}) for "
- f"such task group (ID={task_group.id}), with status "
+ "\nNote:"
+ "There exists another task-group collection "
+ f"(activity ID={task_group_activity_list[0].id}) for "
+ f"this task group (ID={task_group_id}), with status "
  f"'{task_group_activity_list[0].status}'."
  )
  else:
@@ -277,7 +280,7 @@ async def _verify_non_duplication_user_constraint(
  ),
  )
  state_msg = await _get_collection_task_group_activity_status_message(
- duplicate[0], db
+ duplicate[0].id, db
  )
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
@@ -319,7 +322,7 @@ async def _verify_non_duplication_group_constraint(
  ),
  )
  state_msg = await _get_collection_task_group_activity_status_message(
- duplicate[0], db
+ duplicate[0].id, db
  )
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
@@ -30,6 +30,7 @@ from ...aux.validate_user_settings import validate_user_settings
  from ._aux_functions import _get_dataset_check_owner
  from ._aux_functions import _get_workflow_check_owner
  from ._aux_functions import clean_app_job_list_v2
+ from fractal_server.app.models import TaskGroupV2
  from fractal_server.app.models import UserOAuth
  from fractal_server.app.routes.api.v2._aux_functions_tasks import (
  _get_task_read_access,
@@ -61,6 +62,8 @@ async def apply_workflow(
  db: AsyncSession = Depends(get_async_db),
  ) -> Optional[JobReadV2]:

+ now = get_timestamp()
+
  # Remove non-submitted V2 jobs from the app state when the list grows
  # beyond a threshold
  settings = Inject(get_settings)
@@ -112,22 +115,23 @@ async def apply_workflow(
  )

  # Check that tasks have read-access and are `active`
+ used_task_group_ids = set()
  for wftask in workflow.task_list[
  first_task_index : last_task_index + 1 # noqa: E203
  ]:
- await _get_task_read_access(
+ task = await _get_task_read_access(
  user_id=user.id,
  task_id=wftask.task_id,
  require_active=True,
  db=db,
  )
+ used_task_group_ids.add(task.taskgroupv2_id)

  # Validate user settings
  FRACTAL_RUNNER_BACKEND = settings.FRACTAL_RUNNER_BACKEND
  user_settings = await validate_user_settings(
  user=user, backend=FRACTAL_RUNNER_BACKEND, db=db
  )
-
  # Check that no other job with the same dataset_id is SUBMITTED
  stm = (
  select(JobV2)
@@ -184,8 +188,18 @@ async def apply_workflow(
  await db.commit()
  await db.refresh(job)

+ # Update TaskGroupV2.timestamp_last_used
+ res = await db.execute(
+ select(TaskGroupV2).where(TaskGroupV2.id.in_(used_task_group_ids))
+ )
+ used_task_groups = res.scalars().all()
+ for used_task_group in used_task_groups:
+ used_task_group.timestamp_last_used = now
+ db.add(used_task_group)
+ await db.commit()
+
  # Define server-side job directory
- timestamp_string = get_timestamp().strftime("%Y%m%d_%H%M%S")
+ timestamp_string = now.strftime("%Y%m%d_%H%M%S")
  WORKFLOW_DIR_LOCAL = settings.FRACTAL_RUNNER_WORKING_BASE_DIR / (
  f"proj_v2_{project_id:07d}_wf_{workflow_id:07d}_job_{job.id:07d}"
  f"_{timestamp_string}"
@@ -35,7 +35,7 @@ from fractal_server.app.schemas.v2 import (
  )
  from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
  from fractal_server.tasks.v2.local.collect import (
- collect_package_local,
+ collect_local,
  )
  from fractal_server.tasks.v2.utils_package_names import _parse_wheel_filename
  from fractal_server.tasks.v2.utils_package_names import normalize_package_name
@@ -248,7 +248,7 @@ async def collect_tasks_pip(
  # SSH task collection

  from fractal_server.tasks.v2.ssh.collect import (
- collect_package_ssh,
+ collect_ssh,
  )

  # User appropriate FractalSSH object
@@ -261,7 +261,7 @@ async def collect_tasks_pip(
  fractal_ssh = fractal_ssh_list.get(**ssh_credentials)

  background_tasks.add_task(
- collect_package_ssh,
+ collect_ssh,
  task_group_id=task_group.id,
  task_group_activity_id=task_group_activity.id,
  fractal_ssh=fractal_ssh,
@@ -271,7 +271,7 @@ async def collect_tasks_pip(
  else:
  # Local task collection
  background_tasks.add_task(
- collect_package_local,
+ collect_local,
  task_group_id=task_group.id,
  task_group_activity_id=task_group_activity.id,
  )
@@ -4,15 +4,10 @@ from datetime import timezone
  from fastapi import HTTPException
  from fastapi import status

- from fractal_server.config import get_settings
- from fractal_server.syringe import Inject
-

  def _convert_to_db_timestamp(dt: datetime) -> datetime:
  """
  This function takes a timezone-aware datetime and converts it to UTC.
- If using SQLite, it also removes the timezone information in order to make
- the datetime comparable with datetimes in the database.
  """
  if dt.tzinfo is None:
  raise HTTPException(
@@ -20,6 +15,4 @@ def _convert_to_db_timestamp(dt: datetime) -> datetime:
  detail=f"The timestamp provided has no timezone information: {dt}",
  )
  _dt = dt.astimezone(timezone.utc)
- if Inject(get_settings).DB_ENGINE == "sqlite":
- return _dt.replace(tzinfo=None)
  return _dt
@@ -108,8 +108,7 @@ def val_unique_list(attribute: str):
  def valutc(attribute: str):
  def val(timestamp: Optional[datetime]) -> Optional[datetime]:
  """
- Replacing `tzinfo` with `timezone.utc` is just required by SQLite data.
- If using Postgres, this function leaves the datetime exactly as it is.
+ Replace `tzinfo` with `timezone.utc`.
  """
  if timestamp is not None:
  return timestamp.replace(tzinfo=timezone.utc)
@@ -1,6 +1,5 @@
  from datetime import datetime
  from enum import Enum
- from typing import Literal
  from typing import Optional

  from pydantic import BaseModel
@@ -74,7 +73,7 @@ class TaskGroupReadV2(BaseModel):
  user_id: int
  user_group_id: Optional[int] = None

- origin: Literal["pypi", "wheel-file", "other"]
+ origin: TaskGroupV2OriginEnum
  pkg_name: str
  version: Optional[str] = None
  python_version: Optional[str] = None
@@ -90,6 +89,7 @@ class TaskGroupReadV2(BaseModel):

  active: bool
  timestamp_created: datetime
+ timestamp_last_used: Optional[datetime] = None


  class TaskGroupUpdateV2(BaseModel, extra=Extra.forbid):
fractal_server/config.py CHANGED
@@ -16,7 +16,6 @@ import shutil
  import sys
  from os import environ
  from os import getenv
- from os.path import abspath
  from pathlib import Path
  from typing import Literal
  from typing import Optional
@@ -167,10 +166,6 @@ class Settings(BaseSettings):
  ###########################################################################
  # DATABASE
  ###########################################################################
- DB_ENGINE: Literal["sqlite", "postgres-psycopg"] = "sqlite"
- """
- Database engine to use (supported: `sqlite`, `postgres-psycopg`).
- """
  DB_ECHO: bool = False
  """
  If `True`, make database operations verbose.
@@ -196,44 +191,21 @@ class Settings(BaseSettings):
  Name of the PostgreSQL database to connect to.
  """

- SQLITE_PATH: Optional[str]
- """
- File path where the SQLite database is located (or will be located).
- """
-
  @property
  def DATABASE_ASYNC_URL(self) -> URL:
- if self.DB_ENGINE == "postgres-psycopg":
- url = URL.create(
- drivername="postgresql+psycopg",
- username=self.POSTGRES_USER,
- password=self.POSTGRES_PASSWORD,
- host=self.POSTGRES_HOST,
- port=self.POSTGRES_PORT,
- database=self.POSTGRES_DB,
- )
- else:
- if not self.SQLITE_PATH:
- raise FractalConfigurationError(
- "SQLITE_PATH path cannot be None"
- )
- sqlite_path = abspath(self.SQLITE_PATH)
- url = URL.create(
- drivername="sqlite+aiosqlite",
- database=sqlite_path,
- )
+ url = URL.create(
+ drivername="postgresql+psycopg",
+ username=self.POSTGRES_USER,
+ password=self.POSTGRES_PASSWORD,
+ host=self.POSTGRES_HOST,
+ port=self.POSTGRES_PORT,
+ database=self.POSTGRES_DB,
+ )
  return url

  @property
  def DATABASE_SYNC_URL(self):
- if self.DB_ENGINE == "postgres-psycopg":
- return self.DATABASE_ASYNC_URL.set(drivername="postgresql+psycopg")
- else:
- if not self.SQLITE_PATH:
- raise FractalConfigurationError(
- "SQLITE_PATH path cannot be None"
- )
- return self.DATABASE_ASYNC_URL.set(drivername="sqlite")
+ return self.DATABASE_ASYNC_URL.set(drivername="postgresql+psycopg")

  ###########################################################################
  # FRACTAL SPECIFIC
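For reference, a minimal sketch of the Postgres-only URL that `DATABASE_ASYNC_URL` now builds; the credentials below are placeholders, not defaults shipped by the package:

```python
from sqlalchemy.engine import URL

# Placeholder values standing in for the POSTGRES_* settings
url = URL.create(
    drivername="postgresql+psycopg",
    username="fractal",
    password="fractal-pwd",
    host="localhost",
    port=5432,
    database="fractal_db",
)
# postgresql+psycopg://fractal:fractal-pwd@localhost:5432/fractal_db
print(url.render_as_string(hide_password=False))
```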
@@ -533,25 +505,8 @@ class Settings(BaseSettings):
  """
  Checks that db environment variables are properly set.
  """
- if self.DB_ENGINE == "postgres-psycopg":
- if not self.POSTGRES_DB:
- raise FractalConfigurationError(
- "POSTGRES_DB cannot be None when DB_ENGINE="
- "postgres-psycopg."
- )
-
- try:
- import psycopg # noqa: F401
- except ModuleNotFoundError:
- raise FractalConfigurationError(
- "DB engine is `postgres-psycopg` but `psycopg` is not "
- "available"
- )
- else:
- if not self.SQLITE_PATH:
- raise FractalConfigurationError(
- "SQLITE_PATH cannot be None when DB_ENGINE=sqlite."
- )
+ if not self.POSTGRES_DB:
+ raise FractalConfigurationError("POSTGRES_DB cannot be None.")

  def check_runner(self) -> None:

@@ -67,6 +67,13 @@ def upgrade() -> None:
  batch_op.add_column(
  sa.Column("venv_file_number", sa.Integer(), nullable=True)
  )
+ batch_op.add_column(
+ sa.Column(
+ "timestamp_last_used",
+ sa.DateTime(timezone=True),
+ nullable=True,
+ )
+ )

  # ### end Alembic commands ###

@@ -74,6 +81,7 @@ def upgrade() -> None:
  def downgrade() -> None:
  # ### commands auto generated by Alembic - please adjust! ###
  with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
+ batch_op.drop_column("timestamp_last_used")
  batch_op.drop_column("venv_file_number")
  batch_op.drop_column("venv_size_in_kB")
  batch_op.drop_column("pip_freeze")
@@ -196,23 +196,57 @@ class FractalSSH(object):
  """
  Open the SSH connection and handle exceptions.

- This function can be called from within other functions that use
- `connection`, so that we can provide a meaningful error in case the
- SSH connection cannot be opened.
+ This method should always be called at the beginning of background
+ operations that use FractalSSH, so that:
+
+ 1. We try to restore unusable connections (e.g. due to closed socket).
+ 2. We provide an informative error if connection cannot be established.
  """
- if not self._connection.is_connected:
+ self.logger.debug(
+ f"[check_connection] {self._connection.is_connected=}"
+ )
+ if self._connection.is_connected:
+ # Even if the connection appears open, it could be broken for
+ # external reasons (e.g. the socket is closed because the SSH
+ # server was restarted). In these cases, we catch the error and
+ # try to re-open the connection.
  try:
- with _acquire_lock_with_timeout(
- lock=self._lock,
- label="_connection.open",
- timeout=self.default_lock_timeout,
- ):
- self._connection.open()
- except Exception as e:
- raise RuntimeError(
- f"Cannot open SSH connection. Original error:\n{str(e)}"
+ self.logger.info(
+ "[check_connection] Run dummy command to check connection."
+ )
+ # Run both an SFTP and an SSH command, as they correspond to
+ # different sockets
+ self.remote_exists("/dummy/path/")
+ self.run_command(cmd="whoami")
+ self.logger.info(
+ "[check_connection] SSH connection is already OK, exit."
+ )
+ return
+ except (OSError, EOFError) as e:
+ self.logger.warning(
+ f"[check_connection] Detected error {str(e)}, re-open."
+ )
+ # Try opening the connection (if it was closed) or to re-open it (if
+ # an error happened).
+ try:
+ self.close()
+ with _acquire_lock_with_timeout(
+ lock=self._lock,
+ label="_connection.open",
+ timeout=self.default_lock_timeout,
+ logger_name=self.logger_name,
+ ):
+ self._connection.open()
+ self._connection.client.open_sftp()
+ self.logger.info(
+ "[check_connection] SSH connection opened, exit."
  )

+ except Exception as e:
+ raise RuntimeError(
+ f"Cannot open SSH connection. Original error:\n{str(e)}"
+ )
+
  def close(self) -> None:
  """
  Aggressively close `self._connection`.
@@ -228,9 +262,8 @@ class FractalSSH(object):
  timeout=self.default_lock_timeout,
  ):
  self._connection.close()
-
- if self._connection.client is not None:
- self._connection.client.close()
+ if self._connection.client is not None:
+ self._connection.client.close()

  def run_command(
  self,
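A short usage sketch of the calling convention described in the new docstring: background operations call the method above (named `check_connection`, per its log prefix) before running remote commands. The wrapper function here is hypothetical:

```python
from fractal_server.ssh._fabric import FractalSSH


def hypothetical_background_task(fractal_ssh: FractalSSH) -> None:
    # Restore the connection if the socket died, or fail with a clear error
    fractal_ssh.check_connection()
    # Only then run the actual remote commands
    fractal_ssh.run_command(cmd="hostname")
```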
@@ -1,3 +1,3 @@
- from .collect import collect_package_local # noqa
+ from .collect import collect_local # noqa
  from .deactivate import deactivate_local # noqa
  from .reactivate import reactivate_local # noqa
@@ -47,7 +47,7 @@ def _copy_wheel_file_local(task_group: TaskGroupV2) -> str:
  return dest


- def collect_package_local(
+ def collect_local(
  *,
  task_group_activity_id: int,
  task_group_id: int,
@@ -12,6 +12,7 @@ from fractal_server.app.db import get_sync_db
  from fractal_server.app.models.v2 import TaskGroupActivityV2
  from fractal_server.app.models.v2 import TaskGroupV2
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
+ from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
  from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
  from fractal_server.logger import set_logger
  from fractal_server.tasks.utils import get_log_path
@@ -83,6 +84,7 @@ def deactivate_local(

  activity.status = TaskGroupActivityStatusV2.ONGOING
  activity = add_commit_refresh(obj=activity, db=db)
+
  if task_group.pip_freeze is None:
  logger.warning(
  "Recreate pip-freeze information, since "
@@ -119,34 +121,80 @@ def deactivate_local(
  task_group = add_commit_refresh(obj=task_group, db=db)
  logger.info("Add pip freeze stdout to TaskGroupV2 - end")

- if task_group.origin == "wheel" and (
- task_group.wheel_path is None
- or not Path(task_group.wheel_path).exists()
- ):
+ # Handle some specific cases for wheel-file case
+ if task_group.origin == TaskGroupV2OriginEnum.WHEELFILE:

- logger.error(
- "Cannot find task_group wheel_path with "
- f"{task_group_id=} :\n"
- f"{task_group=}\n. Exit."
+ logger.info(
+ f"Handle specific cases for {task_group.origin=}."
  )
- error_msg = f"{task_group.wheel_path} does not exist."
- logger.error(error_msg)
- fail_and_cleanup(
- task_group=task_group,
- task_group_activity=activity,
- logger_name=LOGGER_NAME,
- log_file_path=log_file_path,
- exception=FileNotFoundError(error_msg),
- db=db,
- )
- return

- # At this point we are sure that venv_path
- # wheel_path and pip_freeze exist
+ # Blocking situation: `wheel_path` is not set or points
+ # to a missing path
+ if (
+ task_group.wheel_path is None
+ or not Path(task_group.wheel_path).exists()
+ ):
+ error_msg = (
+ "Invalid wheel path for task group with "
+ f"{task_group_id=}. {task_group.wheel_path=} is "
+ "unset or does not exist."
+ )
+ logger.error(error_msg)
+ fail_and_cleanup(
+ task_group=task_group,
+ task_group_activity=activity,
+ logger_name=LOGGER_NAME,
+ log_file_path=log_file_path,
+ exception=FileNotFoundError(error_msg),
+ db=db,
+ )
+ return
+
+ # Recoverable situation: `wheel_path` was not yet copied
+ # over to the correct server-side folder
+ wheel_path_parent_dir = Path(task_group.wheel_path).parent
+ if wheel_path_parent_dir != Path(task_group.path):
+ logger.warning(
+ f"{wheel_path_parent_dir.as_posix()} differs from "
+ f"{task_group.path}. NOTE: this should only "
+ "happen for task groups created before 2.9.0."
+ )
+
+ if task_group.wheel_path not in task_group.pip_freeze:
+ raise ValueError(
+ f"Cannot find {task_group.wheel_path=} in "
+ "pip-freeze data. Exit."
+ )
+
+ logger.info(
+ f"Now copy wheel file into {task_group.path}."
+ )
+ new_wheel_path = (
+ Path(task_group.path)
+ / Path(task_group.wheel_path).name
+ ).as_posix()
+ shutil.copy(task_group.wheel_path, new_wheel_path)
+ logger.info(f"Copied wheel file to {new_wheel_path}.")
+
+ task_group.wheel_path = new_wheel_path
+ new_pip_freeze = task_group.pip_freeze.replace(
+ task_group.wheel_path,
+ new_wheel_path,
+ )
+ task_group.pip_freeze = new_pip_freeze
+ task_group = add_commit_refresh(obj=task_group, db=db)
+ logger.info(
+ "Updated `wheel_path` and `pip_freeze` "
+ "task-group attributes."
+ )
+
+ # We now have all required information for reactivating the
+ # virtual environment at a later point
+ logger.info(f"Now removing {task_group.venv_path}.")
  shutil.rmtree(task_group.venv_path)
-
- activity.log = f"All good, {task_group.venv_path} removed."
+ logger.info(f"All good, {task_group.venv_path} removed.")
  activity.status = TaskGroupActivityStatusV2.OK
+ activity.log = get_current_log(log_file_path)
  activity.timestamp_ended = get_timestamp()
  activity = add_commit_refresh(obj=activity, db=db)

@@ -1,5 +1,4 @@
  from pathlib import Path
- from typing import Optional

  from fractal_server.logger import get_logger
  from fractal_server.tasks.v2.utils_templates import customize_template
@@ -11,7 +10,7 @@ def _customize_and_run_template(
  replacements: list[tuple[str, str]],
  script_dir: str,
  logger_name: str,
- prefix: Optional[int] = None,
+ prefix: int,
  ) -> str:
  """
  Customize one of the template bash scripts.
@@ -31,12 +30,7 @@ def _customize_and_run_template(
  f"Invalid {template_filename=} (it must end with '.sh')."
  )

- template_filename_stripped = template_filename[:-3]
-
- if prefix is not None:
- script_filename = f"{prefix}{template_filename_stripped}"
- else:
- script_filename = template_filename_stripped
+ script_filename = f"{prefix}{template_filename}"
  script_path_local = Path(script_dir) / script_filename
  # Read template
  customize_template(
@@ -47,6 +41,5 @@ def _customize_and_run_template(
  cmd = f"bash {script_path_local}"
  logger.debug(f"Now run '{cmd}' ")
  stdout = execute_command_sync(command=cmd, logger_name=logger_name)
- logger.debug(f"Standard output of '{cmd}':\n{stdout}")
  logger.debug(f"_customize_and_run_template {template_filename} - END")
  return stdout
@@ -63,8 +63,7 @@ def _customize_and_run_template(
  raise ValueError(
  f"Invalid {template_filename=} (it must end with '.sh')."
  )
- template_filename_stripped = Path(template_filename).stem
- script_filename = f"{prefix}{template_filename_stripped}"
+ script_filename = f"{prefix}_{template_filename}"
  script_path_local = Path(script_dir_local) / script_filename

  customize_template(
@@ -109,7 +108,7 @@ def _copy_wheel_file_ssh(
  return dest


- def collect_package_ssh(
+ def collect_ssh(
  *,
  task_group_id: int,
  task_group_activity_id: int,
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fractal-server
- Version: 2.9.0a0
+ Version: 2.9.0a2
  Summary: Server component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause
@@ -12,7 +12,6 @@ Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
- Requires-Dist: aiosqlite (>=0.19.0,<0.20.0)
  Requires-Dist: alembic (>=1.13.1,<2.0.0)
  Requires-Dist: bcrypt (==4.0.1)
  Requires-Dist: cloudpickle (>=3.0.0,<3.1.0)
@@ -1,8 +1,8 @@
- fractal_server/__init__.py,sha256=nUyCW0fhiPHMc-u4jKcBUavNOA93MoQcsdShypf4ins,24
+ fractal_server/__init__.py,sha256=d0Y48obcx61ZaWA7ZS1o1kdZRZ6sJMhUfuKDOT5xPKU,24
  fractal_server/__main__.py,sha256=dEkCfzLLQrIlxsGC-HBfoR-RBMWnJDgNrxYTyzmE9c0,6146
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- fractal_server/app/db/__init__.py,sha256=81rK9w1__Z6PJ5cEcChPVc-wI9YOK4fN--_5Opry0MQ,4119
+ fractal_server/app/db/__init__.py,sha256=wup2wcOkyOh8Vd0Xm76PZn_naxeMqaL4eF8DHHXTGlI,2889
  fractal_server/app/models/__init__.py,sha256=aG7mf1zZbsgzDSp7GHEcZhdjHfW3TGPOLCI8MrvYhPw,500
  fractal_server/app/models/linkusergroup.py,sha256=LWTUfhH2uAnn_4moK7QdRUIHWtpw-hPZuW-5jClv_OE,610
  fractal_server/app/models/linkuserproject.py,sha256=eQaourbGRshvlMVlKzLYJKHEjfsW1CbWws9yW4eHXhA,567
@@ -20,7 +20,7 @@ fractal_server/app/models/v2/dataset.py,sha256=-7sxHEw4IIAvF_uSan7tA3o8hvoakBkQ0
  fractal_server/app/models/v2/job.py,sha256=ypJmN-qspkKBGhBG7Mt-HypSQqcQ2EmB4Bzzb2-y550,1535
  fractal_server/app/models/v2/project.py,sha256=rAHoh5KfYwIaW7rTX0_O0jvWmxEvfo1BafvmcXuSSRk,786
  fractal_server/app/models/v2/task.py,sha256=jebD28Pz8tGcsWCItxj6uKjcD8BMMnnU8dqYhvhEB6c,1520
- fractal_server/app/models/v2/task_group.py,sha256=X2Qlg-at2RcbvlWbpEMT3kZCk5WOVmJ6cOnKF-RDnQ0,3107
+ fractal_server/app/models/v2/task_group.py,sha256=Lxtwxk6pfffM8IehqkFvhDuecSXnXqTWeUlf7Y78oss,3254
  fractal_server/app/models/v2/workflow.py,sha256=YBgFGCziUgU0aJ5EM3Svu9W2c46AewZO9VBlFCHiSps,1069
  fractal_server/app/models/v2/workflowtask.py,sha256=iDuJYk8kp4PNqGmbKRtGI7y-QsbjkNd_gDsbMzL4i-g,1274
  fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -44,15 +44,15 @@ fractal_server/app/routes/api/v1/workflowtask.py,sha256=OYYConwJbmNULDw5I3T-UbSJ
  fractal_server/app/routes/api/v2/__init__.py,sha256=w4c9WzagaVV5d4TWBX5buu5ENk8jf3YftMQYmhavz9Q,2172
  fractal_server/app/routes/api/v2/_aux_functions.py,sha256=mb4R_qqFxeW0LAis2QJIIfVx8Sydv1jTYaRIMsMxnIk,11720
  fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=04-C7R8aWiXN9-rSTnZIqz7CAGcLm0y9bjIgJ2lHGZw,5332
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=kOoLy5a1YwgfSgsbnCVm_pc5DH6oiG0SU0k-pVDeOY0,10947
+ fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=PuapLtvSk9yhBAsKNEp1w2oagOMr0YZTo247-CU3hdM,11008
  fractal_server/app/routes/api/v2/dataset.py,sha256=Y6uZz--YSEGgnPYu05rZ9sr1Ug08bNl2v1h3VeApBe8,9441
  fractal_server/app/routes/api/v2/images.py,sha256=JR1rR6qEs81nacjriOXAOBQjAbCXF4Ew7M7mkWdxBU0,7920
  fractal_server/app/routes/api/v2/job.py,sha256=Bga2Kz1OjvDIdxZObWaaXVhNIhC_5JKhKRjEH2_ayEE,5157
  fractal_server/app/routes/api/v2/project.py,sha256=eWYFJ7F2ZYQcpi-_n-rhPF-Q4gJhzYBsVGYFhHZZXAE,6653
  fractal_server/app/routes/api/v2/status.py,sha256=6N9DSZ4iFqbZImorWfEAPoyoFUgEruo4Hweqo0x0xXU,6435
- fractal_server/app/routes/api/v2/submit.py,sha256=tq-NGnUlpIcm_MRN47rJRHkRcIJ5HiL4Wj1wItJy3o8,8185
+ fractal_server/app/routes/api/v2/submit.py,sha256=jqYix7X6dUJn8w6RWasu1lOFf7T_CcXQlpVY38njE24,8688
  fractal_server/app/routes/api/v2/task.py,sha256=K0ik33t7vL8BAK5S7fqyJDNdRK4stGqb_73bSa8tvPE,7159
- fractal_server/app/routes/api/v2/task_collection.py,sha256=1Jch0xRdcb--al6Oigk5fEMIOcIvNwa_aidFEHCd-FQ,9852
+ fractal_server/app/routes/api/v2/task_collection.py,sha256=-DVhultvdI3Jh8Jq8W5np6Lnkh5oisjbKCwxFmwddmo,9820
  fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=cctW61-C2QYF2KXluS15lLhZJS_kt30Ca6UGLFO32z0,6207
  fractal_server/app/routes/api/v2/task_group.py,sha256=Ove7Vr3p8GKtskHANAZh8TWwOw8dfbFCN2UQFv6DhqM,8136
  fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=UODAv1kk8qgT7wvcVKK6eRs-fEBw3T1pPwmT6VkkAn4,7404
@@ -71,7 +71,7 @@ fractal_server/app/routes/auth/users.py,sha256=FzKNoB-wD32AkVOj1Vi29lGGyOl8NSMCR
  fractal_server/app/routes/aux/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/aux/_job.py,sha256=q-RCiW17yXnZKAC_0La52RLvhqhxuvbgQJ2MlGXOj8A,702
  fractal_server/app/routes/aux/_runner.py,sha256=FdCVla5DxGAZ__aB7Z8dEJzD_RIeh5tftjrPyqkr8N8,895
- fractal_server/app/routes/aux/_timestamp.py,sha256=t3X1WQIGZVAf0snxZzUeOi9Gh4zQbxmiYPGc9NeuLHA,850
+ fractal_server/app/routes/aux/_timestamp.py,sha256=MZenRoLfVSnYnL2Vkd8AsJJ9_mV8yDB6u7OumUFjFMM,530
  fractal_server/app/routes/aux/validate_user_settings.py,sha256=Y8eubau0julkwVYB5nA83nDtxh_7RU9Iq0zAhb_dXLA,2351
  fractal_server/app/runner/.gitignore,sha256=ytzN_oyHWXrGU7iFAtoHSTUbM6Rn6kG0Zkddg0xZk6s,16
  fractal_server/app/runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -134,7 +134,7 @@ fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=1fWvQ6YZUUnDhO
  fractal_server/app/runner/v2/task_interface.py,sha256=hT3p-bRGsLNAR_dNv_PYFoqzIF_EQtSsGwl38j1haYA,1824
  fractal_server/app/runner/versions.py,sha256=dSaPRWqmFPHjg20kTCHmi_dmGNcCETflDtDLronNanU,852
  fractal_server/app/schemas/__init__.py,sha256=stURAU_t3AOBaH0HSUbV-GKhlPKngnnIMoqWc3orFyI,135
- fractal_server/app/schemas/_validators.py,sha256=3YBteFMxrEE9DOYlIHvIMsUOWTPSHEZlDL7dUQzBQjs,3616
+ fractal_server/app/schemas/_validators.py,sha256=Ft-Ueol_rFwGjEebKVK2bUELm7w7bFG8MOpTamlY8Fs,3503
  fractal_server/app/schemas/user.py,sha256=aUD8YAcfYTEO06TEUoTx4heVrXFiX7E2Mb8D2--4FsA,2130
  fractal_server/app/schemas/user_group.py,sha256=YwJvYgj-PI66LWy38CEd_FIZPsBV1_2N5zJPGFcFvBw,2143
  fractal_server/app/schemas/user_settings.py,sha256=TalISeEfCrtN8LgqbLx1Q8ZPoeiZnbksg5NYAVzkIqY,3527
@@ -157,12 +157,12 @@ fractal_server/app/schemas/v2/project.py,sha256=UXEA0UUUe0bFFOVLLmVtvDFLBO5vmD1J
  fractal_server/app/schemas/v2/status.py,sha256=SQaUpQkjFq5c5k5J4rOjNhuQaDOEg8lksPhkKmPU5VU,332
  fractal_server/app/schemas/v2/task.py,sha256=FFAbYwDlqowB8gVMdjFVPVHvAM0T89PYLixUth49xfQ,6870
  fractal_server/app/schemas/v2/task_collection.py,sha256=yHpCRxoj6tKqCiQfUjaTj8SfCn1ChD_P6okfEOzyUDE,6518
- fractal_server/app/schemas/v2/task_group.py,sha256=0eOjQ1PjvxxHQULX1f6gDGhm30BtKW6k5CcRaSUaQtg,3007
+ fractal_server/app/schemas/v2/task_group.py,sha256=e4NwFuOmiO0afoZLVsI_XHIpD_o_QWDpzlI7ZoNAYwo,3014
  fractal_server/app/schemas/v2/workflow.py,sha256=HSNQSrBRdoBzh8Igr76FUWCAWvVzykrqmUv1vGv-8og,2026
  fractal_server/app/schemas/v2/workflowtask.py,sha256=vDdMktYbHeYBgB5OuWSv6wRPRXWqvetkeqQ7IC5YtfA,5751
  fractal_server/app/security/__init__.py,sha256=8Xd4GxumZgvxEH1Vli3ULehwdesEPiaAbtffJvAEgNo,12509
  fractal_server/app/user_settings.py,sha256=aZgQ3i0JkHfgwLGW1ee6Gzr1ae3IioFfJKKSsSS8Svk,1312
- fractal_server/config.py,sha256=-G1RvmaeSb6_wffUFuaAmhJV3u1q3HRpMLEfpGXBrz4,22797
+ fractal_server/config.py,sha256=1MmVIbnztrFA0w2gYIjgJXg0bqVDsSeSEsMFimb4y74,21153
  fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
  fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
  fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
@@ -178,7 +178,7 @@ fractal_server/migrations/script.py.mako,sha256=oMXw9LC3zRbinWWPPDgeZ4z9FJrV2zhR
  fractal_server/migrations/versions/034a469ec2eb_task_groups.py,sha256=vrPhC8hfFu1c4HmLHNZyCuqEfecFD8-bWc49bXMNes0,6199
  fractal_server/migrations/versions/091b01f51f88_add_usergroup_and_linkusergroup_table.py,sha256=-BSS9AFTPcu3gYC-sYbawSy4MWQQx8TfMb5BW5EBKmQ,1450
  fractal_server/migrations/versions/19eca0dd47a9_user_settings_project_dir.py,sha256=Q1Gj1cJ0UrdLBJ5AXfFK9QpxTtmcv-4Z3NEGDnxOme4,961
- fractal_server/migrations/versions/3082479ac4ea_taskgroup_activity_and_venv_info_to_.py,sha256=oAqSwW9ilKm5x6Wez3JpJFU2-ls2_O8-j1R6KLEir08,3411
+ fractal_server/migrations/versions/3082479ac4ea_taskgroup_activity_and_venv_info_to_.py,sha256=_wy9Yu2YfhpBffUsboAG9junlQ2jRR7qui9wbLvXp7w,3653
  fractal_server/migrations/versions/4c308bcaea2b_add_task_args_schema_and_task_args_.py,sha256=-wHe-fOffmYeAm0JXVl_lxZ7hhDkaEVqxgxpHkb_uL8,954
  fractal_server/migrations/versions/4cedeb448a53_workflowtask_foreign_keys_not_nullables.py,sha256=Mob8McGYAcmgvrseyyYOa54E6Gsgr-4SiGdC-r9O4_A,1157
  fractal_server/migrations/versions/501961cfcd85_remove_link_between_v1_and_v2_tasks_.py,sha256=5ROUgcoZOdjf8kMt6cxuvPhzHmV6xaCxvZEbhUEyZM4,3271
@@ -202,7 +202,7 @@ fractal_server/migrations/versions/efa89c30e0a4_add_project_timestamp_created.py
  fractal_server/migrations/versions/f384e1c0cf5d_drop_task_default_args_columns.py,sha256=9BwqUS9Gf7UW_KjrzHbtViC880qhD452KAytkHWWZyk,746
  fractal_server/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/ssh/__init__.py,sha256=sVUmzxf7_DuXG1xoLQ1_00fo5NPhi2LJipSmU5EAkPs,124
- fractal_server/ssh/_fabric.py,sha256=udbz8BsbToqf2J_JjSlGUgVQGcP0tY-TjG2plzPoGPM,20180
+ fractal_server/ssh/_fabric.py,sha256=Nwsc5uU2BcOE7t1AFFpytKUsZHfbQxB0uKF5HD-dycA,21647
  fractal_server/string_tools.py,sha256=XtMNsr5R7GmgzmFi68zkKMedHs8vjGoVMMCXqWhIk9k,2568
  fractal_server/syringe.py,sha256=3qSMW3YaMKKnLdgnooAINOPxnCOxP7y2jeAQYB21Gdo,2786
  fractal_server/tasks/__init__.py,sha256=kadmVUoIghl8s190_Tt-8f-WBqMi8u8oU4Pvw39NHE8,23
@@ -214,13 +214,13 @@ fractal_server/tasks/v1/endpoint_operations.py,sha256=NQYvgh-_qEI9YhsLiulfOFPDac
  fractal_server/tasks/v1/get_collection_data.py,sha256=5C22jp356rCH5IIC0J57wOu-DCC_kp3B6p68JooN7IM,508
  fractal_server/tasks/v1/utils.py,sha256=HYFyNAyZofmf--mVgdwGC5TJpGShIWIDaS01yRr4HxM,1771
  fractal_server/tasks/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- fractal_server/tasks/v2/local/__init__.py,sha256=taFip1YHvnsfTlReqwZsdo4EmIq7hlAl2WV1g3Vp_TY,149
- fractal_server/tasks/v2/local/collect.py,sha256=qUSKpI5h5qJjzwXtezEzzo9jIHfXR8mbWCgDCJ4Huyc,12207
- fractal_server/tasks/v2/local/deactivate.py,sha256=f3j-a5npujZ5lsCBI_f39bKR7n2n79iSprmgm7_FJYk,6437
+ fractal_server/tasks/v2/local/__init__.py,sha256=9RVItnS7OyLsJOuJjWMCicaky4ASUPQEYD4SzDs0hOE,141
+ fractal_server/tasks/v2/local/collect.py,sha256=kTgMDiOX9qVDncmrgieOfYE71jqFwBpDgO1cbD5ZQVQ,12199
+ fractal_server/tasks/v2/local/deactivate.py,sha256=PyJm3PoQrYOBCm0F6p6xD7Gx9Tfnn_YZlXY-j3Fi6pM,8975
  fractal_server/tasks/v2/local/reactivate.py,sha256=eTSrEoZ54_6ExviozxRTgjNsrESG9PbQW-TiGqNMJcA,6117
- fractal_server/tasks/v2/local/utils_local.py,sha256=987EzL2BByPszoTc2dP-ucZigXp7fRdbBffeFGzbtPw,1785
+ fractal_server/tasks/v2/local/utils_local.py,sha256=JHHiS_SvPLSezxGWSWslMhLEWmMp9PepnzePXV6r0e8,1521
  fractal_server/tasks/v2/ssh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- fractal_server/tasks/v2/ssh/collect.py,sha256=IwsA8Xr0d6VXvajdFxwNIEB6jZ--hA1McEu6tydKq9E,15341
+ fractal_server/tasks/v2/ssh/collect.py,sha256=j50euINjxGS4S5-aYTIWwV34xydKvT6W5P9Vxv5TMuk,15263
  fractal_server/tasks/v2/ssh/deactivate.py,sha256=fwBCtj-8I_8s8Zd-nyQX7YaTjoPOK1pn07Tznvjbv8Y,31
  fractal_server/tasks/v2/ssh/reactivate.py,sha256=0pqK-g5uyykCkLsjYDVr8QTEBrIB31XQXvi0MQfqv-w,31
  fractal_server/tasks/v2/templates/1_create_venv.sh,sha256=PK0jdHKtQpda1zULebBaVPORt4t6V17wa4N1ohcj5ac,548
@@ -237,8 +237,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=C5WLuY3uGG2s53OEL-__H35-fmSlgu
  fractal_server/urls.py,sha256=5o_qq7PzKKbwq12NHSQZDmDitn5RAOeQ4xufu-2v9Zk,448
  fractal_server/utils.py,sha256=utvmBx8K9I8hRWFquxna2pBaOqe0JifDL_NVPmihEJI,3525
  fractal_server/zip_tools.py,sha256=xYpzBshysD2nmxkD5WLYqMzPYUcCRM3kYy-7n9bJL-U,4426
- fractal_server-2.9.0a0.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.9.0a0.dist-info/METADATA,sha256=6_oIEXxiAVbTAy9QvP753-1NvPXYmuWG-f-E4LMAQeo,4629
- fractal_server-2.9.0a0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- fractal_server-2.9.0a0.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-2.9.0a0.dist-info/RECORD,,
+ fractal_server-2.9.0a2.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.9.0a2.dist-info/METADATA,sha256=Qk7nGzAcl_hZRCJp8pdCV2PQACAWwqOzTCH7nFW__pw,4585
+ fractal_server-2.9.0a2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ fractal_server-2.9.0a2.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-2.9.0a2.dist-info/RECORD,,