fractal-server 2.6.4__py3-none-any.whl → 2.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/__main__.py +1 -1
  3. fractal_server/app/models/linkusergroup.py +11 -0
  4. fractal_server/app/models/v2/__init__.py +2 -0
  5. fractal_server/app/models/v2/collection_state.py +1 -0
  6. fractal_server/app/models/v2/task.py +67 -2
  7. fractal_server/app/routes/admin/v2/__init__.py +16 -0
  8. fractal_server/app/routes/admin/{v2.py → v2/job.py} +20 -191
  9. fractal_server/app/routes/admin/v2/project.py +43 -0
  10. fractal_server/app/routes/admin/v2/task.py +133 -0
  11. fractal_server/app/routes/admin/v2/task_group.py +162 -0
  12. fractal_server/app/routes/api/v1/task_collection.py +4 -4
  13. fractal_server/app/routes/api/v2/__init__.py +8 -0
  14. fractal_server/app/routes/api/v2/_aux_functions.py +1 -68
  15. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +343 -0
  16. fractal_server/app/routes/api/v2/submit.py +16 -35
  17. fractal_server/app/routes/api/v2/task.py +85 -110
  18. fractal_server/app/routes/api/v2/task_collection.py +184 -196
  19. fractal_server/app/routes/api/v2/task_collection_custom.py +70 -64
  20. fractal_server/app/routes/api/v2/task_group.py +173 -0
  21. fractal_server/app/routes/api/v2/workflow.py +39 -102
  22. fractal_server/app/routes/api/v2/workflow_import.py +360 -0
  23. fractal_server/app/routes/api/v2/workflowtask.py +4 -8
  24. fractal_server/app/routes/auth/_aux_auth.py +86 -40
  25. fractal_server/app/routes/auth/current_user.py +5 -5
  26. fractal_server/app/routes/auth/group.py +73 -23
  27. fractal_server/app/routes/auth/router.py +0 -2
  28. fractal_server/app/routes/auth/users.py +8 -7
  29. fractal_server/app/runner/executors/slurm/ssh/executor.py +82 -63
  30. fractal_server/app/runner/v2/__init__.py +13 -7
  31. fractal_server/app/runner/v2/task_interface.py +4 -9
  32. fractal_server/app/schemas/user.py +1 -2
  33. fractal_server/app/schemas/v2/__init__.py +7 -0
  34. fractal_server/app/schemas/v2/dataset.py +2 -7
  35. fractal_server/app/schemas/v2/dumps.py +1 -2
  36. fractal_server/app/schemas/v2/job.py +1 -1
  37. fractal_server/app/schemas/v2/manifest.py +25 -1
  38. fractal_server/app/schemas/v2/project.py +1 -1
  39. fractal_server/app/schemas/v2/task.py +95 -36
  40. fractal_server/app/schemas/v2/task_collection.py +8 -6
  41. fractal_server/app/schemas/v2/task_group.py +85 -0
  42. fractal_server/app/schemas/v2/workflow.py +7 -2
  43. fractal_server/app/schemas/v2/workflowtask.py +9 -6
  44. fractal_server/app/security/__init__.py +8 -1
  45. fractal_server/config.py +8 -28
  46. fractal_server/data_migrations/2_7_0.py +323 -0
  47. fractal_server/images/models.py +2 -4
  48. fractal_server/main.py +1 -1
  49. fractal_server/migrations/versions/034a469ec2eb_task_groups.py +184 -0
  50. fractal_server/ssh/_fabric.py +186 -73
  51. fractal_server/string_tools.py +6 -2
  52. fractal_server/tasks/utils.py +19 -5
  53. fractal_server/tasks/v1/_TaskCollectPip.py +1 -1
  54. fractal_server/tasks/v1/background_operations.py +5 -5
  55. fractal_server/tasks/v1/get_collection_data.py +2 -2
  56. fractal_server/tasks/v2/_venv_pip.py +67 -70
  57. fractal_server/tasks/v2/background_operations.py +180 -69
  58. fractal_server/tasks/v2/background_operations_ssh.py +57 -70
  59. fractal_server/tasks/v2/database_operations.py +44 -0
  60. fractal_server/tasks/v2/endpoint_operations.py +104 -116
  61. fractal_server/tasks/v2/templates/_1_create_venv.sh +9 -5
  62. fractal_server/tasks/v2/templates/{_2_upgrade_pip.sh → _2_preliminary_pip_operations.sh} +1 -0
  63. fractal_server/tasks/v2/utils.py +5 -0
  64. fractal_server/utils.py +3 -2
  65. {fractal_server-2.6.4.dist-info → fractal_server-2.7.0.dist-info}/METADATA +3 -7
  66. {fractal_server-2.6.4.dist-info → fractal_server-2.7.0.dist-info}/RECORD +69 -60
  67. fractal_server/app/routes/auth/group_names.py +0 -34
  68. fractal_server/tasks/v2/_TaskCollectPip.py +0 -132
  69. {fractal_server-2.6.4.dist-info → fractal_server-2.7.0.dist-info}/LICENSE +0 -0
  70. {fractal_server-2.6.4.dist-info → fractal_server-2.7.0.dist-info}/WHEEL +0 -0
  71. {fractal_server-2.6.4.dist-info → fractal_server-2.7.0.dist-info}/entry_points.txt +0 -0
@@ -1,3 +1,4 @@
1
+ import json
1
2
  import logging
2
3
  import time
3
4
  from contextlib import contextmanager
@@ -23,21 +24,58 @@ class FractalSSHTimeoutError(RuntimeError):
23
24
  pass
24
25
 
25
26
 
26
- class FractalSSHListTimeoutError(RuntimeError):
27
- pass
27
+ logger = set_logger(__name__)
28
28
 
29
29
 
30
- logger = set_logger(__name__)
30
+ @contextmanager
31
+ def _acquire_lock_with_timeout(
32
+ lock: Lock,
33
+ label: str,
34
+ timeout: float,
35
+ logger_name: str = __name__,
36
+ ) -> Generator[Literal[True], Any, None]:
37
+ """
38
+ Given a `threading.Lock` object, try to acquire it within a given timeout.
39
+
40
+ Arguments:
41
+ lock:
42
+ label:
43
+ timeout:
44
+ logger_name:
45
+ """
46
+ logger = get_logger(logger_name)
47
+ logger.info(f"Trying to acquire lock for '{label}', with {timeout=}")
48
+ result = lock.acquire(timeout=timeout)
49
+ try:
50
+ if not result:
51
+ logger.error(f"Lock for '{label}' was *not* acquired.")
52
+ raise FractalSSHTimeoutError(
53
+ f"Failed to acquire lock for '{label}' within "
54
+ f"{timeout} seconds"
55
+ )
56
+ logger.info(f"Lock for '{label}' was acquired.")
57
+ yield result
58
+ finally:
59
+ if result:
60
+ lock.release()
61
+ logger.info(f"Lock for '{label}' was released.")
31
62
 
32
63
 
33
64
  class FractalSSH(object):
34
65
  """
35
- FIXME SSH: Fix docstring
66
+ Wrapper of `fabric.Connection` object, enriched with locks.
67
+
68
+ Note: methods marked as `_unsafe` should not be used directly,
69
+ since they do not enforce locking.
36
70
 
37
71
  Attributes:
38
72
  _lock:
39
- connection:
73
+ _connection:
40
74
  default_lock_timeout:
75
+ default_max_attempts:
76
+ default_base_interval:
77
+ sftp_get_prefetch:
78
+ sftp_get_max_requests:
41
79
  logger_name:
42
80
  """
43
81
 
@@ -46,6 +84,8 @@ class FractalSSH(object):
46
84
  default_lock_timeout: float
47
85
  default_max_attempts: int
48
86
  default_base_interval: float
87
+ sftp_get_prefetch: bool
88
+ sftp_get_max_requests: int
49
89
  logger_name: str
50
90
 
51
91
  def __init__(
@@ -54,6 +94,8 @@ class FractalSSH(object):
54
94
  default_timeout: float = 250,
55
95
  default_max_attempts: int = 5,
56
96
  default_base_interval: float = 3.0,
97
+ sftp_get_prefetch: bool = False,
98
+ sftp_get_max_requests: int = 64,
57
99
  logger_name: str = __name__,
58
100
  ):
59
101
  self._lock = Lock()
@@ -61,28 +103,11 @@ class FractalSSH(object):
61
103
  self.default_lock_timeout = default_timeout
62
104
  self.default_base_interval = default_base_interval
63
105
  self.default_max_attempts = default_max_attempts
106
+ self.sftp_get_prefetch = sftp_get_prefetch
107
+ self.sftp_get_max_requests = sftp_get_max_requests
64
108
  self.logger_name = logger_name
65
109
  set_logger(self.logger_name)
66
110
 
67
- @contextmanager
68
- def acquire_timeout(
69
- self, timeout: float
70
- ) -> Generator[Literal[True], Any, None]:
71
- self.logger.debug(f"Trying to acquire lock, with {timeout=}")
72
- result = self._lock.acquire(timeout=timeout)
73
- try:
74
- if not result:
75
- self.logger.error("Lock was *NOT* acquired.")
76
- raise FractalSSHTimeoutError(
77
- f"Failed to acquire lock within {timeout} seconds"
78
- )
79
- self.logger.debug("Lock was acquired.")
80
- yield result
81
- finally:
82
- if result:
83
- self._lock.release()
84
- self.logger.debug("Lock was released")
85
-
86
111
  @property
87
112
  def is_connected(self) -> bool:
88
113
  return self._connection.is_connected
@@ -91,36 +116,82 @@ class FractalSSH(object):
91
116
  def logger(self) -> logging.Logger:
92
117
  return get_logger(self.logger_name)
93
118
 
94
- def put(
95
- self, *args, lock_timeout: Optional[float] = None, **kwargs
119
+ def _put(
120
+ self,
121
+ *,
122
+ local: str,
123
+ remote: str,
124
+ label: str,
125
+ lock_timeout: Optional[float] = None,
96
126
  ) -> Result:
127
+ """
128
+ Transfer a local file to a remote path, via SFTP.
129
+ """
97
130
  actual_lock_timeout = self.default_lock_timeout
98
131
  if lock_timeout is not None:
99
132
  actual_lock_timeout = lock_timeout
100
- with self.acquire_timeout(timeout=actual_lock_timeout):
101
- return self._connection.put(*args, **kwargs)
133
+ with _acquire_lock_with_timeout(
134
+ lock=self._lock,
135
+ label=label,
136
+ timeout=actual_lock_timeout,
137
+ ):
138
+ return self._sftp_unsafe().put(local, remote)
102
139
 
103
- def get(
104
- self, *args, lock_timeout: Optional[float] = None, **kwargs
140
+ def _get(
141
+ self,
142
+ *,
143
+ local: str,
144
+ remote: str,
145
+ label: str,
146
+ lock_timeout: Optional[float] = None,
105
147
  ) -> Result:
106
148
  actual_lock_timeout = self.default_lock_timeout
107
149
  if lock_timeout is not None:
108
150
  actual_lock_timeout = lock_timeout
109
- with self.acquire_timeout(timeout=actual_lock_timeout):
110
- return self._connection.get(*args, **kwargs)
151
+ with _acquire_lock_with_timeout(
152
+ lock=self._lock,
153
+ label=label,
154
+ timeout=actual_lock_timeout,
155
+ ):
156
+ return self._sftp_unsafe().get(
157
+ remote,
158
+ local,
159
+ prefetch=self.sftp_get_prefetch,
160
+ max_concurrent_prefetch_requests=self.sftp_get_max_requests,
161
+ )
111
162
 
112
- def run(
113
- self, *args, lock_timeout: Optional[float] = None, **kwargs
163
+ def _run(
164
+ self, *args, label: str, lock_timeout: Optional[float] = None, **kwargs
114
165
  ) -> Any:
115
166
  actual_lock_timeout = self.default_lock_timeout
116
167
  if lock_timeout is not None:
117
168
  actual_lock_timeout = lock_timeout
118
- with self.acquire_timeout(timeout=actual_lock_timeout):
169
+ with _acquire_lock_with_timeout(
170
+ lock=self._lock,
171
+ label=label,
172
+ timeout=actual_lock_timeout,
173
+ ):
119
174
  return self._connection.run(*args, **kwargs)
120
175
 
121
- def sftp(self) -> paramiko.sftp_client.SFTPClient:
176
+ def _sftp_unsafe(self) -> paramiko.sftp_client.SFTPClient:
177
+ """
178
+ This is marked as unsafe because you should only use its methods
179
+ after acquiring a lock.
180
+ """
122
181
  return self._connection.sftp()
123
182
 
183
+ def read_remote_json_file(self, filepath: str) -> dict[str, Any]:
184
+ self.logger.info(f"START reading remote JSON file {filepath}.")
185
+ with _acquire_lock_with_timeout(
186
+ lock=self._lock,
187
+ label="read_remote_json_file",
188
+ timeout=self.default_lock_timeout,
189
+ ):
190
+ with self._sftp_unsafe().open(filepath, "r") as f:
191
+ data = json.load(f)
192
+ self.logger.info(f"END reading remote JSON file {filepath}.")
193
+ return data
194
+
124
195
  def check_connection(self) -> None:
125
196
  """
126
197
  Open the SSH connection and handle exceptions.
@@ -131,7 +202,12 @@ class FractalSSH(object):
131
202
  """
132
203
  if not self._connection.is_connected:
133
204
  try:
134
- self._connection.open()
205
+ with _acquire_lock_with_timeout(
206
+ lock=self._lock,
207
+ label="_connection.open",
208
+ timeout=self.default_lock_timeout,
209
+ ):
210
+ self._connection.open()
135
211
  except Exception as e:
136
212
  raise RuntimeError(
137
213
  f"Cannot open SSH connection. Original error:\n{str(e)}"
@@ -146,8 +222,12 @@ class FractalSSH(object):
146
222
  because we observed cases where `is_connected=False` but the underlying
147
223
  `Transport` object was not closed.
148
224
  """
149
-
150
- self._connection.close()
225
+ with _acquire_lock_with_timeout(
226
+ lock=self._lock,
227
+ label="_connection.close",
228
+ timeout=self.default_lock_timeout,
229
+ ):
230
+ self._connection.close()
151
231
 
152
232
  if self._connection.client is not None:
153
233
  self._connection.client.close()
@@ -197,8 +277,11 @@ class FractalSSH(object):
197
277
  self.logger.info(f"{prefix} START running '{cmd}' over SSH.")
198
278
  try:
199
279
  # Case 1: Command runs successfully
200
- res = self.run(
201
- cmd, lock_timeout=actual_lock_timeout, hide=True
280
+ res = self._run(
281
+ cmd,
282
+ label=f"run {cmd}",
283
+ lock_timeout=actual_lock_timeout,
284
+ hide=True,
202
285
  )
203
286
  t_1 = time.perf_counter()
204
287
  self.logger.info(
@@ -250,7 +333,6 @@ class FractalSSH(object):
250
333
  *,
251
334
  local: str,
252
335
  remote: str,
253
- logger_name: Optional[str] = None,
254
336
  lock_timeout: Optional[float] = None,
255
337
  ) -> None:
256
338
  """
@@ -261,28 +343,65 @@ class FractalSSH(object):
261
343
  remote: Target path on remote host
262
344
  fractal_ssh: FractalSSH connection object with custom lock
263
345
  logger_name: Name of the logger
264
-
265
346
  """
266
347
  try:
267
- self.put(local=local, remote=remote, lock_timeout=lock_timeout)
348
+ prefix = "[send_file]"
349
+ self.logger.info(f"{prefix} START transfer of '{local}' over SSH.")
350
+ self._put(
351
+ local=local,
352
+ remote=remote,
353
+ lock_timeout=lock_timeout,
354
+ label=f"send_file {local=} {remote=}",
355
+ )
356
+ self.logger.info(f"{prefix} END transfer of '{local}' over SSH.")
268
357
  except Exception as e:
269
- logger = get_logger(logger_name=logger_name)
270
- logger.error(
358
+ self.logger.error(
271
359
  f"Transferring {local=} to {remote=} over SSH failed.\n"
272
360
  f"Original Error:\n{str(e)}."
273
361
  )
274
362
  raise e
275
363
 
364
+ def fetch_file(
365
+ self,
366
+ *,
367
+ local: str,
368
+ remote: str,
369
+ lock_timeout: Optional[float] = None,
370
+ ) -> None:
371
+ """
372
+ Transfer a file via SSH
373
+
374
+ Args:
375
+ local: Local path to file
376
+ remote: Target path on remote host
377
+ logger_name: Name of the logger
378
+ lock_timeout:
379
+ """
380
+ try:
381
+ prefix = "[fetch_file] "
382
+ self.logger.info(f"{prefix} START fetching '{remote}' over SSH.")
383
+ self._get(
384
+ local=local,
385
+ remote=remote,
386
+ lock_timeout=lock_timeout,
387
+ label=f"fetch_file {local=} {remote=}",
388
+ )
389
+ self.logger.info(f"{prefix} END fetching '{remote}' over SSH.")
390
+ except Exception as e:
391
+ self.logger.error(
392
+ f"Transferring {remote=} to {local=} over SSH failed.\n"
393
+ f"Original Error:\n{str(e)}."
394
+ )
395
+ raise e
396
+
276
397
  def mkdir(self, *, folder: str, parents: bool = True) -> None:
277
398
  """
278
399
  Create a folder remotely via SSH.
279
400
 
280
401
  Args:
281
402
  folder:
282
- fractal_ssh:
283
403
  parents:
284
404
  """
285
- # FIXME SSH: try using `mkdir` method of `paramiko.SFTPClient`
286
405
  if parents:
287
406
  cmd = f"mkdir -p {folder}"
288
407
  else:
@@ -339,12 +458,18 @@ class FractalSSH(object):
339
458
  contents: File contents
340
459
  lock_timeout:
341
460
  """
461
+ self.logger.info(f"START writing to remote file {path}.")
342
462
  actual_lock_timeout = self.default_lock_timeout
343
463
  if lock_timeout is not None:
344
464
  actual_lock_timeout = lock_timeout
345
- with self.acquire_timeout(timeout=actual_lock_timeout):
346
- with self.sftp().open(filename=path, mode="w") as f:
465
+ with _acquire_lock_with_timeout(
466
+ lock=self._lock,
467
+ label=f"write_remote_file {path=}",
468
+ timeout=actual_lock_timeout,
469
+ ):
470
+ with self._sftp_unsafe().open(filename=path, mode="w") as f:
347
471
  f.write(content)
472
+ self.logger.info(f"END writing to remote file {path}.")
348
473
 
349
474
 
350
475
  class FractalSSHList(object):
@@ -425,7 +550,11 @@ class FractalSSHList(object):
425
550
  "look_for_keys": False,
426
551
  },
427
552
  )
428
- with self.acquire_lock_with_timeout():
553
+ with _acquire_lock_with_timeout(
554
+ lock=self._lock,
555
+ label="FractalSSHList.get",
556
+ timeout=self._timeout,
557
+ ):
429
558
  self._data[key] = FractalSSH(connection=connection)
430
559
  return self._data[key]
431
560
 
@@ -465,7 +594,11 @@ class FractalSSHList(object):
465
594
  key_path:
466
595
  """
467
596
  key = (host, user, key_path)
468
- with self.acquire_lock_with_timeout():
597
+ with _acquire_lock_with_timeout(
598
+ lock=self._lock,
599
+ timeout=self._timeout,
600
+ label="FractalSSHList.remove",
601
+ ):
469
602
  self.logger.info(
470
603
  f"Removing FractalSSH object for {user}@{host} "
471
604
  "from collection."
@@ -492,24 +625,4 @@ class FractalSSHList(object):
492
625
  f"Closing FractalSSH object for {user}@{host} "
493
626
  f"({fractal_ssh_obj.is_connected=})."
494
627
  )
495
- with fractal_ssh_obj.acquire_timeout(timeout=timeout):
496
- fractal_ssh_obj.close()
497
-
498
- @contextmanager
499
- def acquire_lock_with_timeout(self) -> Generator[Literal[True], Any, None]:
500
- self.logger.debug(
501
- f"Trying to acquire lock, with timeout {self._timeout} s"
502
- )
503
- result = self._lock.acquire(timeout=self._timeout)
504
- try:
505
- if not result:
506
- self.logger.error("Lock was *NOT* acquired.")
507
- raise FractalSSHListTimeoutError(
508
- f"Failed to acquire lock within {self._timeout} ss"
509
- )
510
- self.logger.debug("Lock was acquired.")
511
- yield result
512
- finally:
513
- if result:
514
- self._lock.release()
515
- self.logger.debug("Lock was released")
628
+ fractal_ssh_obj.close()
@@ -33,14 +33,18 @@ def sanitize_string(value: str) -> str:
33
33
  return new_value
34
34
 
35
35
 
36
- def slugify_task_name_for_source(task_name: str) -> str:
36
+ def slugify_task_name_for_source_v1(task_name: str) -> str:
37
37
  """
38
38
  NOTE: this function is used upon creation of tasks' sources, therefore
39
39
  for the moment we cannot replace it with its more comprehensive version
40
40
  from `fractal_server.string_tools.sanitize_string`, nor we can remove it.
41
- As 2.3.1, we are renaming it to `slugify_task_name_for_source`, to make
41
+
42
+ As of 2.3.1, we are renaming it to `slugify_task_name_for_source`, to make
42
43
  it clear that it should not be used for other purposes.
43
44
 
45
+ As of 2.7.0, we are renaming it to `slugify_task_name_for_source_v1`, to
46
+ make it clear that it is not used for v2.
47
+
44
48
  Args:
45
49
  task_name:
46
50
 
@@ -9,9 +9,11 @@ COLLECTION_LOG_FILENAME = "collection.log"
9
9
  COLLECTION_FREEZE_FILENAME = "collection_freeze.txt"
10
10
 
11
11
 
12
- def get_absolute_venv_path(venv_path: Path) -> Path:
12
+ def get_absolute_venv_path_v1(venv_path: Path) -> Path:
13
13
  """
14
14
  If a path is not absolute, make it a relative path of FRACTAL_TASKS_DIR.
15
+
16
+ As of v2.7.0, we rename this to v1 since it is only to be used in v1.
15
17
  """
16
18
  if venv_path.is_absolute():
17
19
  package_path = venv_path
@@ -33,20 +35,32 @@ def get_freeze_path(base: Path) -> Path:
33
35
  return base / COLLECTION_FREEZE_FILENAME
34
36
 
35
37
 
36
- def get_collection_log(venv_path: Path) -> str:
37
- package_path = get_absolute_venv_path(venv_path)
38
+ def get_collection_log_v1(path: Path) -> str:
39
+ package_path = get_absolute_venv_path_v1(path)
38
40
  log_path = get_log_path(package_path)
39
41
  log = log_path.open().read()
40
42
  return log
41
43
 
42
44
 
43
- def get_collection_freeze(venv_path: Path) -> str:
44
- package_path = get_absolute_venv_path(venv_path)
45
+ def get_collection_log_v2(path: Path) -> str:
46
+ log_path = get_log_path(path)
47
+ log = log_path.open().read()
48
+ return log
49
+
50
+
51
+ def get_collection_freeze_v1(venv_path: Path) -> str:
52
+ package_path = get_absolute_venv_path_v1(venv_path)
45
53
  freeze_path = get_freeze_path(package_path)
46
54
  freeze = freeze_path.open().read()
47
55
  return freeze
48
56
 
49
57
 
58
+ def get_collection_freeze_v2(path: Path) -> str:
59
+ freeze_path = get_freeze_path(path)
60
+ freeze = freeze_path.open().read()
61
+ return freeze
62
+
63
+
50
64
  def _normalize_package_name(name: str) -> str:
51
65
  """
52
66
  Implement PyPa specifications for package-name normalization
@@ -77,7 +77,7 @@ class _TaskCollectPip(TaskCollectPipV1):
77
77
  if self.python_version:
78
78
  python_version = f"py{self.python_version}"
79
79
  else:
80
- python_version = "" # FIXME: can we allow this?
80
+ python_version = ""
81
81
 
82
82
  source = ":".join(
83
83
  (
@@ -6,9 +6,9 @@ import json
6
6
  from pathlib import Path
7
7
  from shutil import rmtree as shell_rmtree
8
8
 
9
- from ...string_tools import slugify_task_name_for_source
9
+ from ...string_tools import slugify_task_name_for_source_v1
10
10
  from ..utils import _normalize_package_name
11
- from ..utils import get_collection_log
11
+ from ..utils import get_collection_log_v1
12
12
  from ..utils import get_collection_path
13
13
  from ..utils import get_log_path
14
14
  from ._TaskCollectPip import _TaskCollectPip
@@ -215,7 +215,7 @@ async def create_package_environment_pip(
215
215
  # Fill in attributes for TaskCreate
216
216
  task_executable = package_root / t.executable
217
217
  cmd = f"{python_bin.as_posix()} {task_executable.as_posix()}"
218
- task_name_slug = slugify_task_name_for_source(t.name)
218
+ task_name_slug = slugify_task_name_for_source_v1(t.name)
219
219
  task_source = f"{task_pkg.package_source}:{task_name_slug}"
220
220
  if not task_executable.exists():
221
221
  raise FileNotFoundError(
@@ -321,7 +321,7 @@ async def background_collect_pip(
321
321
 
322
322
  # Update DB
323
323
  data.status = "OK"
324
- data.log = get_collection_log(venv_path)
324
+ data.log = get_collection_log_v1(venv_path)
325
325
  state.data = data.sanitised_dict()
326
326
  db.add(state)
327
327
  db.merge(state)
@@ -342,7 +342,7 @@ async def background_collect_pip(
342
342
  # Update db
343
343
  data.status = "fail"
344
344
  data.info = f"Original error: {e}"
345
- data.log = get_collection_log(venv_path)
345
+ data.log = get_collection_log_v1(venv_path)
346
346
  state.data = data.sanitised_dict()
347
347
  db.merge(state)
348
348
  db.commit()
@@ -2,12 +2,12 @@ import json
2
2
  from pathlib import Path
3
3
 
4
4
  from fractal_server.app.schemas.v1 import TaskCollectStatusV1
5
- from fractal_server.tasks.utils import get_absolute_venv_path
5
+ from fractal_server.tasks.utils import get_absolute_venv_path_v1
6
6
  from fractal_server.tasks.utils import get_collection_path
7
7
 
8
8
 
9
9
  def get_collection_data(venv_path: Path) -> TaskCollectStatusV1:
10
- package_path = get_absolute_venv_path(venv_path)
10
+ package_path = get_absolute_venv_path_v1(venv_path)
11
11
  collection_path = get_collection_path(package_path)
12
12
  with collection_path.open() as f:
13
13
  data = json.load(f)