fractal-server 2.16.5__py3-none-any.whl → 2.17.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/__main__.py +129 -22
  3. fractal_server/app/db/__init__.py +9 -11
  4. fractal_server/app/models/security.py +7 -3
  5. fractal_server/app/models/user_settings.py +0 -4
  6. fractal_server/app/models/v2/__init__.py +4 -0
  7. fractal_server/app/models/v2/job.py +3 -4
  8. fractal_server/app/models/v2/profile.py +16 -0
  9. fractal_server/app/models/v2/project.py +3 -0
  10. fractal_server/app/models/v2/resource.py +130 -0
  11. fractal_server/app/models/v2/task_group.py +3 -0
  12. fractal_server/app/routes/admin/v2/__init__.py +4 -0
  13. fractal_server/app/routes/admin/v2/_aux_functions.py +55 -0
  14. fractal_server/app/routes/admin/v2/profile.py +86 -0
  15. fractal_server/app/routes/admin/v2/resource.py +229 -0
  16. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +48 -82
  17. fractal_server/app/routes/api/__init__.py +26 -7
  18. fractal_server/app/routes/api/v2/_aux_functions.py +27 -1
  19. fractal_server/app/routes/api/v2/_aux_functions_history.py +2 -2
  20. fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +3 -3
  21. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +7 -7
  22. fractal_server/app/routes/api/v2/project.py +5 -1
  23. fractal_server/app/routes/api/v2/submit.py +32 -24
  24. fractal_server/app/routes/api/v2/task.py +5 -0
  25. fractal_server/app/routes/api/v2/task_collection.py +36 -47
  26. fractal_server/app/routes/api/v2/task_collection_custom.py +11 -5
  27. fractal_server/app/routes/api/v2/task_collection_pixi.py +34 -40
  28. fractal_server/app/routes/api/v2/task_group_lifecycle.py +39 -82
  29. fractal_server/app/routes/api/v2/workflow_import.py +4 -3
  30. fractal_server/app/routes/auth/_aux_auth.py +3 -3
  31. fractal_server/app/routes/auth/current_user.py +45 -7
  32. fractal_server/app/routes/auth/oauth.py +1 -1
  33. fractal_server/app/routes/auth/users.py +9 -0
  34. fractal_server/app/routes/aux/_runner.py +2 -1
  35. fractal_server/app/routes/aux/validate_user_profile.py +62 -0
  36. fractal_server/app/routes/aux/validate_user_settings.py +12 -9
  37. fractal_server/app/schemas/user.py +20 -13
  38. fractal_server/app/schemas/user_settings.py +0 -4
  39. fractal_server/app/schemas/v2/__init__.py +11 -0
  40. fractal_server/app/schemas/v2/profile.py +72 -0
  41. fractal_server/app/schemas/v2/resource.py +117 -0
  42. fractal_server/app/security/__init__.py +6 -13
  43. fractal_server/app/security/signup_email.py +2 -2
  44. fractal_server/app/user_settings.py +2 -12
  45. fractal_server/config/__init__.py +23 -0
  46. fractal_server/config/_database.py +58 -0
  47. fractal_server/config/_email.py +170 -0
  48. fractal_server/config/_init_data.py +27 -0
  49. fractal_server/config/_main.py +216 -0
  50. fractal_server/config/_settings_config.py +7 -0
  51. fractal_server/images/tools.py +3 -3
  52. fractal_server/logger.py +3 -3
  53. fractal_server/main.py +14 -21
  54. fractal_server/migrations/versions/90f6508c6379_drop_useroauth_username.py +36 -0
  55. fractal_server/migrations/versions/a80ac5a352bf_resource_profile.py +195 -0
  56. fractal_server/runner/config/__init__.py +2 -0
  57. fractal_server/runner/config/_local.py +21 -0
  58. fractal_server/runner/config/_slurm.py +128 -0
  59. fractal_server/runner/config/slurm_mem_to_MB.py +63 -0
  60. fractal_server/runner/exceptions.py +4 -0
  61. fractal_server/runner/executors/base_runner.py +17 -7
  62. fractal_server/runner/executors/local/get_local_config.py +21 -86
  63. fractal_server/runner/executors/local/runner.py +48 -5
  64. fractal_server/runner/executors/slurm_common/_batching.py +2 -2
  65. fractal_server/runner/executors/slurm_common/base_slurm_runner.py +59 -25
  66. fractal_server/runner/executors/slurm_common/get_slurm_config.py +38 -54
  67. fractal_server/runner/executors/slurm_common/remote.py +1 -1
  68. fractal_server/runner/executors/slurm_common/{_slurm_config.py → slurm_config.py} +3 -254
  69. fractal_server/runner/executors/slurm_common/slurm_job_task_models.py +1 -1
  70. fractal_server/runner/executors/slurm_ssh/runner.py +12 -14
  71. fractal_server/runner/executors/slurm_sudo/_subprocess_run_as_user.py +2 -2
  72. fractal_server/runner/executors/slurm_sudo/runner.py +12 -12
  73. fractal_server/runner/v2/_local.py +36 -21
  74. fractal_server/runner/v2/_slurm_ssh.py +40 -4
  75. fractal_server/runner/v2/_slurm_sudo.py +41 -11
  76. fractal_server/runner/v2/db_tools.py +1 -1
  77. fractal_server/runner/v2/runner.py +3 -11
  78. fractal_server/runner/v2/runner_functions.py +42 -28
  79. fractal_server/runner/v2/submit_workflow.py +87 -108
  80. fractal_server/runner/versions.py +8 -3
  81. fractal_server/ssh/_fabric.py +6 -6
  82. fractal_server/tasks/config/__init__.py +3 -0
  83. fractal_server/tasks/config/_pixi.py +127 -0
  84. fractal_server/tasks/config/_python.py +51 -0
  85. fractal_server/tasks/v2/local/_utils.py +7 -7
  86. fractal_server/tasks/v2/local/collect.py +13 -5
  87. fractal_server/tasks/v2/local/collect_pixi.py +26 -10
  88. fractal_server/tasks/v2/local/deactivate.py +7 -1
  89. fractal_server/tasks/v2/local/deactivate_pixi.py +5 -1
  90. fractal_server/tasks/v2/local/delete.py +4 -0
  91. fractal_server/tasks/v2/local/reactivate.py +13 -5
  92. fractal_server/tasks/v2/local/reactivate_pixi.py +27 -9
  93. fractal_server/tasks/v2/ssh/_pixi_slurm_ssh.py +11 -10
  94. fractal_server/tasks/v2/ssh/_utils.py +6 -7
  95. fractal_server/tasks/v2/ssh/collect.py +19 -12
  96. fractal_server/tasks/v2/ssh/collect_pixi.py +34 -16
  97. fractal_server/tasks/v2/ssh/deactivate.py +12 -8
  98. fractal_server/tasks/v2/ssh/deactivate_pixi.py +14 -10
  99. fractal_server/tasks/v2/ssh/delete.py +12 -9
  100. fractal_server/tasks/v2/ssh/reactivate.py +18 -12
  101. fractal_server/tasks/v2/ssh/reactivate_pixi.py +36 -17
  102. fractal_server/tasks/v2/templates/4_pip_show.sh +4 -6
  103. fractal_server/tasks/v2/utils_database.py +2 -2
  104. fractal_server/tasks/v2/utils_python_interpreter.py +8 -16
  105. fractal_server/tasks/v2/utils_templates.py +7 -10
  106. fractal_server/utils.py +1 -1
  107. {fractal_server-2.16.5.dist-info → fractal_server-2.17.0a0.dist-info}/METADATA +5 -5
  108. {fractal_server-2.16.5.dist-info → fractal_server-2.17.0a0.dist-info}/RECORD +112 -90
  109. {fractal_server-2.16.5.dist-info → fractal_server-2.17.0a0.dist-info}/WHEEL +1 -1
  110. fractal_server/config.py +0 -906
  111. /fractal_server/{runner → app}/shutdown.py +0 -0
  112. {fractal_server-2.16.5.dist-info → fractal_server-2.17.0a0.dist-info}/entry_points.txt +0 -0
  113. {fractal_server-2.16.5.dist-info → fractal_server-2.17.0a0.dist-info/licenses}/LICENSE +0 -0
fractal_server/runner/v2/submit_workflow.py

@@ -8,35 +8,52 @@ the individual backends.
  import os
  import traceback
  from pathlib import Path
+ from typing import Protocol

  from sqlalchemy.orm import Session as DBSyncSession

- from ...config import get_settings
- from ...logger import get_logger
- from ...logger import reset_logger_handlers
- from ...logger import set_logger
- from ...ssh._fabric import FractalSSH
- from ...syringe import Inject
- from ...utils import get_timestamp
- from ...zip_tools import _zip_folder_to_file_and_remove
- from ..exceptions import JobExecutionError
- from ..filenames import WORKFLOW_LOG_FILENAME
  from ._local import process_workflow as local_process_workflow
  from ._slurm_ssh import process_workflow as slurm_ssh_process_workflow
  from ._slurm_sudo import process_workflow as slurm_sudo_process_workflow
  from fractal_server import __VERSION__
  from fractal_server.app.db import DB
- from fractal_server.app.models import UserSettings
  from fractal_server.app.models.v2 import DatasetV2
  from fractal_server.app.models.v2 import JobV2
+ from fractal_server.app.models.v2 import Profile
+ from fractal_server.app.models.v2 import Resource
  from fractal_server.app.models.v2 import WorkflowV2
  from fractal_server.app.schemas.v2 import JobStatusTypeV2
+ from fractal_server.app.schemas.v2 import ResourceType
+ from fractal_server.logger import get_logger
+ from fractal_server.logger import reset_logger_handlers
+ from fractal_server.logger import set_logger
+ from fractal_server.runner.exceptions import JobExecutionError
+ from fractal_server.runner.filenames import WORKFLOW_LOG_FILENAME
+ from fractal_server.ssh._fabric import FractalSSH
+ from fractal_server.types import AttributeFilters
+ from fractal_server.utils import get_timestamp
+ from fractal_server.zip_tools import _zip_folder_to_file_and_remove


- _backends = {}
- _backends["local"] = local_process_workflow
- _backends["slurm"] = slurm_sudo_process_workflow
- _backends["slurm_ssh"] = slurm_ssh_process_workflow
+ class ProcessWorkflowType(Protocol):
+     def __call__(
+         self,
+         *,
+         workflow: WorkflowV2,
+         dataset: DatasetV2,
+         workflow_dir_local: Path,
+         job_id: int,
+         workflow_dir_remote: Path | None,
+         first_task_index: int | None,
+         last_task_index: int | None,
+         logger_name: str,
+         job_attribute_filters: AttributeFilters,
+         job_type_filters: dict[str, bool],
+         user_id: int,
+         resource: Resource,
+         profile: Profile,
+     ) -> None:
+         ...


  def fail_job(
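As a side note on the new `ProcessWorkflowType` protocol above: `typing.Protocol` describes the keyword-only call signature that all three backend `process_workflow` functions must share, without requiring any inheritance. A minimal standalone sketch of the same pattern (illustrative names, not taken from this package):

    from pathlib import Path
    from typing import Protocol


    class RunnerFunc(Protocol):
        # Structural type: any callable with this keyword-only signature conforms.
        def __call__(self, *, job_dir: Path, user_id: int) -> None:
            ...


    def local_runner(*, job_dir: Path, user_id: int) -> None:
        print(f"Running in {job_dir} for user {user_id}")


    # No subclassing needed: `local_runner` matches `RunnerFunc` structurally,
    # so a static checker such as mypy accepts this assignment.
    runner: RunnerFunc = local_runner
    runner(job_dir=Path("/tmp/job"), user_id=1)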
@@ -67,10 +84,10 @@ def submit_workflow(
      dataset_id: int,
      job_id: int,
      user_id: int,
-     user_settings: UserSettings,
      worker_init: str | None = None,
-     slurm_user: str | None = None,
-     user_cache_dir: str | None = None,
+     user_cache_dir: str | None = None, # FIXME: review this
+     resource: Resource,
+     profile: Profile,
      fractal_ssh: FractalSSH | None = None,
  ) -> None:
      """
@@ -94,15 +111,15 @@ def submit_workflow(
              each task.
          user_cache_dir:
              Cache directory (namely a path where the user can write); for the
-             slurm backend, this is used as a base directory for
-             `job.working_dir_user`.
-         slurm_user:
-             The username to impersonate for the workflow execution, for the
-             slurm backend.
+             slurm backend, this is used as a base directory for FIXME.
+         resource:
+             Computational resource to be used for this job (e.g. a SLURM
+             cluster).
+         profile:
+             Computational profile to be used for this job.
+         fractal_ssh: SSH object, for when `resource.type = "slurm_ssh"`.
      """
      # Declare runner backend and set `process_workflow` function
-     settings = Inject(get_settings)
-     FRACTAL_RUNNER_BACKEND = settings.FRACTAL_RUNNER_BACKEND
      logger_name = f"WF{workflow_id}_job{job_id}"
      logger = set_logger(logger_name=logger_name)

@@ -135,66 +152,30 @@ def submit_workflow(
          )
          return

-     # Declare runner backend and set `process_workflow` function
-     settings = Inject(get_settings)
-     FRACTAL_RUNNER_BACKEND = settings.FRACTAL_RUNNER_BACKEND
      try:
-         process_workflow = _backends[settings.FRACTAL_RUNNER_BACKEND]
-     except KeyError as e:
-         fail_job(
-             db=db_sync,
-             job=job,
-             log_msg=(
-                 f"Invalid {FRACTAL_RUNNER_BACKEND=}.\n"
-                 f"Original KeyError: {str(e)}"
-             ),
-             logger_name=logger_name,
-             emit_log=True,
-         )
-         return
-
-     # Define and create server-side working folder
-     WORKFLOW_DIR_LOCAL = Path(job.working_dir)
-     if WORKFLOW_DIR_LOCAL.exists():
-         fail_job(
-             db=db_sync,
-             job=job,
-             log_msg=f"Workflow dir {WORKFLOW_DIR_LOCAL} already exists.",
-             logger_name=logger_name,
-             emit_log=True,
-         )
-         return
+         # Define local/remote folders, and create local folder
+         local_job_dir = Path(job.working_dir)
+         remote_job_dir = Path(job.working_dir_user)
+         match resource.type:
+             case ResourceType.LOCAL:
+                 local_job_dir.mkdir(parents=True, exist_ok=False)
+             case ResourceType.SLURM_SUDO:
+                 original_umask = os.umask(0)
+                 local_job_dir.mkdir(
+                     parents=True, mode=0o755, exist_ok=False
+                 )
+                 os.umask(original_umask)
+             case ResourceType.SLURM_SSH:
+                 local_job_dir.mkdir(parents=True, exist_ok=False)

-     try:
-         # Create WORKFLOW_DIR_LOCAL and define WORKFLOW_DIR_REMOTE
-         if FRACTAL_RUNNER_BACKEND == "local":
-             WORKFLOW_DIR_LOCAL.mkdir(parents=True)
-             WORKFLOW_DIR_REMOTE = WORKFLOW_DIR_LOCAL
-         elif FRACTAL_RUNNER_BACKEND == "slurm":
-             original_umask = os.umask(0)
-             WORKFLOW_DIR_LOCAL.mkdir(parents=True, mode=0o755)
-             os.umask(original_umask)
-             WORKFLOW_DIR_REMOTE = (
-                 Path(user_cache_dir) / WORKFLOW_DIR_LOCAL.name
-             )
-         elif FRACTAL_RUNNER_BACKEND == "slurm_ssh":
-             WORKFLOW_DIR_LOCAL.mkdir(parents=True)
-             WORKFLOW_DIR_REMOTE = (
-                 Path(user_settings.ssh_jobs_dir) / WORKFLOW_DIR_LOCAL.name
-             )
-         else:
-             raise ValueError(
-                 "Invalid FRACTAL_RUNNER_BACKEND="
-                 f"{settings.FRACTAL_RUNNER_BACKEND}."
-             )
      except Exception as e:
          error_type = type(e).__name__
          fail_job(
              db=db_sync,
              job=job,
              log_msg=(
-                 f"{error_type} error occurred while creating job folder "
-                 f"and subfolders.\nOriginal error: {str(e)}"
+                 f"{error_type} error while creating local job folder."
+                 f" Original error: {str(e)}"
              ),
              logger_name=logger_name,
              emit_log=True,
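The `SLURM_SUDO` branch above creates the local job folder with mode `0o755` under a cleared umask, so the directory ends up world-readable regardless of the server process umask (presumably so that the impersonated SLURM user can read it). A standalone sketch of that umask/mkdir pattern (the path is illustrative):

    import os
    from pathlib import Path

    job_dir = Path("/tmp/example_job_dir")  # illustrative path

    # Clear the umask so that mode=0o755 is applied verbatim, then restore it.
    original_umask = os.umask(0)
    try:
        job_dir.mkdir(parents=True, mode=0o755, exist_ok=False)
    finally:
        os.umask(original_umask)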
@@ -218,7 +199,7 @@ def submit_workflow(
          db_sync.refresh(wftask)

      # Write logs
-     log_file_path = WORKFLOW_DIR_LOCAL / WORKFLOW_LOG_FILENAME
+     log_file_path = local_job_dir / WORKFLOW_LOG_FILENAME
      logger = set_logger(
          logger_name=logger_name,
          log_file_path=log_file_path,
@@ -228,14 +209,10 @@ def submit_workflow(
          f"more logs at {str(log_file_path)}"
      )
      logger.debug(f"fractal_server.__VERSION__: {__VERSION__}")
-     logger.debug(f"FRACTAL_RUNNER_BACKEND: {FRACTAL_RUNNER_BACKEND}")
-     if FRACTAL_RUNNER_BACKEND == "slurm":
-         logger.debug(f"slurm_user: {slurm_user}")
-         logger.debug(f"slurm_account: {job.slurm_account}")
-         logger.debug(f"worker_init: {worker_init}")
-     elif FRACTAL_RUNNER_BACKEND == "slurm_ssh":
-         logger.debug(f"ssh_user: {user_settings.ssh_username}")
-         logger.debug(f"base dir: {user_settings.ssh_tasks_dir}")
+     logger.debug(f"Resource name: {resource.name}")
+     logger.debug(f"Profile name: {profile.name}")
+     logger.debug(f"Username: {profile.username}")
+     if resource.type in [ResourceType.SLURM_SUDO, ResourceType.SLURM_SSH]:
          logger.debug(f"slurm_account: {job.slurm_account}")
          logger.debug(f"worker_init: {worker_init}")
      logger.debug(f"job.id: {job.id}")
@@ -247,40 +224,42 @@ def submit_workflow(
      job_working_dir = job.working_dir

      try:
-         if FRACTAL_RUNNER_BACKEND == "local":
-             process_workflow = local_process_workflow
-             backend_specific_kwargs = {}
-         elif FRACTAL_RUNNER_BACKEND == "slurm":
-             process_workflow = slurm_sudo_process_workflow
-             backend_specific_kwargs = dict(
-                 slurm_user=slurm_user,
-                 slurm_account=job.slurm_account,
-                 user_cache_dir=user_cache_dir,
-             )
-         elif FRACTAL_RUNNER_BACKEND == "slurm_ssh":
-             process_workflow = slurm_ssh_process_workflow
-             backend_specific_kwargs = dict(
-                 fractal_ssh=fractal_ssh,
-                 slurm_account=job.slurm_account,
-             )
-         else:
-             raise RuntimeError(
-                 f"Invalid runner backend {FRACTAL_RUNNER_BACKEND=}"
-             )
+         match resource.type:
+             case ResourceType.LOCAL:
+                 process_workflow: ProcessWorkflowType = local_process_workflow
+                 backend_specific_kwargs = {}
+             case ResourceType.SLURM_SUDO:
+                 process_workflow: ProcessWorkflowType = (
+                     slurm_sudo_process_workflow
+                 )
+                 backend_specific_kwargs = dict(
+                     slurm_account=job.slurm_account,
+                     user_cache_dir=user_cache_dir,
+                 )
+             case ResourceType.SLURM_SSH:
+                 process_workflow: ProcessWorkflowType = (
+                     slurm_ssh_process_workflow
+                 )
+                 backend_specific_kwargs = dict(
+                     fractal_ssh=fractal_ssh,
+                     slurm_account=job.slurm_account,
+                 )

          process_workflow(
              workflow=workflow,
              dataset=dataset,
              job_id=job_id,
              user_id=user_id,
-             workflow_dir_local=WORKFLOW_DIR_LOCAL,
-             workflow_dir_remote=WORKFLOW_DIR_REMOTE,
+             workflow_dir_local=local_job_dir,
+             workflow_dir_remote=remote_job_dir,
              logger_name=logger_name,
              worker_init=worker_init,
              first_task_index=job.first_task_index,
              last_task_index=job.last_task_index,
              job_attribute_filters=job.attribute_filters,
              job_type_filters=job.type_filters,
+             resource=resource,
+             profile=profile,
              **backend_specific_kwargs,
          )

fractal_server/runner/versions.py

@@ -1,18 +1,23 @@
  import json
  import sys
+ from typing import TypedDict

  import fractal_server


- def get_versions() -> dict[str, list[int] | str]:
+ class VersionsType(TypedDict):
+     python: tuple[int, ...]
+     fractal_server: str
+
+
+ def get_versions() -> VersionsType:
      """
      Extract versions of Python and fractal-server.
      """
-     versions = dict(
+     return dict(
          python=tuple(sys.version_info[:3]),
          fractal_server=fractal_server.__VERSION__,
      )
-     return versions


  if __name__ == "__main__":
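For reference, the `TypedDict` introduced above replaces the loose `dict[str, list[int] | str]` annotation with per-key types. A small sketch of how such a return type is consumed (illustrative names, not taken from this package):

    import sys
    from typing import TypedDict


    class Versions(TypedDict):
        python: tuple[int, ...]
        fractal_server: str


    def get_versions() -> Versions:
        # The returned dict must carry exactly these keys with matching value
        # types, otherwise a static checker such as mypy flags the function.
        return {
            "python": tuple(sys.version_info[:3]),
            "fractal_server": "2.17.0a0",
        }


    v = get_versions()
    print(v["python"], v["fractal_server"])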
fractal_server/ssh/_fabric.py

@@ -84,7 +84,7 @@ def _acquire_lock_with_timeout(
      """
      Given a `threading.Lock` object, try to acquire it within a given timeout.

-     Arguments:
+     Args:
          lock:
          label:
          timeout:
@@ -177,7 +177,7 @@ class FractalSSH:
          """
          Log and re-raise an exception from a FractalSSH method.

-         Arguments:
+         Args:
              message: Additional message to be logged.
              e: Original exception
          """
@@ -686,7 +686,7 @@ class FractalSSHList:

          Note: Changing `_data` requires acquiring `_lock`.

-         Arguments:
+         Args:
              host:
              user:
              key_path:
@@ -732,7 +732,7 @@ class FractalSSHList:
          """
          Return whether a given key is present in the collection.

-         Arguments:
+         Args:
              host:
              user:
              key_path:
@@ -752,7 +752,7 @@ class FractalSSHList:

          Note: Changing `_data` requires acquiring `_lock`.

-         Arguments:
+         Args:
              host:
              user:
              key_path:
@@ -780,7 +780,7 @@ class FractalSSHList:
          """
          Close all `FractalSSH` objects in the collection.

-         Arguments:
+         Args:
              timeout:
                  Timeout for `FractalSSH._lock` acquisition, to be obtained
                  before closing.
fractal_server/tasks/config/__init__.py (new file)

@@ -0,0 +1,3 @@
+ from ._pixi import PixiSLURMConfig # noqa F401
+ from ._pixi import TasksPixiSettings # noqa F401
+ from ._python import TasksPythonSettings # noqa F401
fractal_server/tasks/config/_pixi.py (new file)

@@ -0,0 +1,127 @@
+ from pathlib import Path
+ from typing import Annotated
+
+ from pydantic import AfterValidator
+ from pydantic import BaseModel
+ from pydantic import model_validator
+ from pydantic import PositiveInt
+
+ from fractal_server.types import DictStrStr
+ from fractal_server.types import NonEmptyStr
+
+
+ def _check_pixi_slurm_memory(mem: str) -> str:
+     if mem[-1] not in ["K", "M", "G", "T"]:
+         raise ValueError(
+             f"Invalid memory requirement {mem=} for `pixi`, "
+             "please set a K/M/G/T units suffix."
+         )
+     return mem
+
+
+ class PixiSLURMConfig(BaseModel):
+     """
+     Parameters that are passed directly to a `sbatch` command.
+
+     See https://slurm.schedmd.com/sbatch.html.
+     """
+
+     partition: NonEmptyStr
+     """
+     `-p, --partition=<partition_names>`
+     """
+     cpus: PositiveInt
+     """
+     `-c, --cpus-per-task=<ncpus>
+     """
+     mem: Annotated[NonEmptyStr, AfterValidator(_check_pixi_slurm_memory)]
+     """
+     `--mem=<size>[units]` (examples: `"10M"`, `"10G"`).
+     From `sbatch` docs: Specify the real memory required per node. Default
+     units are megabytes. Different units can be specified using the suffix
+     [K|M|G|T].
+     """
+     time: NonEmptyStr
+     """
+     `-t, --time=<time>`.
+     From `sbatch` docs: "A time limit of zero requests that no time limit be
+     imposed. Acceptable time formats include "minutes", "minutes:seconds",
+     "hours:minutes:seconds", "days-hours", "days-hours:minutes" and
+     "days-hours:minutes:seconds".
+     """
+
+
+ class TasksPixiSettings(BaseModel):
+     """
+     Configuration for `pixi` Task collection.
+     """
+
+     versions: DictStrStr
+     """
+     Dictionary mapping `pixi` versions (e.g. `0.47.0`) to the corresponding
+     folders (e.g. `/somewhere/pixi/0.47.0` - if the binary is
+     `/somewhere/pixi/0.47.0/bin/pixi`).
+     """
+     default_version: str
+     """
+     Default task-collection `pixi` version.
+     """
+     PIXI_CONCURRENT_SOLVES: int = 4
+     """
+     Value of
+     [`--concurrent-solves`](https://pixi.sh/latest/reference/cli/pixi/install/#arg---concurrent-solves)
+     for `pixi install`.
+     """
+     PIXI_CONCURRENT_DOWNLOADS: int = 4
+     """
+     Value of
+     [`--concurrent-downloads`](https://pixi.sh/latest/reference/cli/pixi/install/#arg---concurrent-downloads)
+     for `pixi install`.
+     """
+     TOKIO_WORKER_THREADS: int = 2
+     """
+     From
+     [Tokio documentation](
+     https://docs.rs/tokio/latest/tokio/#cpu-bound-tasks-and-blocking-code
+     )
+     :
+
+     The core threads are where all asynchronous code runs,
+     and Tokio will by default spawn one for each CPU core.
+     You can use the environment variable `TOKIO_WORKER_THREADS` to override
+     the default value.
+     """
+     DEFAULT_ENVIRONMENT: str = "default"
+     """
+     Default pixi environment name.
+     """
+     DEFAULT_PLATFORM: str = "linux-64"
+     """
+     Default platform for pixi.
+     """
+     SLURM_CONFIG: PixiSLURMConfig | None = None
+     """
+     Required when using `pixi` in a SSH/SLURM deployment.
+     """
+
+     @model_validator(mode="after")
+     def check_pixi_settings(self):
+         if self.default_version not in self.versions:
+             raise ValueError(
+                 f"Default version '{self.default_version}' not in "
+                 f"available version {list(self.versions.keys())}."
+             )
+
+         pixi_base_dir = Path(self.versions[self.default_version]).parent
+
+         for key, value in self.versions.items():
+             pixi_path = Path(value)
+
+             if pixi_path.parent != pixi_base_dir:
+                 raise ValueError(
+                     f"{pixi_path=} is not located within the {pixi_base_dir=}."
+                 )
+             if pixi_path.name != key:
+                 raise ValueError(f"{pixi_path.name=} is not equal to {key=}")
+
+         return self
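As a usage illustration of the validator above (the paths are hypothetical, not taken from the package), `check_pixi_settings` requires that the default version is listed, that every entry lives under one shared base folder, and that each folder is named after its version key:

    from pydantic import ValidationError

    from fractal_server.tasks.config import TasksPixiSettings

    # Valid: folder names match the version keys and share one parent folder.
    ok = TasksPixiSettings(
        versions={
            "0.47.0": "/opt/pixi/0.47.0",
            "0.48.1": "/opt/pixi/0.48.1",
        },
        default_version="0.47.0",
    )

    # Invalid: the default version is not a key of `versions`.
    try:
        TasksPixiSettings(
            versions={"0.47.0": "/opt/pixi/0.47.0"},
            default_version="0.50.0",
        )
    except ValidationError as e:
        print(e)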
fractal_server/tasks/config/_python.py (new file)

@@ -0,0 +1,51 @@
+ from typing import Literal
+ from typing import Self
+
+ from pydantic import BaseModel
+ from pydantic import model_validator
+
+ from fractal_server.types import AbsolutePathStr
+ from fractal_server.types import NonEmptyStr
+
+
+ class TasksPythonSettings(BaseModel):
+     """
+     Configuration for the Python base interpreters to be used for task venvs.
+
+     For task collection to work, there must be one or more base Python
+     interpreters available on your system.
+     """
+
+     default_version: NonEmptyStr
+     """
+     Default task-collection Python version (must be a key of `versions`).
+     """
+     versions: dict[
+         Literal[
+             "3.9",
+             "3.10",
+             "3.11",
+             "3.12",
+             "3.13",
+             "3.14",
+         ],
+         AbsolutePathStr,
+     ]
+     """
+     Dictionary mapping Python versions to the corresponding interpreters.
+     """
+
+     pip_cache_dir: AbsolutePathStr | None = None
+     """
+     Argument for `--cache-dir` option of `pip install`, if set.
+     """
+
+     @model_validator(mode="after")
+     def _validate_versions(self) -> Self:
+         if self.default_version not in self.versions.keys():
+             raise ValueError(
+                 f"The default Python version ('{self.default_version}') is "
+                 f"not available versions in {list(self.versions.keys())}."
+             )
+
+         return self
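For illustration only (the interpreter paths below are hypothetical), constructing `TasksPythonSettings` and resolving the interpreter for the default version looks like this:

    from fractal_server.tasks.config import TasksPythonSettings

    settings = TasksPythonSettings(
        default_version="3.12",
        versions={
            "3.11": "/usr/bin/python3.11",
            "3.12": "/usr/bin/python3.12",
        },
    )

    # Base interpreter for the default task-collection Python version.
    python_bin = settings.versions[settings.default_version]
    print(python_bin)  # /usr/bin/python3.12

    # A `default_version` that is not a key of `versions` fails `_validate_versions`.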
fractal_server/tasks/v2/local/_utils.py

@@ -1,15 +1,13 @@
  from pathlib import Path

  from ..utils_pixi import simplify_pyproject_toml
+ from fractal_server.app.models import Resource
  from fractal_server.app.schemas.v2 import TaskCreateV2
- from fractal_server.config import get_settings
  from fractal_server.logger import get_logger
  from fractal_server.logger import set_logger
- from fractal_server.syringe import Inject
  from fractal_server.tasks.v2.utils_templates import customize_template
  from fractal_server.utils import execute_command_sync

-
  logger = set_logger(__name__)


@@ -78,7 +76,10 @@ def check_task_files_exist(task_list: list[TaskCreateV2]) -> None:
                  )


- def edit_pyproject_toml_in_place_local(pyproject_toml_path: Path) -> None:
+ def edit_pyproject_toml_in_place_local(
+     pyproject_toml_path: Path,
+     resource: Resource,
+ ) -> None:
      """
      Wrapper of `simplify_pyproject_toml`, with I/O.
      """
@@ -88,11 +89,10 @@ def edit_pyproject_toml_in_place_local(pyproject_toml_path: Path) -> None:
          pyproject_contents = f.read()

      # Simplify contents
-     settings = Inject(get_settings)
      new_pyproject_contents = simplify_pyproject_toml(
          original_toml_string=pyproject_contents,
-         pixi_environment=settings.pixi.DEFAULT_ENVIRONMENT,
-         pixi_platform=settings.pixi.DEFAULT_PLATFORM,
+         pixi_environment=resource.tasks_pixi_config["DEFAULT_ENVIRONMENT"],
+         pixi_platform=resource.tasks_pixi_config["DEFAULT_PLATFORM"],
      )
      # Write new `pyproject.toml`
      with pyproject_toml_path.open("w") as f:
fractal_server/tasks/v2/local/collect.py

@@ -7,6 +7,8 @@ from tempfile import TemporaryDirectory
  from ..utils_database import create_db_tasks_and_update_task_group_sync
  from ._utils import _customize_and_run_template
  from fractal_server.app.db import get_sync_db
+ from fractal_server.app.models import Profile
+ from fractal_server.app.models import Resource
  from fractal_server.app.models.v2 import TaskGroupV2
  from fractal_server.app.schemas.v2 import FractalUploadedFile
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
@@ -25,7 +27,7 @@ from fractal_server.tasks.v2.utils_background import get_current_log
  from fractal_server.tasks.v2.utils_background import prepare_tasks_metadata
  from fractal_server.tasks.v2.utils_package_names import compare_package_names
  from fractal_server.tasks.v2.utils_python_interpreter import (
-     get_python_interpreter_v2,
+     get_python_interpreter,
  )
  from fractal_server.tasks.v2.utils_templates import get_collection_replacements
  from fractal_server.tasks.v2.utils_templates import (
@@ -39,6 +41,8 @@ def collect_local(
      *,
      task_group_activity_id: int,
      task_group_id: int,
+     resource: Resource,
+     profile: Profile,
      wheel_file: FractalUploadedFile | None = None,
  ) -> None:
      """
@@ -52,9 +56,10 @@ https://github.com/encode/starlette/blob/master/starlette/background.py).
      https://github.com/encode/starlette/blob/master/starlette/background.py).


-     Arguments:
+     Args:
          task_group_id:
          task_group_activity_id:
+         resource: Resource
          wheel_file:
      """

@@ -111,11 +116,14 @@ task_group = add_commit_refresh(obj=task_group, db=db)
              task_group = add_commit_refresh(obj=task_group, db=db)

              # Prepare replacements for templates
+             python_bin = get_python_interpreter(
+                 python_version=task_group.python_version,
+                 resource=resource,
+             )
              replacements = get_collection_replacements(
                  task_group=task_group,
-                 python_bin=get_python_interpreter_v2(
-                     python_version=task_group.python_version
-                 ),
+                 python_bin=python_bin,
+                 resource=resource,
              )

              # Prepare common arguments for `_customize_and_run_template``