fractal-server 2.9.0a10__py3-none-any.whl → 2.9.0a12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/routes/auth/group.py +20 -0
- fractal_server/app/routes/auth/users.py +97 -99
- fractal_server/app/runner/executors/slurm/_slurm_config.py +0 -17
- fractal_server/app/runner/executors/slurm/ssh/executor.py +41 -173
- fractal_server/app/runner/executors/slurm/sudo/executor.py +26 -109
- fractal_server/app/runner/executors/slurm/utils_executors.py +58 -0
- fractal_server/app/schemas/user.py +16 -10
- fractal_server/app/security/__init__.py +17 -0
- {fractal_server-2.9.0a10.dist-info → fractal_server-2.9.0a12.dist-info}/METADATA +2 -3
- {fractal_server-2.9.0a10.dist-info → fractal_server-2.9.0a12.dist-info}/RECORD +14 -13
- {fractal_server-2.9.0a10.dist-info → fractal_server-2.9.0a12.dist-info}/LICENSE +0 -0
- {fractal_server-2.9.0a10.dist-info → fractal_server-2.9.0a12.dist-info}/WHEEL +0 -0
- {fractal_server-2.9.0a10.dist-info → fractal_server-2.9.0a12.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.9.0a10"
+__VERSION__ = "2.9.0a12"

fractal_server/app/routes/auth/group.py
CHANGED
@@ -10,6 +10,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
 from sqlmodel import select
 
 from . import current_active_superuser
+from ._aux_auth import _get_default_usergroup_id
 from ._aux_auth import _get_single_usergroup_with_user_ids
 from ._aux_auth import _user_or_404
 from ._aux_auth import _usergroup_or_404
@@ -234,16 +235,35 @@ async def remove_user_from_group(
     superuser: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> UserGroupRead:
+
+    # Check that user and group exist
     await _usergroup_or_404(group_id, db)
     user = await _user_or_404(user_id, db)
+
+    # Check that group is not the default one
+    default_user_group_id = await _get_default_usergroup_id(db=db)
+    if default_user_group_id == group_id:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                f"Cannot remove user from '{FRACTAL_DEFAULT_GROUP_NAME}' "
+                "group.",
+            ),
+        )
+
     link = await db.get(LinkUserGroup, (group_id, user_id))
     if link is None:
+        # If user and group are not linked, fail
         raise HTTPException(
            status_code=422,
            detail=f"User '{user.email}' is not a member of group {group_id}.",
        )
     else:
+        # If user and group are linked, delete the link
         await db.delete(link)
         await db.commit()
+
+    # Enrich the response object with user_ids
     group = await _get_single_usergroup_with_user_ids(group_id=group_id, db=db)
+
     return group

fractal_server/app/routes/auth/users.py
CHANGED
@@ -8,9 +8,7 @@ from fastapi import status
 from fastapi_users import exceptions
 from fastapi_users import schemas
 from fastapi_users.router.common import ErrorCode
-from sqlalchemy.exc import IntegrityError
 from sqlalchemy.ext.asyncio import AsyncSession
-from sqlmodel import col
 from sqlmodel import func
 from sqlmodel import select
 
@@ -18,9 +16,10 @@ from . import current_active_superuser
 from ...db import get_async_db
 from ...schemas.user import UserRead
 from ...schemas.user import UserUpdate
-from ...schemas.user import UserUpdateWithNewGroupIds
 from ..aux.validate_user_settings import verify_user_has_settings
+from ._aux_auth import _get_default_usergroup_id
 from ._aux_auth import _get_single_user_with_groups
+from ._aux_auth import FRACTAL_DEFAULT_GROUP_NAME
 from fractal_server.app.models import LinkUserGroup
 from fractal_server.app.models import UserGroup
 from fractal_server.app.models import UserOAuth
@@ -28,6 +27,7 @@ from fractal_server.app.models import UserSettings
 from fractal_server.app.routes.auth._aux_auth import _user_or_404
 from fractal_server.app.schemas import UserSettingsRead
 from fractal_server.app.schemas import UserSettingsUpdate
+from fractal_server.app.schemas.user import UserUpdateGroups
 from fractal_server.app.security import get_user_manager
 from fractal_server.app.security import UserManager
 from fractal_server.logger import set_logger
@@ -55,114 +55,43 @@ async def get_user(
 @router_users.patch("/users/{user_id}/", response_model=UserRead)
 async def patch_user(
     user_id: int,
-    user_update:
+    user_update: UserUpdate,
     current_superuser: UserOAuth = Depends(current_active_superuser),
     user_manager: UserManager = Depends(get_user_manager),
     db: AsyncSession = Depends(get_async_db),
 ):
     """
     Custom version of the PATCH-user route from `fastapi-users`.
-
-    In order to keep the fastapi-users logic in place (which is convenient to
-    update user attributes), we split the endpoint into two branches. We either
-    go through the fastapi-users-based attribute-update branch, or through the
-    branch where we establish new user/group relationships.
-
-    Note that we prevent making both changes at the same time, since it would
-    be more complex to guarantee that endpoint error would leave the database
-    in the same state as before the API call.
     """
 
-    # We prevent simultaneous editing of both user attributes and user/group
-    # associations
-    user_update_dict_without_groups = user_update.dict(
-        exclude_unset=True, exclude={"new_group_ids"}
-    )
-    edit_attributes = user_update_dict_without_groups != {}
-    edit_groups = user_update.new_group_ids is not None
-    if edit_attributes and edit_groups:
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(
-                "Cannot modify both user attributes and group membership. "
-                "Please make two independent PATCH calls"
-            ),
-        )
-
     # Check that user exists
     user_to_patch = await _user_or_404(user_id, db)
 
-
-
-
-
-
-
-
-
+    # Modify user attributes
+    try:
+        user = await user_manager.update(
+            user_update,
+            user_to_patch,
+            safe=False,
+            request=None,
+        )
+        validated_user = schemas.model_validate(UserOAuth, user)
+        patched_user = await db.get(
+            UserOAuth, validated_user.id, populate_existing=True
+        )
+    except exceptions.InvalidPasswordException as e:
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail={
+                "code": ErrorCode.UPDATE_USER_INVALID_PASSWORD,
+                "reason": e.reason,
+            },
+        )
+    except exceptions.UserAlreadyExists:
+        raise HTTPException(
+            status.HTTP_400_BAD_REQUEST,
+            detail=ErrorCode.UPDATE_USER_EMAIL_ALREADY_EXISTS,
         )
-        res = await db.execute(stm)
-        number_matching_groups = res.scalar()
-        if number_matching_groups != len(user_update.new_group_ids):
-            raise HTTPException(
-                status_code=status.HTTP_404_NOT_FOUND,
-                detail=(
-                    "Not all requested groups (IDs: "
-                    f"{user_update.new_group_ids}) exist."
-                ),
-            )
-
-        for new_group_id in user_update.new_group_ids:
-            link = LinkUserGroup(user_id=user_id, group_id=new_group_id)
-            db.add(link)
-
-        try:
-            await db.commit()
-        except IntegrityError as e:
-            error_msg = (
-                f"Cannot link groups with IDs {user_update.new_group_ids} "
-                f"to user {user_id}. "
-                "Likely reason: one of these links already exists.\n"
-                f"Original error: {str(e)}"
-            )
-            logger.info(error_msg)
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail=error_msg,
-            )
-        patched_user = user_to_patch
-    elif edit_attributes:
-        # Modify user attributes
-        try:
-            user_update_without_groups = UserUpdate(
-                **user_update_dict_without_groups
-            )
-            user = await user_manager.update(
-                user_update_without_groups,
-                user_to_patch,
-                safe=False,
-                request=None,
-            )
-            validated_user = schemas.model_validate(UserOAuth, user)
-            patched_user = await db.get(
-                UserOAuth, validated_user.id, populate_existing=True
-            )
-        except exceptions.InvalidPasswordException as e:
-            raise HTTPException(
-                status_code=status.HTTP_400_BAD_REQUEST,
-                detail={
-                    "code": ErrorCode.UPDATE_USER_INVALID_PASSWORD,
-                    "reason": e.reason,
-                },
-            )
-        except exceptions.UserAlreadyExists:
-            raise HTTPException(
-                status.HTTP_400_BAD_REQUEST,
-                detail=ErrorCode.UPDATE_USER_EMAIL_ALREADY_EXISTS,
-            )
-    else:
-        # Nothing to do, just continue
-        patched_user = user_to_patch
 
     # Enrich user object with `group_ids_names` attribute
     patched_user_with_groups = await _get_single_user_with_groups(
@@ -203,6 +132,75 @@ async def list_users(
     return user_list
 
 
+@router_users.post("/users/{user_id}/set-groups/", response_model=UserRead)
+async def set_user_groups(
+    user_id: int,
+    user_update: UserUpdateGroups,
+    superuser: UserOAuth = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_async_db),
+) -> UserRead:
+
+    # Preliminary check that all objects exist in the db
+    user = await _user_or_404(user_id=user_id, db=db)
+    target_group_ids = user_update.group_ids
+    stm = select(func.count(UserGroup.id)).where(
+        UserGroup.id.in_(target_group_ids)
+    )
+    res = await db.execute(stm)
+    count = res.scalar()
+    if count != len(target_group_ids):
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"Some UserGroups in {target_group_ids} do not exist.",
+        )
+
+    # Check that default group is not being removed
+    default_group_id = await _get_default_usergroup_id(db=db)
+    if default_group_id not in target_group_ids:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                f"Cannot remove user from "
+                f"'{FRACTAL_DEFAULT_GROUP_NAME}' group.",
+            ),
+        )
+
+    # Prepare lists of links to be removed
+    res = await db.execute(
+        select(LinkUserGroup)
+        .where(LinkUserGroup.user_id == user_id)
+        .where(LinkUserGroup.group_id.not_in(target_group_ids))
+    )
+    links_to_remove = res.scalars().all()
+
+    # Prepare lists of links to be added
+    res = await db.execute(
+        select(LinkUserGroup.group_id)
+        .where(LinkUserGroup.user_id == user_id)
+        .where(LinkUserGroup.group_id.in_(target_group_ids))
+    )
+    ids_links_already_in = res.scalars().all()
+    ids_links_to_add = set(target_group_ids) - set(ids_links_already_in)
+
+    # Remove/create links as needed
+    for link in links_to_remove:
+        logger.info(
+            f"Removing LinkUserGroup with {link.user_id=} "
+            f"and {link.group_id=}."
+        )
+        await db.delete(link)
+    for group_id in ids_links_to_add:
+        logger.info(
+            f"Creating new LinkUserGroup with {user_id=} " f"and {group_id=}."
+        )
+        db.add(LinkUserGroup(user_id=user_id, group_id=group_id))
+    await db.commit()
+
+    user_with_groups = await _get_single_user_with_groups(user, db)
+
+    return user_with_groups
+
+
 @router_users.get(
     "/users/{user_id}/settings/", response_model=UserSettingsRead
 )
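
For illustration only: a minimal client-side sketch of the new `POST /auth/users/{user_id}/set-groups/` route added above. The base URL, user ID, group IDs, and token below are hypothetical; the payload shape (`group_ids`, unique and non-empty, including the default group's ID) follows the schema and checks shown in the diff.

import httpx

# Hypothetical values: adjust server URL, user id, group ids, and token.
BASE_URL = "http://localhost:8000"
TOKEN = "<superuser-token>"  # the route requires an active superuser

# Replace user 3's memberships with groups 1, 4 and 7.
# `group_ids` must be unique, non-empty, and contain the default group's id,
# otherwise the server responds with 404/422 as shown in the diff.
response = httpx.post(
    f"{BASE_URL}/auth/users/3/set-groups/",
    json={"group_ids": [1, 4, 7]},
    headers={"Authorization": f"Bearer {TOKEN}"},
)
print(response.status_code, response.json())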

fractal_server/app/runner/executors/slurm/_slurm_config.py
CHANGED
@@ -456,20 +456,3 @@ def _parse_mem_value(raw_mem: Union[str, int]) -> int:
 
     logger.debug(f"{info}, return {mem_MB}")
     return mem_MB
-
-
-def get_default_slurm_config():
-    """
-    Return a default `SlurmConfig` configuration object
-    """
-    return SlurmConfig(
-        partition="main",
-        cpus_per_task=1,
-        mem_per_task_MB=100,
-        target_cpus_per_job=1,
-        max_cpus_per_job=2,
-        target_mem_per_job=100,
-        max_mem_per_job=500,
-        target_num_jobs=2,
-        max_num_jobs=4,
-    )

fractal_server/app/runner/executors/slurm/ssh/executor.py
CHANGED
@@ -31,9 +31,11 @@ from ....filenames import SHUTDOWN_FILENAME
 from ....task_files import get_task_file_paths
 from ....task_files import TaskFiles
 from ....versions import get_versions
-from ...slurm._slurm_config import get_default_slurm_config
 from ...slurm._slurm_config import SlurmConfig
 from .._batching import heuristics
+from ..utils_executors import get_pickle_file_path
+from ..utils_executors import get_slurm_file_path
+from ..utils_executors import get_slurm_script_file_path
 from ._executor_wait_thread import FractalSlurmWaitThread
 from fractal_server.app.runner.components import _COMPONENT_KEY_
 from fractal_server.app.runner.compress_folder import compress_folder
@@ -223,132 +225,12 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
         with self.jobs_lock:
             self.map_jobid_to_slurm_files_local.pop(jobid)
 
-    def get_input_pickle_file_path_local(
-        self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
-    ) -> Path:
-
-        prefix = prefix or "cfut"
-        output = (
-            self.workflow_dir_local
-            / subfolder_name
-            / f"{prefix}_in_{arg}.pickle"
-        )
-        return output
-
-    def get_input_pickle_file_path_remote(
-        self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
-    ) -> Path:
-
-        prefix = prefix or "cfut"
-        output = (
-            self.workflow_dir_remote
-            / subfolder_name
-            / f"{prefix}_in_{arg}.pickle"
-        )
-        return output
-
-    def get_output_pickle_file_path_local(
-        self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
-    ) -> Path:
-        prefix = prefix or "cfut"
-        return (
-            self.workflow_dir_local
-            / subfolder_name
-            / f"{prefix}_out_{arg}.pickle"
-        )
-
-    def get_output_pickle_file_path_remote(
-        self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
-    ) -> Path:
-        prefix = prefix or "cfut"
-        return (
-            self.workflow_dir_remote
-            / subfolder_name
-            / f"{prefix}_out_{arg}.pickle"
-        )
-
-    def get_slurm_script_file_path_local(
-        self, *, subfolder_name: str, prefix: Optional[str] = None
-    ) -> Path:
-        prefix = prefix or "_temp"
-        return (
-            self.workflow_dir_local
-            / subfolder_name
-            / f"{prefix}_slurm_submit.sbatch"
-        )
-
-    def get_slurm_script_file_path_remote(
-        self, *, subfolder_name: str, prefix: Optional[str] = None
-    ) -> Path:
-        prefix = prefix or "_temp"
-        return (
-            self.workflow_dir_remote
-            / subfolder_name
-            / f"{prefix}_slurm_submit.sbatch"
-        )
-
-    def get_slurm_stdout_file_path_local(
-        self,
-        *,
-        subfolder_name: str,
-        arg: str = "%j",
-        prefix: Optional[str] = None,
-    ) -> Path:
-        prefix = prefix or "slurmpy.stdout"
-        return (
-            self.workflow_dir_local
-            / subfolder_name
-            / f"{prefix}_slurm_{arg}.out"
-        )
-
-    def get_slurm_stdout_file_path_remote(
-        self,
-        *,
-        subfolder_name: str,
-        arg: str = "%j",
-        prefix: Optional[str] = None,
-    ) -> Path:
-        prefix = prefix or "slurmpy.stdout"
-        return (
-            self.workflow_dir_remote
-            / subfolder_name
-            / f"{prefix}_slurm_{arg}.out"
-        )
-
-    def get_slurm_stderr_file_path_local(
-        self,
-        *,
-        subfolder_name: str,
-        arg: str = "%j",
-        prefix: Optional[str] = None,
-    ) -> Path:
-        prefix = prefix or "slurmpy.stderr"
-        return (
-            self.workflow_dir_local
-            / subfolder_name
-            / f"{prefix}_slurm_{arg}.err"
-        )
-
-    def get_slurm_stderr_file_path_remote(
-        self,
-        *,
-        subfolder_name: str,
-        arg: str = "%j",
-        prefix: Optional[str] = None,
-    ) -> Path:
-        prefix = prefix or "slurmpy.stderr"
-        return (
-            self.workflow_dir_remote
-            / subfolder_name
-            / f"{prefix}_slurm_{arg}.err"
-        )
-
     def submit(
         self,
         fun: Callable[..., Any],
         *fun_args: Sequence[Any],
-        slurm_config:
-        task_files:
+        slurm_config: SlurmConfig,
+        task_files: TaskFiles,
         **fun_kwargs: dict,
     ) -> Future:
         """
@@ -359,11 +241,9 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
             fun_args: Function positional arguments
             fun_kwargs: Function keyword arguments
             slurm_config:
-                A `SlurmConfig` object
-                `get_default_slurm_config()`.
+                A `SlurmConfig` object.
             task_files:
-                A `TaskFiles` object
-                `self.get_default_task_files()`.
+                A `TaskFiles` object.
 
         Returns:
             Future representing the execution of the current SLURM job.
@@ -375,12 +255,6 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
             logger.warning(error_msg)
             raise JobExecutionError(info=error_msg)
 
-        # Set defaults, if needed
-        if slurm_config is None:
-            slurm_config = get_default_slurm_config()
-        if task_files is None:
-            task_files = self.get_default_task_files()
-
         # Set slurm_file_prefix
         slurm_file_prefix = task_files.file_prefix
 
@@ -418,8 +292,8 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
         fn: Callable[..., Any],
         iterable: list[Sequence[Any]],
         *,
-        slurm_config:
-        task_files:
+        slurm_config: SlurmConfig,
+        task_files: TaskFiles,
     ):
         """
         Return an iterator with the results of several execution of a function
@@ -442,12 +316,9 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
                 An iterable such that each element is the list of arguments to
                 be passed to `fn`, as in `fn(*args)`.
             slurm_config:
-                A `SlurmConfig` object
-                `get_default_slurm_config()`.
+                A `SlurmConfig` object.
             task_files:
-                A `TaskFiles` object
-                `self.get_default_task_files()`.
-
+                A `TaskFiles` object.
         """
 
         # Do not continue if auxiliary thread was shut down
@@ -472,12 +343,6 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
         # self._exception
         del fut
 
-        # Set defaults, if needed
-        if not slurm_config:
-            slurm_config = get_default_slurm_config()
-        if task_files is None:
-            task_files = self.get_default_task_files()
-
         # Include common_script_lines in extra_lines
         logger.debug(
             f"Adding {self.common_script_lines=} to "
@@ -710,63 +575,80 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
                 f"Missing folder {subfolder_path.as_posix()}."
             )
 
-        # Define I/O pickle file local/remote paths
         job.input_pickle_files_local = tuple(
-
+            get_pickle_file_path(
                 arg=job.workerids[ind],
+                workflow_dir=self.workflow_dir_local,
                 subfolder_name=job.wftask_subfolder_name,
+                in_or_out="in",
                 prefix=job.wftask_file_prefixes[ind],
            )
            for ind in range(job.num_tasks_tot)
        )
+
        job.input_pickle_files_remote = tuple(
-
+            get_pickle_file_path(
                arg=job.workerids[ind],
+                workflow_dir=self.workflow_dir_remote,
                subfolder_name=job.wftask_subfolder_name,
+                in_or_out="in",
                prefix=job.wftask_file_prefixes[ind],
            )
            for ind in range(job.num_tasks_tot)
        )
        job.output_pickle_files_local = tuple(
-
+            get_pickle_file_path(
                arg=job.workerids[ind],
+                workflow_dir=self.workflow_dir_local,
                subfolder_name=job.wftask_subfolder_name,
+                in_or_out="out",
                prefix=job.wftask_file_prefixes[ind],
            )
            for ind in range(job.num_tasks_tot)
        )
        job.output_pickle_files_remote = tuple(
-
+            get_pickle_file_path(
                arg=job.workerids[ind],
+                workflow_dir=self.workflow_dir_remote,
                subfolder_name=job.wftask_subfolder_name,
+                in_or_out="out",
                prefix=job.wftask_file_prefixes[ind],
            )
            for ind in range(job.num_tasks_tot)
        )
-
-
-
+        # define slurm-job file local/remote paths
+        job.slurm_script_local = get_slurm_script_file_path(
+            workflow_dir=self.workflow_dir_local,
            subfolder_name=job.wftask_subfolder_name,
            prefix=job.slurm_file_prefix,
        )
-        job.slurm_script_remote =
+        job.slurm_script_remote = get_slurm_script_file_path(
+            workflow_dir=self.workflow_dir_remote,
            subfolder_name=job.wftask_subfolder_name,
            prefix=job.slurm_file_prefix,
        )
-        job.slurm_stdout_local =
+        job.slurm_stdout_local = get_slurm_file_path(
+            workflow_dir=self.workflow_dir_local,
            subfolder_name=job.wftask_subfolder_name,
+            out_or_err="out",
            prefix=job.slurm_file_prefix,
        )
-        job.slurm_stdout_remote =
+        job.slurm_stdout_remote = get_slurm_file_path(
+            workflow_dir=self.workflow_dir_remote,
            subfolder_name=job.wftask_subfolder_name,
+            out_or_err="out",
            prefix=job.slurm_file_prefix,
        )
-        job.slurm_stderr_local =
+        job.slurm_stderr_local = get_slurm_file_path(
+            workflow_dir=self.workflow_dir_local,
            subfolder_name=job.wftask_subfolder_name,
+            out_or_err="err",
            prefix=job.slurm_file_prefix,
        )
-        job.slurm_stderr_remote =
+        job.slurm_stderr_remote = get_slurm_file_path(
+            workflow_dir=self.workflow_dir_remote,
            subfolder_name=job.wftask_subfolder_name,
+            out_or_err="err",
            prefix=job.slurm_file_prefix,
        )
 
@@ -1294,7 +1176,6 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
         slurm_err_path: str,
         slurm_config: SlurmConfig,
     ):
-
         num_tasks_max_running = slurm_config.parallel_tasks_per_job
         mem_per_task_MB = slurm_config.mem_per_task_MB
 
@@ -1346,19 +1227,6 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
         script = "\n".join(script_lines)
         return script
 
-    def get_default_task_files(self) -> TaskFiles:
-        """
-        This will be called when self.submit or self.map are called from
-        outside fractal-server, and then lack some optional arguments.
-        """
-        task_files = TaskFiles(
-            workflow_dir_local=self.workflow_dir_local,
-            workflow_dir_remote=self.workflow_dir_remote,
-            task_order=None,
-            task_name="name",
-        )
-        return task_files
-
     def shutdown(self, wait=True, *, cancel_futures=False):
         """
         Clean up all executor variables. Note that this function is executed on

fractal_server/app/runner/executors/slurm/sudo/executor.py
CHANGED
@@ -37,9 +37,11 @@ from ....exceptions import TaskExecutionError
 from ....filenames import SHUTDOWN_FILENAME
 from ....task_files import get_task_file_paths
 from ....task_files import TaskFiles
-from ...slurm._slurm_config import get_default_slurm_config
 from ...slurm._slurm_config import SlurmConfig
 from .._batching import heuristics
+from ..utils_executors import get_pickle_file_path
+from ..utils_executors import get_slurm_file_path
+from ..utils_executors import get_slurm_script_file_path
 from ._executor_wait_thread import FractalSlurmWaitThread
 from ._subprocess_run_as_user import _glob_as_user
 from ._subprocess_run_as_user import _glob_as_user_strict
@@ -305,72 +307,12 @@ class FractalSlurmExecutor(SlurmExecutor):
         with self.jobs_lock:
             self.map_jobid_to_slurm_files.pop(jobid)
 
-    def get_input_pickle_file_path(
-        self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
-    ) -> Path:
-
-        prefix = prefix or "cfut"
-        output = (
-            self.workflow_dir_local
-            / subfolder_name
-            / f"{prefix}_in_{arg}.pickle"
-        )
-        return output
-
-    def get_output_pickle_file_path(
-        self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
-    ) -> Path:
-        prefix = prefix or "cfut"
-        return (
-            self.workflow_dir_remote
-            / subfolder_name
-            / f"{prefix}_out_{arg}.pickle"
-        )
-
-    def get_slurm_script_file_path(
-        self, *, subfolder_name: str, prefix: Optional[str] = None
-    ) -> Path:
-        prefix = prefix or "_temp"
-        return (
-            self.workflow_dir_local
-            / subfolder_name
-            / f"{prefix}_slurm_submit.sbatch"
-        )
-
-    def get_slurm_stdout_file_path(
-        self,
-        *,
-        subfolder_name: str,
-        arg: str = "%j",
-        prefix: Optional[str] = None,
-    ) -> Path:
-        prefix = prefix or "slurmpy.stdout"
-        return (
-            self.workflow_dir_remote
-            / subfolder_name
-            / f"{prefix}_slurm_{arg}.out"
-        )
-
-    def get_slurm_stderr_file_path(
-        self,
-        *,
-        subfolder_name: str,
-        arg: str = "%j",
-        prefix: Optional[str] = None,
-    ) -> Path:
-        prefix = prefix or "slurmpy.stderr"
-        return (
-            self.workflow_dir_remote
-            / subfolder_name
-            / f"{prefix}_slurm_{arg}.err"
-        )
-
     def submit(
         self,
         fun: Callable[..., Any],
         *fun_args: Sequence[Any],
-        slurm_config:
-        task_files:
+        slurm_config: SlurmConfig,
+        task_files: TaskFiles,
         **fun_kwargs: dict,
     ) -> Future:
         """
@@ -381,22 +323,14 @@ class FractalSlurmExecutor(SlurmExecutor):
             fun_args: Function positional arguments
             fun_kwargs: Function keyword arguments
             slurm_config:
-                A `SlurmConfig` object
-                `get_default_slurm_config()`.
+                A `SlurmConfig` object.
             task_files:
-                A `TaskFiles` object
-                `self.get_default_task_files()`.
+                A `TaskFiles` object.
 
         Returns:
             Future representing the execution of the current SLURM job.
         """
 
-        # Set defaults, if needed
-        if slurm_config is None:
-            slurm_config = get_default_slurm_config()
-        if task_files is None:
-            task_files = self.get_default_task_files()
-
         # Set slurm_file_prefix
         slurm_file_prefix = task_files.file_prefix
 
@@ -431,8 +365,8 @@ class FractalSlurmExecutor(SlurmExecutor):
         fn: Callable[..., Any],
         iterable: list[Sequence[Any]],
         *,
-        slurm_config:
-        task_files:
+        slurm_config: SlurmConfig,
+        task_files: TaskFiles,
     ):
         """
         Return an iterator with the results of several execution of a function
@@ -455,11 +389,9 @@ class FractalSlurmExecutor(SlurmExecutor):
                 An iterable such that each element is the list of arguments to
                 be passed to `fn`, as in `fn(*args)`.
             slurm_config:
-                A `SlurmConfig` object
-                `get_default_slurm_config()`.
+                A `SlurmConfig` object.
             task_files:
-                A `TaskFiles` object
-                `self.get_default_task_files()`.
+                A `TaskFiles` object.
 
         """
 
@@ -479,12 +411,6 @@ class FractalSlurmExecutor(SlurmExecutor):
         # self._exception
         del fut
 
-        # Set defaults, if needed
-        if not slurm_config:
-            slurm_config = get_default_slurm_config()
-        if task_files is None:
-            task_files = self.get_default_task_files()
-
         # Include common_script_lines in extra_lines
         logger.debug(
             f"Adding {self.common_script_lines=} to "
@@ -700,39 +626,46 @@ class FractalSlurmExecutor(SlurmExecutor):
                 f"Missing folder {subfolder_path.as_posix()}."
             )
 
-        # Define I/O pickle file names/paths
         job.input_pickle_files = tuple(
-
+            get_pickle_file_path(
                arg=job.workerids[ind],
+                workflow_dir=self.workflow_dir_local,
                subfolder_name=job.wftask_subfolder_name,
+                in_or_out="in",
                prefix=job.wftask_file_prefixes[ind],
            )
            for ind in range(job.num_tasks_tot)
        )
        job.output_pickle_files = tuple(
-
+            get_pickle_file_path(
                arg=job.workerids[ind],
+                workflow_dir=self.workflow_dir_remote,
                subfolder_name=job.wftask_subfolder_name,
+                in_or_out="out",
                prefix=job.wftask_file_prefixes[ind],
            )
            for ind in range(job.num_tasks_tot)
        )
-
        # Define SLURM-job file names/paths
-        job.slurm_script =
+        job.slurm_script = get_slurm_script_file_path(
+            workflow_dir=self.workflow_dir_local,
            subfolder_name=job.wftask_subfolder_name,
            prefix=job.slurm_file_prefix,
        )
-        job.slurm_stdout =
+        job.slurm_stdout = get_slurm_file_path(
+            workflow_dir=self.workflow_dir_remote,
            subfolder_name=job.wftask_subfolder_name,
+            out_or_err="out",
            prefix=job.slurm_file_prefix,
        )
-        job.slurm_stderr =
+        job.slurm_stderr = get_slurm_file_path(
+            workflow_dir=self.workflow_dir_remote,
            subfolder_name=job.wftask_subfolder_name,
+            out_or_err="err",
            prefix=job.slurm_file_prefix,
        )
 
-        # Dump serialized versions+function+args+kwargs to pickle
+        # Dump serialized versions+function+args+kwargs to pickle
         versions = dict(
             python=sys.version_info[:3],
             cloudpickle=cloudpickle.__version__,
@@ -824,7 +757,6 @@ class FractalSlurmExecutor(SlurmExecutor):
         """
         # Handle all uncaught exceptions in this broad try/except block
         try:
-
             # Retrieve job
             with self.jobs_lock:
                 try:
@@ -1039,7 +971,6 @@ class FractalSlurmExecutor(SlurmExecutor):
         )
 
         for prefix in prefixes:
-
             if prefix == job.slurm_file_prefix:
                 files_to_copy = _glob_as_user(
                     folder=str(self.workflow_dir_remote / subfolder_name),
@@ -1177,7 +1108,6 @@ class FractalSlurmExecutor(SlurmExecutor):
         slurm_err_path: str,
         slurm_config: SlurmConfig,
     ):
-
         num_tasks_max_running = slurm_config.parallel_tasks_per_job
         mem_per_task_MB = slurm_config.mem_per_task_MB
 
@@ -1229,19 +1159,6 @@ class FractalSlurmExecutor(SlurmExecutor):
         script = "\n".join(script_lines)
         return script
 
-    def get_default_task_files(self) -> TaskFiles:
-        """
-        This will be called when self.submit or self.map are called from
-        outside fractal-server, and then lack some optional arguments.
-        """
-        task_files = TaskFiles(
-            workflow_dir_local=self.workflow_dir_local,
-            workflow_dir_remote=self.workflow_dir_remote,
-            task_order=None,
-            task_name="name",
-        )
-        return task_files
-
     def shutdown(self, wait=True, *, cancel_futures=False):
         """
         Clean up all executor variables. Note that this function is executed on

fractal_server/app/runner/executors/slurm/utils_executors.py
ADDED
@@ -0,0 +1,58 @@
+from pathlib import Path
+from typing import Literal
+from typing import Optional
+
+
+def get_pickle_file_path(
+    *,
+    arg: str,
+    workflow_dir: Path,
+    subfolder_name: str,
+    in_or_out: Literal["in", "out"],
+    prefix: str,
+) -> Path:
+    if in_or_out in ["in", "out"]:
+        output = (
+            workflow_dir
+            / subfolder_name
+            / f"{prefix}_{in_or_out}_{arg}.pickle"
+        )
+        return output
+    else:
+        raise ValueError(
+            f"Missing or unexpected value in_or_out argument, {in_or_out=}"
+        )
+
+
+def get_slurm_script_file_path(
+    *, workflow_dir: Path, subfolder_name: str, prefix: Optional[str] = None
+) -> Path:
+    prefix = prefix or "_temp"
+    return workflow_dir / subfolder_name / f"{prefix}_slurm_submit.sbatch"
+
+
+def get_slurm_file_path(
+    *,
+    workflow_dir: Path,
+    subfolder_name: str,
+    arg: str = "%j",
+    out_or_err: Literal["out", "err"],
+    prefix: str,
+) -> Path:
+    if out_or_err == "out":
+        return (
+            workflow_dir
+            / subfolder_name
+            / f"{prefix}_slurm_{arg}.{out_or_err}"
+        )
+    elif out_or_err == "err":
+        return (
+            workflow_dir
+            / subfolder_name
+            / f"{prefix}_slurm_{arg}.{out_or_err}"
+        )
+    else:
+        raise ValueError(
+            "Missing or unexpected value out_or_err argument, "
+            f"{out_or_err=}"
+        )
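
A minimal usage sketch of the three shared helpers defined in the new module above (the workflow directory, subfolder name, and prefix values are made up; the signatures and file-name patterns come from the module itself):

from pathlib import Path

from fractal_server.app.runner.executors.slurm.utils_executors import (
    get_pickle_file_path,
    get_slurm_file_path,
    get_slurm_script_file_path,
)

workflow_dir = Path("/tmp/workflow")  # hypothetical workflow directory

# -> /tmp/workflow/7_mytask/cfut_in_0.pickle
print(
    get_pickle_file_path(
        arg="0",
        workflow_dir=workflow_dir,
        subfolder_name="7_mytask",
        in_or_out="in",
        prefix="cfut",
    )
)

# -> /tmp/workflow/7_mytask/_temp_slurm_submit.sbatch (default prefix "_temp")
print(
    get_slurm_script_file_path(
        workflow_dir=workflow_dir, subfolder_name="7_mytask"
    )
)

# -> /tmp/workflow/7_mytask/myprefix_slurm_%j.err ("%j" is SLURM's job-ID placeholder)
print(
    get_slurm_file_path(
        workflow_dir=workflow_dir,
        subfolder_name="7_mytask",
        out_or_err="err",
        prefix="myprefix",
    )
)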

fractal_server/app/schemas/user.py
CHANGED
@@ -3,6 +3,7 @@ from typing import Optional
 from fastapi_users import schemas
 from pydantic import BaseModel
 from pydantic import Extra
+from pydantic import Field
 from pydantic import validator
 
 from ._validators import val_unique_list
@@ -11,8 +12,8 @@ from ._validators import valstr
 __all__ = (
     "UserRead",
     "UserUpdate",
+    "UserUpdateGroups",
     "UserCreate",
-    "UserUpdateWithNewGroupIds",
 )
 
 
@@ -45,7 +46,7 @@ class UserRead(schemas.BaseUser[int]):
     oauth_accounts: list[OAuthAccountRead]
 
 
-class UserUpdate(schemas.BaseUserUpdate):
+class UserUpdate(schemas.BaseUserUpdate, extra=Extra.forbid):
     """
     Schema for `User` update.
 
@@ -82,14 +83,6 @@ class UserUpdateStrict(BaseModel, extra=Extra.forbid):
     pass
 
 
-class UserUpdateWithNewGroupIds(UserUpdate):
-    new_group_ids: Optional[list[int]] = None
-
-    _val_unique = validator("new_group_ids", allow_reuse=True)(
-        val_unique_list("new_group_ids")
-    )
-
-
 class UserCreate(schemas.BaseUserCreate):
     """
     Schema for `User` creation.
@@ -103,3 +96,16 @@ class UserCreate(schemas.BaseUserCreate):
     # Validators
 
     _username = validator("username", allow_reuse=True)(valstr("username"))
+
+
+class UserUpdateGroups(BaseModel, extra=Extra.forbid):
+    """
+    Schema for `POST /auth/users/{user_id}/set-groups/`
+
+    """
+
+    group_ids: list[int] = Field(min_items=1)
+
+    _group_ids = validator("group_ids", allow_reuse=True)(
+        val_unique_list("group_ids")
+    )
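
A short sketch of how the new `UserUpdateGroups` schema validates request bodies (pydantic-v1 style, matching the `validator`/`Extra` usage above; the values are arbitrary examples):

from pydantic import ValidationError

from fractal_server.app.schemas.user import UserUpdateGroups

# Valid payload: a non-empty list of group IDs
print(UserUpdateGroups(group_ids=[1, 2, 3]))

# Empty lists are rejected by `Field(min_items=1)`
try:
    UserUpdateGroups(group_ids=[])
except ValidationError as e:
    print(e)

# Unknown fields are rejected because of `extra=Extra.forbid`
try:
    UserUpdateGroups(group_ids=[1], new_group_ids=[2])
except ValidationError as e:
    print(e)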

fractal_server/app/security/__init__.py
CHANGED
@@ -43,6 +43,9 @@ from fastapi_users.exceptions import UserAlreadyExists
 from fastapi_users.models import ID
 from fastapi_users.models import OAP
 from fastapi_users.models import UP
+from fastapi_users.password import PasswordHelper
+from pwdlib import PasswordHash
+from pwdlib.hashers.bcrypt import BcryptHasher
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.orm import selectinload
 from sqlmodel import func
@@ -177,7 +180,21 @@ async def get_user_db(
     yield SQLModelUserDatabaseAsync(session, UserOAuth, OAuthAccount)
 
 
+password_hash = PasswordHash(hashers=(BcryptHasher(),))
+password_helper = PasswordHelper(password_hash=password_hash)
+
+
 class UserManager(IntegerIDMixin, BaseUserManager[UserOAuth, int]):
+    def __init__(self, user_db):
+        """
+        Override `__init__` of `BaseUserManager` to define custom
+        `password_helper`.
+        """
+        super().__init__(
+            user_db=user_db,
+            password_helper=password_helper,
+        )
+
     async def validate_password(self, password: str, user: UserOAuth) -> None:
         # check password length
         min_length = 4
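
A small standalone sketch of the bcrypt-only hashing setup introduced above, using the same `pwdlib` objects as the diff. The password value is an arbitrary example, and the `hash`/`verify` calls are assumed to follow pwdlib's documented API:

from pwdlib import PasswordHash
from pwdlib.hashers.bcrypt import BcryptHasher

# Same construction as in fractal_server/app/security/__init__.py
password_hash = PasswordHash(hashers=(BcryptHasher(),))

hashed = password_hash.hash("an-example-password")
print(hashed)  # bcrypt hash string
print(password_hash.verify("an-example-password", hashed))  # expected: True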

{fractal_server-2.9.0a10.dist-info → fractal_server-2.9.0a12.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fractal-server
-Version: 2.9.0a10
+Version: 2.9.0a12
 Summary: Server component of the Fractal analytics platform
 Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause
@@ -13,12 +13,11 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: alembic (>=1.13.1,<2.0.0)
-Requires-Dist: bcrypt (==4.0.1)
 Requires-Dist: cloudpickle (>=3.0.0,<3.1.0)
 Requires-Dist: clusterfutures (>=0.5,<0.6)
 Requires-Dist: fabric (>=3.2.2,<4.0.0)
 Requires-Dist: fastapi (>=0.115.0,<0.116.0)
-Requires-Dist: fastapi-users[oauth] (>=
+Requires-Dist: fastapi-users[oauth] (>=14,<15)
 Requires-Dist: gunicorn (>=21.2,<23.0)
 Requires-Dist: packaging (>=23.2,<24.0)
 Requires-Dist: psutil (>=5.9.8,<6.0.0)

{fractal_server-2.9.0a10.dist-info → fractal_server-2.9.0a12.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-fractal_server/__init__.py,sha256=
+fractal_server/__init__.py,sha256=JIykE6nusWjfkKUyAj2-Gbeh8nJP3e4L6LX9XBNXH7M,25
 fractal_server/__main__.py,sha256=dEkCfzLLQrIlxsGC-HBfoR-RBMWnJDgNrxYTyzmE9c0,6146
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -63,12 +63,12 @@ fractal_server/app/routes/api/v2/workflowtask.py,sha256=ciHTwXXFiFnMF7ZpJ3Xs0q6Y
 fractal_server/app/routes/auth/__init__.py,sha256=fao6CS0WiAjHDTvBzgBVV_bSXFpEAeDBF6Z6q7rRkPc,1658
 fractal_server/app/routes/auth/_aux_auth.py,sha256=ifkNocTYatBSMYGwiR14qohmvR9SfMldceiEj6uJBrU,4783
 fractal_server/app/routes/auth/current_user.py,sha256=I3aVY5etWAJ_SH6t65Mj5TjvB2X8sAGuu1KG7FxLyPU,5883
-fractal_server/app/routes/auth/group.py,sha256=
+fractal_server/app/routes/auth/group.py,sha256=EBwR-eiTfHSZUbsbrhjKTWTiwPkGPLFYhuHi7ifDbfY,8358
 fractal_server/app/routes/auth/login.py,sha256=tSu6OBLOieoBtMZB4JkBAdEgH2Y8KqPGSbwy7NIypIo,566
 fractal_server/app/routes/auth/oauth.py,sha256=AnFHbjqL2AgBX3eksI931xD6RTtmbciHBEuGf9YJLjU,1895
 fractal_server/app/routes/auth/register.py,sha256=DlHq79iOvGd_gt2v9uwtsqIKeO6i_GKaW59VIkllPqY,587
 fractal_server/app/routes/auth/router.py,sha256=tzJrygXFZlmV_uWelVqTOJMEH-3Fr7ydwlgx1LxRjxY,527
-fractal_server/app/routes/auth/users.py,sha256=
+fractal_server/app/routes/auth/users.py,sha256=kZv-Ls224WBFiuvVeM584LhYq_BLz6HQ9HpWbWQxRRM,7808
 fractal_server/app/routes/aux/__init__.py,sha256=LR4bR7RunHAK6jc9IR2bReQd-BdXADdnDccXI4uGeGY,731
 fractal_server/app/routes/aux/_job.py,sha256=q-RCiW17yXnZKAC_0La52RLvhqhxuvbgQJ2MlGXOj8A,702
 fractal_server/app/routes/aux/_runner.py,sha256=FdCVla5DxGAZ__aB7Z8dEJzD_RIeh5tftjrPyqkr8N8,895
@@ -82,17 +82,18 @@ fractal_server/app/runner/exceptions.py,sha256=_qZ_t8O4umAdJ1ikockiF5rDJuxnEskrG
 fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm/_batching.py,sha256=3mfeFuYm3UA4EXh4VWuqZTF-dcINECZgTHoPOaOszDo,8840
-fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=
+fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=RkFrp9bltfVxrp5Ei2KuCMEft6q3mBArTvSBvtHA2n4,15682
 fractal_server/app/runner/executors/slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77JRK82g_2hx0iBKTiMghuIo,5852
 fractal_server/app/runner/executors/slurm/ssh/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
 fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py,sha256=bKo5Ja0IGxJWpPWyh9dN0AG-PwzTDZzD5LyaEHB3YU4,3742
 fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py,sha256=rwlqZzoGo4SAb4nSlFjsQJdaCgfM1J6YGcjb8yYxlqc,4506
-fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=
+fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=yRn5v0ZUX_dQdN1MN8gjRBMCXVWZ_PZgcI2wnWXIAO8,54070
 fractal_server/app/runner/executors/slurm/sudo/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
 fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py,sha256=wAgwpVcr6JIslKHOuS0FhRa_6T1KCManyRJqA-fifzw,1909
 fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py,sha256=z5LlhaiqAb8pHsF1WwdzXN39C5anQmwjo1rSQgtRAYE,4422
 fractal_server/app/runner/executors/slurm/sudo/_subprocess_run_as_user.py,sha256=g8wqUjSicN17UZVXlfaMomYZ-xOIbBu1oE7HdJTzfvw,5218
-fractal_server/app/runner/executors/slurm/sudo/executor.py,sha256=
+fractal_server/app/runner/executors/slurm/sudo/executor.py,sha256=CAIPFMmsjQLxmjN8Kdpq0OlZIX9PZIiRo0XO1quKWEM,46495
+fractal_server/app/runner/executors/slurm/utils_executors.py,sha256=naPyJI0I3lD-sYHbSXbMFGUBK4h_SggA5V91Z1Ch1Xg,1416
 fractal_server/app/runner/extract_archive.py,sha256=tLpjDrX47OjTNhhoWvm6iNukg8KoieWyTb7ZfvE9eWU,2483
 fractal_server/app/runner/filenames.py,sha256=9lwu3yB4C67yiijYw8XIKaLFn3mJUt6_TCyVFM_aZUQ,206
 fractal_server/app/runner/run_subprocess.py,sha256=c3JbYXq3hX2aaflQU19qJ5Xs6J6oXGNvnTEoAfv2bxc,959
@@ -135,7 +136,7 @@ fractal_server/app/runner/v2/task_interface.py,sha256=hT3p-bRGsLNAR_dNv_PYFoqzIF
 fractal_server/app/runner/versions.py,sha256=dSaPRWqmFPHjg20kTCHmi_dmGNcCETflDtDLronNanU,852
 fractal_server/app/schemas/__init__.py,sha256=stURAU_t3AOBaH0HSUbV-GKhlPKngnnIMoqWc3orFyI,135
 fractal_server/app/schemas/_validators.py,sha256=T5EswIJAJRvawfzqWtPcN2INAfiBXyE4m0iwQm4ht-0,3149
-fractal_server/app/schemas/user.py,sha256=
+fractal_server/app/schemas/user.py,sha256=icjox9gK_invW44Nh_L4CvqfRa92qghyQhmevyg09nQ,2243
 fractal_server/app/schemas/user_group.py,sha256=t30Kd07PY43G_AqFDb8vjdInTeLeU9WvFZDx8fVLPSI,1750
 fractal_server/app/schemas/user_settings.py,sha256=TalISeEfCrtN8LgqbLx1Q8ZPoeiZnbksg5NYAVzkIqY,3527
 fractal_server/app/schemas/v1/__init__.py,sha256=CrBGgBhoemCvmZ70ZUchM-jfVAICnoa7AjZBAtL2UB0,1852
@@ -160,7 +161,7 @@ fractal_server/app/schemas/v2/task_collection.py,sha256=yHpCRxoj6tKqCiQfUjaTj8Sf
 fractal_server/app/schemas/v2/task_group.py,sha256=fSjdLbClrpmrPj5hFZMu9DoJW4Y33EnbOh0HjMBsGVc,3784
 fractal_server/app/schemas/v2/workflow.py,sha256=-KWvXnbHBFA3pj5n7mfSyLKJQSqkJmoziIEe7mpLl3M,1875
 fractal_server/app/schemas/v2/workflowtask.py,sha256=vDdMktYbHeYBgB5OuWSv6wRPRXWqvetkeqQ7IC5YtfA,5751
-fractal_server/app/security/__init__.py,sha256=
+fractal_server/app/security/__init__.py,sha256=MlWVrLFPj9M2Gug-k8yATM-Cw066RugVU4KK6kMRbnQ,13019
 fractal_server/app/user_settings.py,sha256=aZgQ3i0JkHfgwLGW1ee6Gzr1ae3IioFfJKKSsSS8Svk,1312
 fractal_server/config.py,sha256=Bk6EFKnU07sjgThf2NVEqrFAx9F4s0BfCvDKtWHzJTc,23217
 fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
@@ -238,8 +239,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=C5WLuY3uGG2s53OEL-__H35-fmSlgu
 fractal_server/urls.py,sha256=5o_qq7PzKKbwq12NHSQZDmDitn5RAOeQ4xufu-2v9Zk,448
 fractal_server/utils.py,sha256=utvmBx8K9I8hRWFquxna2pBaOqe0JifDL_NVPmihEJI,3525
 fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
-fractal_server-2.9.0a10.dist-info/LICENSE,sha256=
-fractal_server-2.9.0a10.dist-info/METADATA,sha256=
-fractal_server-2.9.0a10.dist-info/WHEEL,sha256=
-fractal_server-2.9.0a10.dist-info/entry_points.txt,sha256=
-fractal_server-2.9.0a10.dist-info/RECORD,,
+fractal_server-2.9.0a12.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.9.0a12.dist-info/METADATA,sha256=4_h_wWBEo_p6k4KxkzUmwzSO54gtToDfUVM2CwpVz48,4546
+fractal_server-2.9.0a12.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+fractal_server-2.9.0a12.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.9.0a12.dist-info/RECORD,,

{fractal_server-2.9.0a10.dist-info → fractal_server-2.9.0a12.dist-info}/LICENSE
File without changes

{fractal_server-2.9.0a10.dist-info → fractal_server-2.9.0a12.dist-info}/WHEEL
File without changes

{fractal_server-2.9.0a10.dist-info → fractal_server-2.9.0a12.dist-info}/entry_points.txt
File without changes