fractal-server 2.9.0a12__py3-none-any.whl → 2.9.2__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/security.py +0 -1
- fractal_server/app/models/user_settings.py +1 -2
- fractal_server/app/routes/api/v1/project.py +7 -1
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +2 -2
- fractal_server/app/routes/api/v2/submit.py +9 -5
- fractal_server/app/routes/api/v2/workflow_import.py +4 -7
- fractal_server/app/runner/executors/slurm/_slurm_config.py +2 -2
- fractal_server/app/runner/executors/slurm/ssh/executor.py +4 -3
- fractal_server/app/runner/v1/_common.py +1 -2
- fractal_server/app/runner/v2/_local/__init__.py +0 -4
- fractal_server/app/runner/v2/_slurm_ssh/__init__.py +0 -6
- fractal_server/app/runner/v2/_slurm_sudo/__init__.py +1 -7
- fractal_server/app/schemas/user_settings.py +0 -18
- fractal_server/app/user_settings.py +2 -2
- fractal_server/config.py +1 -2
- fractal_server/logger.py +2 -4
- fractal_server/migrations/versions/316140ff7ee1_remove_usersettings_cache_dir.py +36 -0
- fractal_server/ssh/_fabric.py +9 -11
- fractal_server/tasks/v2/ssh/_utils.py +1 -1
- fractal_server/tasks/v2/ssh/collect.py +19 -21
- fractal_server/tasks/v2/utils_database.py +6 -3
- fractal_server/tasks/v2/utils_python_interpreter.py +2 -2
- fractal_server/tasks/v2/utils_templates.py +3 -3
- {fractal_server-2.9.0a12.dist-info → fractal_server-2.9.2.dist-info}/METADATA +1 -1
- {fractal_server-2.9.0a12.dist-info → fractal_server-2.9.2.dist-info}/RECORD +29 -28
- {fractal_server-2.9.0a12.dist-info → fractal_server-2.9.2.dist-info}/LICENSE +0 -0
- {fractal_server-2.9.0a12.dist-info → fractal_server-2.9.2.dist-info}/WHEEL +0 -0
- {fractal_server-2.9.0a12.dist-info → fractal_server-2.9.2.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.9.0a12"
+__VERSION__ = "2.9.2"

fractal_server/app/models/user_settings.py
CHANGED
@@ -20,7 +20,7 @@ class UserSettings(SQLModel, table=True):
 ssh_tasks_dir: Task-venvs base folder on `ssh_host`.
 ssh_jobs_dir: Jobs base folder on `ssh_host`.
 slurm_user: Local user, to be impersonated via `sudo -u`
-
+project_dir: Folder where `slurm_user` can write.
 """

 __tablename__ = "user_settings"
@@ -35,5 +35,4 @@ class UserSettings(SQLModel, table=True):
 ssh_tasks_dir: Optional[str] = None
 ssh_jobs_dir: Optional[str] = None
 slurm_user: Optional[str] = None
-cache_dir: Optional[str] = None
 project_dir: Optional[str] = None

fractal_server/app/routes/api/v1/project.py
CHANGED
@@ -448,6 +448,12 @@ async def apply_workflow(
 await db.commit()
 await db.refresh(job)

+cache_dir = (
+f"{user_settings.project_dir}/.fractal_cache"
+if user_settings.project_dir is not None
+else None
+)
+
 background_tasks.add_task(
 submit_workflow,
 workflow_id=workflow.id,
@@ -456,7 +462,7 @@ async def apply_workflow(
 job_id=job.id,
 worker_init=apply_workflow.worker_init,
 slurm_user=user_settings.slurm_user,
-user_cache_dir=
+user_cache_dir=cache_dir,
 )
 request.app.state.jobsV1.append(job.id)
 logger.info(

fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py
CHANGED
@@ -182,8 +182,8 @@ async def check_no_submitted_job(
 Find submitted jobs which include tasks from a given task group.

 Arguments:
-
-db:
+task_group_id: ID of the `TaskGroupV2` object.
+db: Asynchronous database session.
 """
 stm = (
 select(func.count(JobV2.id))

fractal_server/app/routes/api/v2/submit.py
CHANGED
@@ -186,18 +186,22 @@ async def apply_workflow(
 f"_{timestamp_string}"
 )

+cache_dir = (
+Path(user_settings.project_dir) / ".fractal_cache"
+if user_settings.project_dir is not None
+else None
+)
+
 # Define user-side job directory
 if FRACTAL_RUNNER_BACKEND == "local":
 WORKFLOW_DIR_REMOTE = WORKFLOW_DIR_LOCAL
 elif FRACTAL_RUNNER_BACKEND == "local_experimental":
 WORKFLOW_DIR_REMOTE = WORKFLOW_DIR_LOCAL
 elif FRACTAL_RUNNER_BACKEND == "slurm":
-WORKFLOW_DIR_REMOTE = (
-Path(user_settings.cache_dir) / f"{WORKFLOW_DIR_LOCAL.name}"
-)
+WORKFLOW_DIR_REMOTE = cache_dir / WORKFLOW_DIR_LOCAL.name
 elif FRACTAL_RUNNER_BACKEND == "slurm_ssh":
 WORKFLOW_DIR_REMOTE = (
-Path(user_settings.ssh_jobs_dir) /
+Path(user_settings.ssh_jobs_dir) / WORKFLOW_DIR_LOCAL.name
 )

 # Update job folders in the db
@@ -229,7 +233,7 @@ async def apply_workflow(
 user_settings=user_settings,
 worker_init=job.worker_init,
 slurm_user=user_settings.slurm_user,
-user_cache_dir=
+user_cache_dir=cache_dir.as_posix() if cache_dir else None,
 fractal_ssh=fractal_ssh,
 )
 request.app.state.jobsV2.append(job.id)
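
The change common to the V1 and V2 submission routes above is that the user-side cache directory is no longer read from a user-supplied `cache_dir` setting but derived from `UserSettings.project_dir`. A minimal standalone sketch of that derivation (the helper name and the example path are illustrative, not part of the codebase):

```python
from pathlib import Path
from typing import Optional


def derive_cache_dir(project_dir: Optional[str]) -> Optional[Path]:
    """Mirror the logic added in the diff: a fixed `.fractal_cache`
    subfolder of the user's `project_dir`, or `None` when unset."""
    if project_dir is None:
        return None
    return Path(project_dir) / ".fractal_cache"


# Illustrative values only:
print(derive_cache_dir("/home/slurm_user/fractal"))  # /home/slurm_user/fractal/.fractal_cache
print(derive_cache_dir(None))  # None
```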

fractal_server/app/routes/api/v2/workflow_import.py
CHANGED
@@ -68,11 +68,8 @@ async def _get_task_by_source(
 Find task with a given source.

 Args:
-
-
-default_group_id: ID of default user group.
-task_group_list: Current list of valid task groups.
-db: Asynchronous db session
+source: `source` of the task to be imported.
+task_groups_list: Current list of valid task groups.

 Return:
 `id` of the matching task, or `None`.
@@ -172,10 +169,10 @@ async def _get_task_by_taskimport(

 Args:
 task_import: Info on task to be imported.
+task_groups_list: Current list of valid task groups.
 user_id: ID of current user.
 default_group_id: ID of default user group.
-
-db: Asynchronous db session
+db: Asynchronous database session.

 Return:
 `id` of the matching task, or `None`.

fractal_server/app/runner/executors/slurm/_slurm_config.py
CHANGED
@@ -327,7 +327,7 @@ class SlurmConfig(BaseModel, extra=Extra.forbid):
 script.

 Arguments:
-
+remote_export_dir:
 Base directory for exports defined in
 `self.user_local_exports`.
 """
@@ -378,7 +378,7 @@ class SlurmConfig(BaseModel, extra=Extra.forbid):
 if self.user_local_exports:
 if remote_export_dir is None:
 raise ValueError(
-f"
+f"remote_export_dir=None but {self.user_local_exports=}"
 )
 for key, value in self.user_local_exports.items():
 tmp_value = str(Path(remote_export_dir) / value)

fractal_server/app/runner/executors/slurm/ssh/executor.py
CHANGED
@@ -917,7 +917,7 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
 thread via `fut.set_exception(...)`.

 Arguments:
-
+job_ids: IDs of the SLURM jobs to handle.
 """
 # Handle all uncaught exceptions in this broad try/except block
 try:
@@ -1109,8 +1109,9 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
 Fetch a remote folder via tar+sftp+tar

 Arguments:
-
-`SlurmJob` object (needed for
+jobs:
+List of `SlurmJob` object (needed for their prefix-related
+attributes).
 """

 # Check that the subfolder is unique

fractal_server/app/runner/v1/_common.py
CHANGED
@@ -524,8 +524,7 @@ def execute_tasks(

 **Note:** At the end of each task, write current metadata to
 `workflow_dir_local / METADATA_FILENAME`, so that they can be read as part
-of the
-endpoint](../../api/v1/job/#fractal_server.app.routes.api.v1.job.get_job).
+of the `get_job` endpoint.

 Arguments:
 executor:

fractal_server/app/runner/v2/_local/__init__.py
CHANGED
@@ -44,10 +44,6 @@ def _process_workflow(
 Internal processing routine

 Schedules the workflow using a `FractalThreadPoolExecutor`.
-
-Cf.
-[process_workflow][fractal_server.app.runner.v2._local.process_workflow]
-for the call signature.
 """

 with FractalThreadPoolExecutor() as executor:

fractal_server/app/runner/v2/_slurm_ssh/__init__.py
CHANGED
@@ -55,9 +55,6 @@ def _process_workflow(
 workflow working dir and user to impersonate. It then schedules the
 workflow tasks and returns the new dataset attributes

-Cf.
-[process_workflow][fractal_server.app.runner.v2._local.process_workflow]
-
 Returns:
 new_dataset_attributes:
 """
@@ -115,9 +112,6 @@ async def process_workflow(
 ) -> dict:
 """
 Process workflow (SLURM backend public interface)
-
-Cf.
-[process_workflow][fractal_server.app.runner.v2._local.process_workflow]
 """

 # Set values of first_task_index and last_task_index

fractal_server/app/runner/v2/_slurm_sudo/__init__.py
CHANGED
@@ -51,9 +51,6 @@ def _process_workflow(
 workflow working dir and user to impersonate. It then schedules the
 workflow tasks and returns the new dataset attributes

-Cf.
-[process_workflow][fractal_server.app.runner.v2._local.process_workflow]
-
 Returns:
 new_dataset_attributes:
 """
@@ -106,10 +103,7 @@ async def process_workflow(
 worker_init: Optional[str] = None,
 ) -> dict:
 """
-Process workflow (SLURM backend public interface)
-
-Cf.
-[process_workflow][fractal_server.app.runner.v2._local.process_workflow]
+Process workflow (SLURM backend public interface).
 """

 # Set values of first_task_index and last_task_index

fractal_server/app/schemas/user_settings.py
CHANGED
@@ -31,14 +31,12 @@ class UserSettingsRead(BaseModel):
 ssh_jobs_dir: Optional[str] = None
 slurm_user: Optional[str] = None
 slurm_accounts: list[str]
-cache_dir: Optional[str] = None
 project_dir: Optional[str] = None


 class UserSettingsReadStrict(BaseModel):
 slurm_user: Optional[str] = None
 slurm_accounts: list[str]
-cache_dir: Optional[str] = None
 ssh_username: Optional[str] = None
 project_dir: Optional[str] = None

@@ -55,7 +53,6 @@ class UserSettingsUpdate(BaseModel, extra=Extra.forbid):
 ssh_jobs_dir: Optional[str] = None
 slurm_user: Optional[str] = None
 slurm_accounts: Optional[list[StrictStr]] = None
-cache_dir: Optional[str] = None
 project_dir: Optional[str] = None

 _ssh_host = validator("ssh_host", allow_reuse=True)(
@@ -87,13 +84,6 @@ class UserSettingsUpdate(BaseModel, extra=Extra.forbid):
 value[i] = valstr(f"slurm_accounts[{i}]")(item)
 return val_unique_list("slurm_accounts")(value)

-@validator("cache_dir")
-def cache_dir_validator(cls, value):
-if value is None:
-return None
-validate_cmd(value)
-return val_absolute_path("cache_dir")(value)
-
 @validator("project_dir")
 def project_dir_validator(cls, value):
 if value is None:
@@ -104,15 +94,7 @@ class UserSettingsUpdate(BaseModel, extra=Extra.forbid):

 class UserSettingsUpdateStrict(BaseModel, extra=Extra.forbid):
 slurm_accounts: Optional[list[StrictStr]] = None
-cache_dir: Optional[str] = None

 _slurm_accounts = validator("slurm_accounts", allow_reuse=True)(
 val_unique_list("slurm_accounts")
 )
-
-@validator("cache_dir")
-def cache_dir_validator(cls, value):
-if value is None:
-return value
-validate_cmd(value)
-return val_absolute_path("cache_dir")(value)

fractal_server/app/user_settings.py
CHANGED
@@ -32,11 +32,11 @@ class SlurmSudoUserSettings(BaseModel):

 Attributes:
 slurm_user: User to be impersonated via `sudo -u`.
-
+project_dir: Folder where `slurm_user` can write.
 slurm_accounts:
 List of SLURM accounts, to be used upon Fractal job submission.
 """

 slurm_user: str
-
+project_dir: str
 slurm_accounts: list[str]
fractal_server/config.py
CHANGED
@@ -302,8 +302,7 @@ class Settings(BaseSettings):
 """
 Logging-level threshold for logging

-Only logs of with this level (or higher) will appear in the console logs
-see details [here](../internals/logs/).
+Only logs of with this level (or higher) will appear in the console logs.
 """

 FRACTAL_LOCAL_CONFIG_FILE: Optional[Path]
fractal_server/logger.py
CHANGED
@@ -66,10 +66,8 @@ def set_logger(

 * The attribute `Logger.propagate` set to `False`;
 * One and only one `logging.StreamHandler` handler, with severity level set
-to
-
-and formatter set as in the `logger.LOG_FORMAT` variable from the current
-module;
+to `FRACTAL_LOGGING_LEVEL` and formatter set as in the `logger.LOG_FORMAT`
+variable from the current module;
 * One or many `logging.FileHandler` handlers, including one pointint to
 `log_file_path` (if set); all these handlers have severity level set to
 `logging.DEBUG`.

fractal_server/migrations/versions/316140ff7ee1_remove_usersettings_cache_dir.py
ADDED
@@ -0,0 +1,36 @@
+"""Remove UserSettings.cache_dir
+
+Revision ID: 316140ff7ee1
+Revises: d256a7379ab8
+Create Date: 2024-12-03 10:15:53.255958
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "316140ff7ee1"
+down_revision = "d256a7379ab8"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("user_settings", schema=None) as batch_op:
+        batch_op.drop_column("cache_dir")
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("user_settings", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "cache_dir", sa.VARCHAR(), autoincrement=False, nullable=True
+            )
+        )
+
+    # ### end Alembic commands ###
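
As a hedged aside, a revision like the one above is normally applied (or rolled back) through Alembic; a minimal sketch using Alembic's Python API, assuming a standard `alembic.ini` is available in the working directory (fractal-server may wrap this in its own management tooling):

```python
# Sketch only: apply or revert the 316140ff7ee1 revision with Alembic's API.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed config location
command.upgrade(cfg, "316140ff7ee1")      # drop user_settings.cache_dir
# command.downgrade(cfg, "d256a7379ab8")  # re-add the column if needed
```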
fractal_server/ssh/_fabric.py
CHANGED
@@ -366,10 +366,9 @@ class FractalSSH(object):
 Transfer a file via SSH

 Args:
-local: Local path to file
-remote: Target path on remote host
-
-logger_name: Name of the logger
+local: Local path to file.
+remote: Target path on remote host.
+lock_timeout: Timeout for lock acquisition (overrides default).
 """
 try:
 self.logger.info(
@@ -407,10 +406,9 @@ class FractalSSH(object):
 Transfer a file via SSH

 Args:
-local: Local path to file
-remote: Target path on remote host
-
-lock_timeout:
+local: Local path to file.
+remote: Target path on remote host.
+lock_timeout: Timeout for lock acquisition (overrides default).
 """
 try:
 prefix = "[fetch_file] "
@@ -499,9 +497,9 @@ class FractalSSH(object):
 Open a remote file via SFTP and write it.

 Args:
-path: Absolute path
-
-lock_timeout:
+path: Absolute path of remote file.
+content: Contents to be written to file.
+lock_timeout: Timeout for lock acquisition (overrides default).
 """
 self.logger.info(f"START writing to remote file {path}.")
 actual_lock_timeout = self.default_lock_timeout

fractal_server/tasks/v2/ssh/_utils.py
CHANGED
@@ -24,7 +24,7 @@ def _customize_and_run_template(
 Args:
 template_filename: Filename of the template file (ends with ".sh").
 replacements: Dictionary of replacements.
-
+script_dir_local: Local folder where the script will be placed.
 prefix: Prefix for the script filename.
 fractal_ssh: FractalSSH object
 script_dir_remote: Remote scripts directory

fractal_server/tasks/v2/ssh/collect.py
CHANGED
@@ -69,7 +69,6 @@ def collect_ssh(
 )

 with next(get_sync_db()) as db:
-
 # Get main objects from db
 activity = db.get(TaskGroupActivityV2, task_group_activity_id)
 task_group = db.get(TaskGroupV2, task_group_id)
@@ -117,6 +116,25 @@ def collect_ssh(
 return

 try:
+script_dir_remote = (
+Path(task_group.path) / SCRIPTS_SUBFOLDER
+).as_posix()
+# Create remote `task_group.path` and `script_dir_remote`
+# folders (note that because of `parents=True` we are in
+# the `no error if existing, make parent directories as
+# needed` scenario for `mkdir`)
+fractal_ssh.mkdir(folder=task_group.path, parents=True)
+fractal_ssh.mkdir(folder=script_dir_remote, parents=True)
+
+# Copy wheel file into task group path
+if task_group.wheel_path:
+new_wheel_path = _copy_wheel_file_ssh(
+task_group=task_group,
+fractal_ssh=fractal_ssh,
+logger_name=LOGGER_NAME,
+)
+task_group.wheel_path = new_wheel_path
+task_group = add_commit_refresh(obj=task_group, db=db)

 # Prepare replacements for templates
 replacements = get_collection_replacements(
@@ -127,9 +145,6 @@ def collect_ssh(
 )

 # Prepare common arguments for `_customize_and_run_template``
-script_dir_remote = (
-Path(task_group.path) / SCRIPTS_SUBFOLDER
-).as_posix()
 common_args = dict(
 replacements=replacements,
 script_dir_local=(
@@ -144,23 +159,6 @@ def collect_ssh(
 logger_name=LOGGER_NAME,
 )

-# Create remote `task_group.path` and `script_dir_remote`
-# folders (note that because of `parents=True` we are in
-# the `no error if existing, make parent directories as
-# needed` scenario for `mkdir`)
-fractal_ssh.mkdir(folder=task_group.path, parents=True)
-fractal_ssh.mkdir(folder=script_dir_remote, parents=True)
-
-# Copy wheel file into task group path
-if task_group.wheel_path:
-new_wheel_path = _copy_wheel_file_ssh(
-task_group=task_group,
-fractal_ssh=fractal_ssh,
-logger_name=LOGGER_NAME,
-)
-task_group.wheel_path = new_wheel_path
-task_group = add_commit_refresh(obj=task_group, db=db)
-
 logger.debug("installing - START")

 # Set status to ONGOING and refresh logs

fractal_server/tasks/v2/utils_database.py
CHANGED
@@ -24,9 +24,12 @@ def create_db_tasks_and_update_task_group(
 Create a `TaskGroupV2` with N `TaskV2`s, and insert them into the database.

 Arguments:
-
-task_list:
-db:
+task_group_id: ID of an existing `TaskGroupV2` object.
+task_list: List of `TaskCreateV2` objects to be inserted into the db.
+db: Synchronous database session
+
+Returns:
+Updated `TaskGroupV2` object.
 """
 actual_task_list = [
 TaskV2(

fractal_server/tasks/v2/utils_python_interpreter.py
CHANGED
@@ -8,10 +8,10 @@ def get_python_interpreter_v2(
 python_version: Literal["3.9", "3.10", "3.11", "3.12"]
 ) -> str:
 """
-Return the path to the
+Return the path to the Python interpreter

 Args:
-
+python_version: Python version

 Raises:
 ValueError: If the python version requested is not available on the

fractal_server/tasks/v2/utils_templates.py
CHANGED
@@ -19,9 +19,9 @@ def customize_template(
 Customize a bash-script template and write it to disk.

 Args:
-
-
-
+template_name: Name of the template that will be customized.
+replacements: List of replacements for template customization.
+script_path: Local path where the customized template will be written.
 """
 # Read template
 template_path = TEMPLATES_DIR / template_name

{fractal_server-2.9.0a12.dist-info → fractal_server-2.9.2.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-fractal_server/__init__.py,sha256=
+fractal_server/__init__.py,sha256=NSsP8CCnskjO7TQm83nfwPJv47-cRQJECiDBsOXPNyU,22
 fractal_server/__main__.py,sha256=dEkCfzLLQrIlxsGC-HBfoR-RBMWnJDgNrxYTyzmE9c0,6146
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -6,8 +6,8 @@ fractal_server/app/db/__init__.py,sha256=wup2wcOkyOh8Vd0Xm76PZn_naxeMqaL4eF8DHHX
 fractal_server/app/models/__init__.py,sha256=aG7mf1zZbsgzDSp7GHEcZhdjHfW3TGPOLCI8MrvYhPw,500
 fractal_server/app/models/linkusergroup.py,sha256=LWTUfhH2uAnn_4moK7QdRUIHWtpw-hPZuW-5jClv_OE,610
 fractal_server/app/models/linkuserproject.py,sha256=eQaourbGRshvlMVlKzLYJKHEjfsW1CbWws9yW4eHXhA,567
-fractal_server/app/models/security.py,sha256=
-fractal_server/app/models/user_settings.py,sha256=
+fractal_server/app/models/security.py,sha256=kLvarGwG1CxvtbpV2HkkOobzHU5Ia0PHyNzHghKSEx4,3751
+fractal_server/app/models/user_settings.py,sha256=Y-ZV-uZAFLZqXxy8c5_Qeh_F7zQuZDWOgLpU6Zs6iqU,1316
 fractal_server/app/models/v1/__init__.py,sha256=hUI7dEbPaiZGN0IbHW4RSmSicyvtn_xeuevoX7zvUwI,466
 fractal_server/app/models/v1/dataset.py,sha256=99GDgt7njx8yYQApkImqp_7bHA5HH3ElvbR6Oyj9kVI,2017
 fractal_server/app/models/v1/job.py,sha256=QLGXcWdVRHaUHQNDapYYlLpEfw4K7QyD8TmcwhrWw2o,3304
@@ -37,28 +37,28 @@ fractal_server/app/routes/api/v1/__init__.py,sha256=Y2HQdG197J0a7DyQEE2jn53IfxD0
 fractal_server/app/routes/api/v1/_aux_functions.py,sha256=P9Q48thGH95w0h5cacYoibxqgiiLW4oqZ8rNJ2LIISY,13219
 fractal_server/app/routes/api/v1/dataset.py,sha256=KVfKdp-bT8eB14kCjTSmpji4a2IPIHxGID8L10h3Wac,17282
 fractal_server/app/routes/api/v1/job.py,sha256=0jGxvu0xNQnWuov2qnoo9yE7Oat37XbcVn4Ute-UsiE,5370
-fractal_server/app/routes/api/v1/project.py,sha256=
+fractal_server/app/routes/api/v1/project.py,sha256=3NsdNXLIsE8QiNgKP1Kp1-B0zYG0Zi5HKBzWA0LjlQg,15551
 fractal_server/app/routes/api/v1/task.py,sha256=eW89nMCjpD4G6tHXDo2qGBKqWaPirjH6M3hpdJQhfa0,6528
 fractal_server/app/routes/api/v1/task_collection.py,sha256=5EMh3yhS1Z4x25kp5Iaxalrf7RgJh-XD1nBjrFvgwsg,9072
 fractal_server/app/routes/api/v1/workflow.py,sha256=2T93DuEnSshaDCue-JPmjuvGCtbk6lt9pFMuPt783t8,11217
 fractal_server/app/routes/api/v1/workflowtask.py,sha256=OYYConwJbmNULDw5I3T-UbSJKrbbBiAHbbBeVcpoFKQ,5785
 fractal_server/app/routes/api/v2/__init__.py,sha256=w4c9WzagaVV5d4TWBX5buu5ENk8jf3YftMQYmhavz9Q,2172
 fractal_server/app/routes/api/v2/_aux_functions.py,sha256=mb4R_qqFxeW0LAis2QJIIfVx8Sydv1jTYaRIMsMxnIk,11720
-fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=
+fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=c8eqPXdMhc3nIixX50B1Ka5n7LgbOZm2JbEs7lICQ04,6767
 fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=PuapLtvSk9yhBAsKNEp1w2oagOMr0YZTo247-CU3hdM,11008
 fractal_server/app/routes/api/v2/dataset.py,sha256=Y6uZz--YSEGgnPYu05rZ9sr1Ug08bNl2v1h3VeApBe8,9441
 fractal_server/app/routes/api/v2/images.py,sha256=JR1rR6qEs81nacjriOXAOBQjAbCXF4Ew7M7mkWdxBU0,7920
 fractal_server/app/routes/api/v2/job.py,sha256=Bga2Kz1OjvDIdxZObWaaXVhNIhC_5JKhKRjEH2_ayEE,5157
 fractal_server/app/routes/api/v2/project.py,sha256=eWYFJ7F2ZYQcpi-_n-rhPF-Q4gJhzYBsVGYFhHZZXAE,6653
 fractal_server/app/routes/api/v2/status.py,sha256=6N9DSZ4iFqbZImorWfEAPoyoFUgEruo4Hweqo0x0xXU,6435
-fractal_server/app/routes/api/v2/submit.py,sha256=
+fractal_server/app/routes/api/v2/submit.py,sha256=cQwt0oK8xjHMGA_bQrw4Um8jd_aCvgmWfoqSQDh12hQ,8246
 fractal_server/app/routes/api/v2/task.py,sha256=K0ik33t7vL8BAK5S7fqyJDNdRK4stGqb_73bSa8tvPE,7159
 fractal_server/app/routes/api/v2/task_collection.py,sha256=TIr1IPO15TX6CZIQ_LPc0zFtTltuleDISAdMVaVQxfw,9633
 fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=cctW61-C2QYF2KXluS15lLhZJS_kt30Ca6UGLFO32z0,6207
 fractal_server/app/routes/api/v2/task_group.py,sha256=4o2N0z7jK7VUVlJZMM4GveCCc4JKxYJx9-PMmsYIlJQ,8256
 fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=3o9bCC8ubMwffQPPaxQZy-CjH9IB2RkIReIecI6L2_w,9300
 fractal_server/app/routes/api/v2/workflow.py,sha256=vjCNRzMHaAB4YWbAEWGlELHXDN4GjtE26IkIiB15RGM,8682
-fractal_server/app/routes/api/v2/workflow_import.py,sha256
+fractal_server/app/routes/api/v2/workflow_import.py,sha256=-7Er3FWGF_1xI2qHFO9gfLVQAok5bojd7mbzQxa9Ofw,10858
 fractal_server/app/routes/api/v2/workflowtask.py,sha256=ciHTwXXFiFnMF7ZpJ3Xs0q6YfuZrFvIjqndlzAEdZpo,6969
 fractal_server/app/routes/auth/__init__.py,sha256=fao6CS0WiAjHDTvBzgBVV_bSXFpEAeDBF6Z6q7rRkPc,1658
 fractal_server/app/routes/auth/_aux_auth.py,sha256=ifkNocTYatBSMYGwiR14qohmvR9SfMldceiEj6uJBrU,4783
@@ -82,12 +82,12 @@ fractal_server/app/runner/exceptions.py,sha256=_qZ_t8O4umAdJ1ikockiF5rDJuxnEskrG
 fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm/_batching.py,sha256=3mfeFuYm3UA4EXh4VWuqZTF-dcINECZgTHoPOaOszDo,8840
-fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=
+fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=P0TDfIFf07h0hIhVNZUcY3t5vgdjptU-2T0uC_ZBEB4,15688
 fractal_server/app/runner/executors/slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77JRK82g_2hx0iBKTiMghuIo,5852
 fractal_server/app/runner/executors/slurm/ssh/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
 fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py,sha256=bKo5Ja0IGxJWpPWyh9dN0AG-PwzTDZzD5LyaEHB3YU4,3742
 fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py,sha256=rwlqZzoGo4SAb4nSlFjsQJdaCgfM1J6YGcjb8yYxlqc,4506
-fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=
+fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=U2-tNE_5ECHFIoXjEvBlaSXKaIf-1IXZlDs0c34mab8,54110
 fractal_server/app/runner/executors/slurm/sudo/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
 fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py,sha256=wAgwpVcr6JIslKHOuS0FhRa_6T1KCManyRJqA-fifzw,1909
 fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py,sha256=z5LlhaiqAb8pHsF1WwdzXN39C5anQmwjo1rSQgtRAYE,4422
@@ -101,7 +101,7 @@ fractal_server/app/runner/set_start_and_last_task_index.py,sha256=-q4zVybAj8ek2X
 fractal_server/app/runner/shutdown.py,sha256=I_o2iYKJwzku0L3E85ETjrve3QPECygR5xhhsAo5huM,2910
 fractal_server/app/runner/task_files.py,sha256=sd_MpJ01C8c9QTO8GzGMidFGdlq_hXX_ARDRhd_YMnI,3762
 fractal_server/app/runner/v1/__init__.py,sha256=VvJFk4agX2X3fQfDcoNmOB2ouNCaQU7dAqaFmpcdP8I,15063
-fractal_server/app/runner/v1/_common.py,sha256=
+fractal_server/app/runner/v1/_common.py,sha256=bHM_fU0ubwoxiv0V3uUnb0uVBPpJ_rJaUaQGqACwHS4,21549
 fractal_server/app/runner/v1/_local/__init__.py,sha256=KlSML4LqF4p1IfhSd8tAkiu3aeDzifeanuNXjATDsYE,6929
 fractal_server/app/runner/v1/_local/_local_config.py,sha256=hM7SPxR07luXPcXdrWXRpEB2uOyjSSRUdqW3QBKJn9c,3147
 fractal_server/app/runner/v1/_local/_submit_setup.py,sha256=XyBDPb4IYdKEEnzLYdcYteIHWVWofJxKMmQCyRkn5Bc,1509
@@ -112,7 +112,7 @@ fractal_server/app/runner/v1/_slurm/get_slurm_config.py,sha256=6pQNNx997bLIfLp0g
 fractal_server/app/runner/v1/common.py,sha256=_L-vjLnWato80VdlB_BFN4G8P4jSM07u-5cnl1T3S34,3294
 fractal_server/app/runner/v1/handle_failed_job.py,sha256=bHzScC_aIlU3q-bQxGW6rfWV4xbZ2tho_sktjsAs1no,4684
 fractal_server/app/runner/v2/__init__.py,sha256=4RTlY34bOqgmzqVHXER0-lpnKaG15boMgDyf1L40JWg,17362
-fractal_server/app/runner/v2/_local/__init__.py,sha256=
+fractal_server/app/runner/v2/_local/__init__.py,sha256=nTQrdPaxsWvUAhgq-1hMq8f5W-LwUlaapyjOdQ7BfQ8,5857
 fractal_server/app/runner/v2/_local/_local_config.py,sha256=9oi209Dlp35ANfxb_DISqmMKKc6DPaMsmYVWbZLseME,3630
 fractal_server/app/runner/v2/_local/_submit_setup.py,sha256=MucNOo8Er0F5ZIwH7CnTeXgnFMc6d3pKPkv563QNVi0,1630
 fractal_server/app/runner/v2/_local/executor.py,sha256=QrJlD77G6q4WohoJQO7XXbvi2RlCUsNvMnPDEZIoAqA,3620
@@ -122,9 +122,9 @@ fractal_server/app/runner/v2/_local_experimental/_submit_setup.py,sha256=we7r-sQ
 fractal_server/app/runner/v2/_local_experimental/executor.py,sha256=plvEqqdcXOSohYsQoykYlyDwCING7OO5h-4XAZtwdPs,5503
 fractal_server/app/runner/v2/_slurm_common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/v2/_slurm_common/get_slurm_config.py,sha256=UdkoFF0HF_TdKbay-d9bjkxT2ltcOE5i8H_FoOu64HU,6202
-fractal_server/app/runner/v2/_slurm_ssh/__init__.py,sha256=
+fractal_server/app/runner/v2/_slurm_ssh/__init__.py,sha256=q9XL6q6s77-bSudRY1Vg5_NcHXvDEZZJ8PhcrPG28uE,4358
 fractal_server/app/runner/v2/_slurm_ssh/_submit_setup.py,sha256=a5_FDPH_yxYmrjAjMRLgh_Y4DSG3mRslCLQodGM3-t4,2838
-fractal_server/app/runner/v2/_slurm_sudo/__init__.py,sha256=
+fractal_server/app/runner/v2/_slurm_sudo/__init__.py,sha256=cK0MXXnIqRBgKsv37VqHe2poQfrFYyUXDHOl5YXrXrU,4145
 fractal_server/app/runner/v2/_slurm_sudo/_submit_setup.py,sha256=a5_FDPH_yxYmrjAjMRLgh_Y4DSG3mRslCLQodGM3-t4,2838
 fractal_server/app/runner/v2/deduplicate_list.py,sha256=-imwO7OB7ATADEnqVbTElUwoY0YIJCTf_SbWJNN9OZg,639
 fractal_server/app/runner/v2/handle_failed_job.py,sha256=fipRJT5Y8UY0US4bXUX-4ORTAQ1AetZcCAOVCjDO3_c,5202
@@ -138,7 +138,7 @@ fractal_server/app/schemas/__init__.py,sha256=stURAU_t3AOBaH0HSUbV-GKhlPKngnnIMo
 fractal_server/app/schemas/_validators.py,sha256=T5EswIJAJRvawfzqWtPcN2INAfiBXyE4m0iwQm4ht-0,3149
 fractal_server/app/schemas/user.py,sha256=icjox9gK_invW44Nh_L4CvqfRa92qghyQhmevyg09nQ,2243
 fractal_server/app/schemas/user_group.py,sha256=t30Kd07PY43G_AqFDb8vjdInTeLeU9WvFZDx8fVLPSI,1750
-fractal_server/app/schemas/user_settings.py,sha256=
+fractal_server/app/schemas/user_settings.py,sha256=re7ZFS8BLjR9MdIoZNRt2DNPc7znCgDpEYFKr8ZsAZg,2980
 fractal_server/app/schemas/v1/__init__.py,sha256=CrBGgBhoemCvmZ70ZUchM-jfVAICnoa7AjZBAtL2UB0,1852
 fractal_server/app/schemas/v1/applyworkflow.py,sha256=dYArxQAOBdUIEXX_Ejz8b9fBhEYu1nMm6b_Z6_P6TgA,4052
 fractal_server/app/schemas/v1/dataset.py,sha256=DWFCxZjApcKt2M6UJMK0tmejXwUT09vjUULf2D7Y-f0,3293
@@ -162,15 +162,15 @@ fractal_server/app/schemas/v2/task_group.py,sha256=fSjdLbClrpmrPj5hFZMu9DoJW4Y33
 fractal_server/app/schemas/v2/workflow.py,sha256=-KWvXnbHBFA3pj5n7mfSyLKJQSqkJmoziIEe7mpLl3M,1875
 fractal_server/app/schemas/v2/workflowtask.py,sha256=vDdMktYbHeYBgB5OuWSv6wRPRXWqvetkeqQ7IC5YtfA,5751
 fractal_server/app/security/__init__.py,sha256=MlWVrLFPj9M2Gug-k8yATM-Cw066RugVU4KK6kMRbnQ,13019
-fractal_server/app/user_settings.py,sha256=
-fractal_server/config.py,sha256=
+fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
+fractal_server/config.py,sha256=wRWJqyEeH4j2puH-fGlCYKLoKFh9pzRsQkS6q1VtO9M,23173
 fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
 fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
 fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
 fractal_server/images/__init__.py,sha256=xO6jTLE4EZKO6cTDdJsBmK9cdeh9hFTaSbSuWgQg7y4,196
 fractal_server/images/models.py,sha256=UlWazUOFQtpS3pZuROjcJXviG_Ai453jqUDHdzuvD5w,4170
 fractal_server/images/tools.py,sha256=gxeniYy4Z-cp_ToK2LHPJUTVVUUrdpogYdcBUvBuLiY,2209
-fractal_server/logger.py,sha256=
+fractal_server/logger.py,sha256=zwg_AjIHkNP0ruciXjm5lI5UFP3n6tMHullsM9lDjz4,5039
 fractal_server/main.py,sha256=gStLT9Du5QMpc9SyvRvtKU21EKwp-dG4HL3zGHzE06A,4908
 fractal_server/migrations/README,sha256=4rQvyDfqodGhpJw74VYijRmgFP49ji5chyEemWGHsuw,59
 fractal_server/migrations/env.py,sha256=9t_OeKVlhM8WRcukmTrLbWNup-imiBGP_9xNgwCbtpI,2730
@@ -179,6 +179,7 @@ fractal_server/migrations/script.py.mako,sha256=oMXw9LC3zRbinWWPPDgeZ4z9FJrV2zhR
 fractal_server/migrations/versions/034a469ec2eb_task_groups.py,sha256=vrPhC8hfFu1c4HmLHNZyCuqEfecFD8-bWc49bXMNes0,6199
 fractal_server/migrations/versions/091b01f51f88_add_usergroup_and_linkusergroup_table.py,sha256=-BSS9AFTPcu3gYC-sYbawSy4MWQQx8TfMb5BW5EBKmQ,1450
 fractal_server/migrations/versions/19eca0dd47a9_user_settings_project_dir.py,sha256=Q1Gj1cJ0UrdLBJ5AXfFK9QpxTtmcv-4Z3NEGDnxOme4,961
+fractal_server/migrations/versions/316140ff7ee1_remove_usersettings_cache_dir.py,sha256=lANgTox0rz459_yo1Rw7fGCT1qw5sUCUXTLUMc_Bzf8,911
 fractal_server/migrations/versions/4c308bcaea2b_add_task_args_schema_and_task_args_.py,sha256=-wHe-fOffmYeAm0JXVl_lxZ7hhDkaEVqxgxpHkb_uL8,954
 fractal_server/migrations/versions/4cedeb448a53_workflowtask_foreign_keys_not_nullables.py,sha256=Mob8McGYAcmgvrseyyYOa54E6Gsgr-4SiGdC-r9O4_A,1157
 fractal_server/migrations/versions/501961cfcd85_remove_link_between_v1_and_v2_tasks_.py,sha256=5ROUgcoZOdjf8kMt6cxuvPhzHmV6xaCxvZEbhUEyZM4,3271
@@ -203,7 +204,7 @@ fractal_server/migrations/versions/efa89c30e0a4_add_project_timestamp_created.py
 fractal_server/migrations/versions/f384e1c0cf5d_drop_task_default_args_columns.py,sha256=9BwqUS9Gf7UW_KjrzHbtViC880qhD452KAytkHWWZyk,746
 fractal_server/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/ssh/__init__.py,sha256=sVUmzxf7_DuXG1xoLQ1_00fo5NPhi2LJipSmU5EAkPs,124
-fractal_server/ssh/_fabric.py,sha256=
+fractal_server/ssh/_fabric.py,sha256=lNy4IX1I4We6VoWa4Bz4fUPuApLMSoejpyE6I3jDZeM,22869
 fractal_server/string_tools.py,sha256=XtMNsr5R7GmgzmFi68zkKMedHs8vjGoVMMCXqWhIk9k,2568
 fractal_server/syringe.py,sha256=3qSMW3YaMKKnLdgnooAINOPxnCOxP7y2jeAQYB21Gdo,2786
 fractal_server/tasks/__init__.py,sha256=kadmVUoIghl8s190_Tt-8f-WBqMi8u8oU4Pvw39NHE8,23
@@ -221,8 +222,8 @@ fractal_server/tasks/v2/local/collect.py,sha256=BbXSgxExPUxFxcmBs3ejwWzRae-sQgfb
 fractal_server/tasks/v2/local/deactivate.py,sha256=XR1nvJY3mKCRqwPwV79rVaQmtb3J83KdmJKjTOHD-cU,9250
 fractal_server/tasks/v2/local/reactivate.py,sha256=R3rArAzUpMGf6xa3dGVwwXHW9WVDi5ia28AFisZsqNc,6112
 fractal_server/tasks/v2/ssh/__init__.py,sha256=aSQbVi6Ummt9QzcSLWNmSqYjfdxrn9ROmqgH6bDpI7k,135
-fractal_server/tasks/v2/ssh/_utils.py,sha256=
-fractal_server/tasks/v2/ssh/collect.py,sha256=
+fractal_server/tasks/v2/ssh/_utils.py,sha256=LjaEYVUJDChilu3YuhxuGWYRNnVJ_zqNE9SDHdRTIHY,2824
+fractal_server/tasks/v2/ssh/collect.py,sha256=FkTfyhdwAp4qa4W_dqjT0CmuDMFuCBSOYjg_y1Kq2Bs,12939
 fractal_server/tasks/v2/ssh/deactivate.py,sha256=Ffk_UuQSBUBNBCiviuKNhEUGyZPQa4_erJKFdwgMcE8,10616
 fractal_server/tasks/v2/ssh/reactivate.py,sha256=jdO8iyzavzSVPcOpIZrYSEkGPYTvz5XJ5h_5-nz9yzA,7896
 fractal_server/tasks/v2/templates/1_create_venv.sh,sha256=PK0jdHKtQpda1zULebBaVPORt4t6V17wa4N1ohcj5ac,548
@@ -232,15 +233,15 @@ fractal_server/tasks/v2/templates/4_pip_show.sh,sha256=84NGHlg6JIbrQktgGKyfGsggP
 fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh,sha256=q-6ZUvA6w6FDVEoSd9O63LaJ9tKZc7qAFH72SGPrd_k,284
 fractal_server/tasks/v2/templates/6_pip_install_from_freeze.sh,sha256=n9C8w76YraLbeTe7NhuLzvAQiJCm_akL3Mc3EMfxrHo,1007
 fractal_server/tasks/v2/utils_background.py,sha256=tikXhggqxdU7EnKdx2co3UwinlDazEjfOPQOXtO58zs,4240
-fractal_server/tasks/v2/utils_database.py,sha256=
+fractal_server/tasks/v2/utils_database.py,sha256=g5m3sNPZKQ3AjflhPURDlAppQcIS5T1A8a1macdswBA,1268
 fractal_server/tasks/v2/utils_package_names.py,sha256=RDg__xrvQs4ieeVzmVdMcEh95vGQYrv9Hfal-5EDBM8,2393
-fractal_server/tasks/v2/utils_python_interpreter.py,sha256
-fractal_server/tasks/v2/utils_templates.py,sha256=
+fractal_server/tasks/v2/utils_python_interpreter.py,sha256=5_wrlrTqXyo1YuLZvAW9hrSoh5MyLOzdPVUlUwM7uDQ,955
+fractal_server/tasks/v2/utils_templates.py,sha256=MS8zu24qimJSktZaHruPxkwIl81ZoUnIVGtnMHS4Y3o,2876
 fractal_server/urls.py,sha256=5o_qq7PzKKbwq12NHSQZDmDitn5RAOeQ4xufu-2v9Zk,448
 fractal_server/utils.py,sha256=utvmBx8K9I8hRWFquxna2pBaOqe0JifDL_NVPmihEJI,3525
 fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
-fractal_server-2.9.
-fractal_server-2.9.
-fractal_server-2.9.
-fractal_server-2.9.
-fractal_server-2.9.
+fractal_server-2.9.2.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.9.2.dist-info/METADATA,sha256=SS4_FSV8br7rCqs9Bho2_pFmB_Tvg_4Mpp-Au4weqf8,4543
+fractal_server-2.9.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+fractal_server-2.9.2.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.9.2.dist-info/RECORD,,
File without changes
|
File without changes
|
File without changes
|