fractal-server 2.14.0a2__py3-none-any.whl → 2.14.0a3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/history/image_updates.py +2 -1
- fractal_server/app/routes/admin/v2/task.py +1 -1
- fractal_server/app/routes/api/v2/history.py +21 -10
- fractal_server/app/routes/api/v2/project.py +1 -1
- fractal_server/app/runner/executors/base_runner.py +2 -1
- fractal_server/app/runner/executors/local/_submit_setup.py +5 -13
- fractal_server/app/runner/executors/slurm_common/_slurm_config.py +1 -1
- fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +1 -1
- fractal_server/app/runner/executors/slurm_common/remote.py +1 -1
- fractal_server/app/runner/v2/__init__.py +2 -2
- fractal_server/app/runner/v2/_slurm_ssh.py +1 -1
- fractal_server/app/runner/v2/_slurm_sudo.py +1 -1
- fractal_server/app/runner/v2/runner.py +3 -2
- fractal_server/app/runner/v2/runner_functions.py +21 -47
- fractal_server/app/schemas/v2/manifest.py +1 -1
- fractal_server/app/security/__init__.py +3 -3
- fractal_server/config.py +2 -2
- fractal_server/tasks/v2/templates/4_pip_show.sh +1 -1
- {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a3.dist-info}/METADATA +1 -1
- {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a3.dist-info}/RECORD +24 -24
- {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a3.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a3.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a3.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.14.0a2"
+__VERSION__ = "2.14.0a3"
fractal_server/app/history/image_updates.py
CHANGED
@@ -83,7 +83,8 @@ def update_single_image_logfile(
 ) -> None:

     logger.debug(
-
+        "[update_single_image_logfile] "
+        f"{history_item_id=}, {logfile=}, {zarr_url=}"
     )

     with next(get_sync_db()) as db:
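A minimal, self-contained illustration (not taken from the package) of the Python mechanism the change above relies on: adjacent string literals inside a call are concatenated at compile time, so the long debug message can be split across lines without a "+". The sample values below are placeholders.

history_item_id, logfile, zarr_url = 1, "/tmp/task.log", "plate.zarr/B/03/0"
message = (
    "[update_single_image_logfile] "
    f"{history_item_id=}, {logfile=}, {zarr_url=}"
)
print(message)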
fractal_server/app/routes/admin/v2/task.py
CHANGED
@@ -68,7 +68,7 @@ async def query_tasks(
     db: AsyncSession = Depends(get_async_db),
 ) -> list[TaskV2Info]:
     """
-    Query `TaskV2` table and get
+    Query `TaskV2` table and get information about related items
     (WorkflowV2s and ProjectV2s)

     Args:
fractal_server/app/routes/api/v2/history.py
CHANGED
@@ -140,20 +140,27 @@ async def get_per_workflowtask_subsets_aggregated_info(
         .group_by(ImageStatus.parameters_hash)
     )
     res = await db.execute(stm)
-
+    hash_to_statuses = res.all()

-
-    for
-
-
+    subsets = []
+    for parameters_hash, statuses in hash_to_statuses:
+        # Get the oldest HistoryItemV2 matching with `parameters_hash`
+        stm = (
+            select(HistoryItemV2)
             .where(HistoryItemV2.workflowtask_id == workflowtask_id)
             .where(HistoryItemV2.dataset_id == dataset_id)
-            .where(HistoryItemV2.parameters_hash ==
+            .where(HistoryItemV2.parameters_hash == parameters_hash)
+            .order_by(HistoryItemV2.timestamp_started)
+            .limit(1)
         )
-
+        res = await db.execute(stm)
+        oldest_history_item = res.scalar_one()
+
+        subsets.append(
             {
-                "
-                "
+                "_timestamp": oldest_history_item.timestamp_started,
+                "workflowtask_dump": oldest_history_item.workflowtask_dump,
+                "parameters_hash": parameters_hash,
                 "info": {
                     "num_done_images": statuses.count(
                         HistoryItemImageStatus.DONE
@@ -168,7 +175,11 @@ async def get_per_workflowtask_subsets_aggregated_info(
             }
         )

-
+    # Use `_timestamp` values for sorting, and then drop them from the response
+    sorted_results = sorted(subsets, key=lambda obj: obj["_timestamp"])
+    [item.pop("_timestamp") for item in sorted_results]
+
+    return JSONResponse(content=sorted_results, status_code=200)


 @router.get("/project/{project_id}/status/images/")
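A stand-alone sketch (with made-up data, not part of the diff) of the sorting step added above: the helper "_timestamp" key only serves to order the aggregated subsets by the oldest matching HistoryItemV2, and it is removed again before the response is returned.

from datetime import datetime

subsets = [
    {"_timestamp": datetime(2025, 3, 2), "parameters_hash": "b", "info": {}},
    {"_timestamp": datetime(2025, 3, 1), "parameters_hash": "a", "info": {}},
]
# Sort by the helper timestamp, then strip it from every entry
sorted_results = sorted(subsets, key=lambda obj: obj["_timestamp"])
for item in sorted_results:
    item.pop("_timestamp")
print([item["parameters_hash"] for item in sorted_results])  # ['a', 'b']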
fractal_server/app/runner/executors/base_runner.py
CHANGED
@@ -106,7 +106,8 @@ class BaseRunner(object):
         )
         if _COMPONENT_KEY_ not in single_kwargs.keys():
             raise ValueError(
-                f"No '{_COMPONENT_KEY_}' key
+                f"No '{_COMPONENT_KEY_}' key "
+                f"in {list(single_kwargs.keys())}"
             )
         if not in_compound_task:
             zarr_urls = [kwargs["zarr_url"] for kwargs in list_parameters]
fractal_server/app/runner/executors/local/_submit_setup.py
CHANGED
@@ -21,18 +21,10 @@ def _local_submit_setup(
     FIXME

     Arguments:
-        wftask:
-
-
-
-        workflow_dir_remote:
-            Not used in this function.
-
-    Returns:
-        submit_setup_dict:
-            A dictionary that will be passed on to
-            `FractalThreadPoolExecutor.submit` and
-            `FractalThreadPoolExecutor.map`, so as to set extra options.
+        wftask: WorkflowTask for which the configuration is to be assembled
+        root_dir_local:
+        root_dir_rempote: Not used in this function.
+        which_type: Whether it is a parallel or non-parallel task.
     """

     local_backend_config = get_local_backend_config(
@@ -43,7 +35,7 @@ def _local_submit_setup(
     # Get TaskFiles object
     task_files = TaskFiles(
         root_dir_local=root_dir_local,
-        root_dir_remote=
+        root_dir_remote=root_dir_local,
         task_order=wftask.order,
         task_name=wftask.task.name,
     )
fractal_server/app/runner/executors/slurm_common/_slurm_config.py
CHANGED
@@ -213,7 +213,7 @@ class SlurmConfig(BaseModel):
     expected file content are defined in
     [`SlurmConfigFile`](./#fractal_server.app.runner._slurm._slurm_config.SlurmConfigFile)).

-    Part of the attributes map directly to some of the SLURM
+    Part of the attributes map directly to some of the SLURM attributes (see
     https://slurm.schedmd.com/sbatch.html), e.g. `partition`. Other attributes
     are metaparameters which are needed in fractal-server to combine multiple
     tasks in the same SLURM job (e.g. `parallel_tasks_per_job` or
fractal_server/app/runner/executors/slurm_common/get_slurm_config.py
CHANGED
@@ -19,7 +19,7 @@ def get_slurm_config(
     Prepare a `SlurmConfig` configuration object

     The argument `which_type` determines whether we use `wftask.meta_parallel`
-    or `wftask.meta_non_parallel`. In the following
+    or `wftask.meta_non_parallel`. In the following description, let us assume
     that `which_type="parallel"`.

     The sources for `SlurmConfig` attributes, in increasing priority order, are
fractal_server/app/runner/executors/slurm_common/remote.py
CHANGED
@@ -134,7 +134,7 @@ def worker(
         _extra_import_paths = extra_import_paths.split(":")
         sys.path[:0] = _extra_import_paths

-    # Execute the job and
+    # Execute the job and capture exceptions
     try:
         with open(in_fname, "rb") as f:
             indata = f.read()
fractal_server/app/runner/v2/__init__.py
CHANGED
@@ -2,7 +2,7 @@
 Runner backend subsystem root V2

 This module is the single entry point to the runner backend subsystem V2.
-Other
+Other subsystems should only import this module and not its submodules or
 the individual backends.
 """
 import os
@@ -118,7 +118,7 @@ def submit_workflow(
         )
     except Exception as e:
         logger.error(
-            f"Error
+            f"Error connecting to the database. Original error: {str(e)}"
         )
         reset_logger_handlers(logger)
         return
fractal_server/app/runner/v2/runner.py
CHANGED
@@ -127,7 +127,7 @@ def execute_tasks_v2(
                 dataset_id=dataset.id,
                 parameters_hash=parameters_hash,
                 status=HistoryItemImageStatus.SUBMITTED,
-                logfile=
+                logfile=None,
             )
         )
         db.commit()
@@ -198,7 +198,8 @@ def execute_tasks_v2(
         # Update image list
         num_new_images = 0
         current_task_output.check_zarr_urls_are_unique()
-        # FIXME: Introduce for loop over task outputs, and processe them
+        # FIXME: Introduce for loop over task outputs, and processe them
+        # sequentially
         # each failure should lead to an update of the specific image status
         for image_obj in current_task_output.image_list_updates:
             image = image_obj.model_dump()
fractal_server/app/runner/v2/runner_functions.py
CHANGED
@@ -1,9 +1,7 @@
 import functools
 import logging
-import traceback
 from pathlib import Path
 from typing import Any
-from typing import Callable
 from typing import Literal
 from typing import Optional

@@ -59,38 +57,18 @@ def _cast_and_validate_InitTaskOutput(
     )


-def no_op_submit_setup_call(
+def no_op_submit_setup_call(
+    *,
+    wftask: WorkflowTaskV2,
+    root_dir_local: Path,
+    which_type: Literal["non_parallel", "parallel"],
+) -> dict[str, Any]:
     """
     Default (no-operation) interface of submit_setup_call in V2.
     """
     return {}


-# Backend-specific configuration
-def _get_executor_options(
-    *,
-    wftask: WorkflowTaskV2,
-    workflow_dir_local: Path,
-    workflow_dir_remote: Path,
-    submit_setup_call: Callable,
-    which_type: Literal["non_parallel", "parallel"],
-) -> dict:
-    try:
-        options = submit_setup_call(
-            wftask=wftask,
-            root_dir_local=workflow_dir_local,
-            root_dir_remote=workflow_dir_remote,
-            which_type=which_type,
-        )
-    except Exception as e:
-        tb = "".join(traceback.format_tb(e.__traceback__))
-        raise RuntimeError(
-            f"{type(e)} error in {submit_setup_call=}\n"
-            f"Original traceback:\n{tb}"
-        )
-    return options
-
-
 def _check_parallelization_list_size(my_list):
     if len(my_list) > MAX_PARALLELIZATION_LIST_SIZE:
         raise JobExecutionError(
@@ -109,7 +87,7 @@ def run_v2_task_non_parallel(
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
     executor: BaseRunner,
-    submit_setup_call:
+    submit_setup_call: callable = no_op_submit_setup_call,
     history_item_id: int,
 ) -> tuple[TaskOutput, int, dict[int, BaseException]]:
     """
@@ -123,11 +101,10 @@ def run_v2_task_non_parallel(
         )
         workflow_dir_remote = workflow_dir_local

-    executor_options =
+    executor_options = submit_setup_call(
         wftask=wftask,
-
-
-        submit_setup_call=submit_setup_call,
+        root_dir_local=workflow_dir_local,
+        root_dir_remote=workflow_dir_remote,
         which_type="non_parallel",
     )

@@ -169,7 +146,7 @@ def run_v2_task_parallel(
     executor: BaseRunner,
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
-    submit_setup_call:
+    submit_setup_call: callable = no_op_submit_setup_call,
     history_item_id: int,
 ) -> tuple[TaskOutput, int, dict[int, BaseException]]:

@@ -178,11 +155,10 @@ def run_v2_task_parallel(

     _check_parallelization_list_size(images)

-    executor_options =
+    executor_options = submit_setup_call(
         wftask=wftask,
-
-
-        submit_setup_call=submit_setup_call,
+        root_dir_local=workflow_dir_local,
+        root_dir_remote=workflow_dir_remote,
         which_type="parallel",
     )

@@ -237,22 +213,20 @@ def run_v2_task_compound(
     executor: BaseRunner,
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
-    submit_setup_call:
+    submit_setup_call: callable = no_op_submit_setup_call,
     history_item_id: int,
 ) -> tuple[TaskOutput, int, dict[int, BaseException]]:

-    executor_options_init =
+    executor_options_init = submit_setup_call(
         wftask=wftask,
-
-
-        submit_setup_call=submit_setup_call,
+        root_dir_local=workflow_dir_local,
+        root_dir_remote=workflow_dir_remote,
         which_type="non_parallel",
     )
-    executor_options_compute =
+    executor_options_compute = submit_setup_call(
         wftask=wftask,
-
-
-        submit_setup_call=submit_setup_call,
+        root_dir_local=workflow_dir_local,
+        root_dir_remote=workflow_dir_remote,
         which_type="parallel",
     )

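A short sketch (not part of the diff) of the new calling convention shown above: the `_get_executor_options` wrapper is removed, and the `run_v2_task_*` functions now invoke `submit_setup_call` directly with keyword arguments. The stub below is a hypothetical stand-in for a backend-specific implementation such as `_local_submit_setup`; names and values are illustrative only.

from pathlib import Path
from typing import Any, Literal

def fake_submit_setup_call(
    *,
    wftask: Any,
    root_dir_local: Path,
    root_dir_remote: Path,
    which_type: Literal["non_parallel", "parallel"],
) -> dict[str, Any]:
    # A real backend would assemble its executor options here.
    return {"which_type": which_type, "root_dir_local": str(root_dir_local)}

executor_options = fake_submit_setup_call(
    wftask=None,  # placeholder; the real call receives a WorkflowTaskV2
    root_dir_local=Path("/tmp/job"),
    root_dir_remote=Path("/tmp/job"),
    which_type="non_parallel",
)
print(executor_options)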
fractal_server/app/schemas/v2/manifest.py
CHANGED
@@ -128,7 +128,7 @@ class ManifestV2(BaseModel):
         The list of tasks, represented as specified by subclasses of the
         _TaskManifestBase (a.k.a. TaskManifestType)
     has_args_schemas:
-        `True` if the manifest
+        `True` if the manifest includes JSON Schemas for the arguments of
         each task.
     args_schema_version:
         Label of how `args_schema`s were generated (e.g. `pydantic_v1`).
fractal_server/app/security/__init__.py
CHANGED
@@ -24,7 +24,7 @@ FastAPIUsers with Barer Token and cookie transports and register local routes.
 Then, for each OAuth client defined in the Fractal Settings configuration, it
 registers the client and the relative routes.

-All routes are
+All routes are registered under the `auth/` prefix.
 """
 import contextlib
 from typing import Any
@@ -296,7 +296,7 @@ async def _create_first_user(
     Private method to create the first fractal-server user

     Create a user with the given default arguments and return a message with
-    the relevant
+    the relevant information. If the user already exists, for example after a
     restart, it returns a message to inform that user already exists.

     **WARNING**: This function is only meant to create the first user, and then
@@ -312,7 +312,7 @@ async def _create_first_user(
         email: New user's email
         password: New user's password
         is_superuser: `True` if the new user is a superuser
-        is_verified: `True` if the new user is
+        is_verified: `True` if the new user is verified
         username:
     """
     function_logger = set_logger("fractal_server.create_first_user")
fractal_server/config.py
CHANGED
@@ -589,11 +589,11 @@ class Settings(BaseSettings):
     """
     FRACTAL_EMAIL_SMTP_SERVER: Optional[str] = None
     """
-
+    SMTP server for the OAuth-signup emails.
     """
     FRACTAL_EMAIL_SMTP_PORT: Optional[int] = None
     """
-
+    SMTP server port for the OAuth-signup emails.
     """
     FRACTAL_EMAIL_INSTANCE_NAME: Optional[str] = None
     """
fractal_server/tasks/v2/templates/4_pip_show.sh
CHANGED
@@ -20,7 +20,7 @@ echo
 # FIXME: only run pip-show once!

 # Extract information about paths
-# WARNING: this block will fail for paths which
+# WARNING: this block will fail for paths which include whitespace characters
 write_log "START pip show"
 $VENVPYTHON -m pip show ${PACKAGE_NAME}
 write_log "END pip show"
{fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a3.dist-info}/RECORD
CHANGED
@@ -1,10 +1,10 @@
-fractal_server/__init__.py,sha256=
+fractal_server/__init__.py,sha256=hpk2FCu0xXoEuH8cDYFItQE2071uD_KsQqreoRWpt4A,25
 fractal_server/__main__.py,sha256=igfS2XL3e8JycuhASl2vsYuIPma0MG0cfPPFRuQfh14,6906
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/db/__init__.py,sha256=wup2wcOkyOh8Vd0Xm76PZn_naxeMqaL4eF8DHHXTGlI,2889
 fractal_server/app/history/__init__.py,sha256=bisQpsMCFmtQGhIsf9ES0HdEuH4DYkPxVO7SlvxaWTY,239
-fractal_server/app/history/image_updates.py,sha256=
+fractal_server/app/history/image_updates.py,sha256=YqoOwPW97LQk9-o04zB1lSU0yQ90V0AcuhFIhTVUxos,3959
 fractal_server/app/history/status_enum.py,sha256=xBBLHQY2Z105b2_HVU9DVRgdEVbbjLm6l4kkcV0Q1Sk,275
 fractal_server/app/models/__init__.py,sha256=xJWiGAwpXmCpnFMC4c_HTqoUCzMOXrakoGLUH_uMvdA,415
 fractal_server/app/models/linkusergroup.py,sha256=LWTUfhH2uAnn_4moK7QdRUIHWtpw-hPZuW-5jClv_OE,610
@@ -28,7 +28,7 @@ fractal_server/app/routes/admin/v2/accounting.py,sha256=UDMPD9DMhMBcu4UsEOEtKMCG
 fractal_server/app/routes/admin/v2/impersonate.py,sha256=gc4lshfEPFR6W2asH7aKu6hqE6chzusdhAUVV9p51eU,1131
 fractal_server/app/routes/admin/v2/job.py,sha256=4soc-5d99QEsir7U9AqpofgaGggSBwgMm7mXW5LBvSI,7439
 fractal_server/app/routes/admin/v2/project.py,sha256=luy-yiGX1JYTdPm1hpIdDUUqPm8xHuipLy9k2X6zu74,1223
-fractal_server/app/routes/admin/v2/task.py,sha256=
+fractal_server/app/routes/admin/v2/task.py,sha256=QOwgyDU9m7T_wLMwkdgfFaoMjNxcDg6zMVpngxhUvqk,4374
 fractal_server/app/routes/admin/v2/task_group.py,sha256=XTjdqgABXZcx9EenaoqSmHh12BXSentUus3SV0oxBMs,7929
 fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=0e0ZJ_k75TVHaT2o8Xk33DPDSgh-eBhZf-y4y7t-Adg,9429
 fractal_server/app/routes/api/__init__.py,sha256=2IDheFi0OFdsUg7nbUiyahqybvpgXqeHUXIL2QtWrQQ,641
@@ -37,10 +37,10 @@ fractal_server/app/routes/api/v2/_aux_functions.py,sha256=pmYbsHjJexb5-zMCJQLNSt
 fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=qdXCb6IP8-qPEAxGZKljtjIqNzIAyRaAsQSRi5VqFHM,6773
 fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=uhNSs-jcS7ndIUFKiOC1yrDiViw3uvKEXi9UL04BMks,11642
 fractal_server/app/routes/api/v2/dataset.py,sha256=gS5169eJRGHBQNUnkDB75Bv3Kg8Ql-tMVw5_FAxUEKc,9664
-fractal_server/app/routes/api/v2/history.py,sha256=
+fractal_server/app/routes/api/v2/history.py,sha256=C_V_u2ab4i8v4bM-uQ0SV3Olyor_5olQRDYFsLco2Ac,9801
 fractal_server/app/routes/api/v2/images.py,sha256=wUhYomNLGtJTtu_pD2oQorcH2LISxo64Wxo6ogc4IXc,8185
 fractal_server/app/routes/api/v2/job.py,sha256=m89FTh9Px25oXCeWj2k2NdGWQaO2oxMh-6lZppcsJOY,5551
-fractal_server/app/routes/api/v2/project.py,sha256=
+fractal_server/app/routes/api/v2/project.py,sha256=apWQNOdj2FIZmBl6Cjtr2tK-jUclEsw-ikKg6PMT8sU,7828
 fractal_server/app/routes/api/v2/submit.py,sha256=K4OjcSg476JXIeeMUaYdTDk8Qpj5IO5UULvfErI7Y5Y,8624
 fractal_server/app/routes/api/v2/task.py,sha256=z3_SxsXoKsbM9GGNJUdIiZisQwAJSBqvCc7thaJIOTU,7191
 fractal_server/app/routes/api/v2/task_collection.py,sha256=IDNF6sjDuU37HIQ0TuQA-TZIuf7nfHAQXUUNmkrlhLM,12706
@@ -69,18 +69,18 @@ fractal_server/app/runner/components.py,sha256=ZF8ct_Ky5k8IAcrmpYOZ-bc6OBgdELEig
 fractal_server/app/runner/compress_folder.py,sha256=HSc1tv7x2DBjBoXwugZlC79rm9GNBIWtQKK9yWn5ZBI,3991
 fractal_server/app/runner/exceptions.py,sha256=_qZ_t8O4umAdJ1ikockiF5rDJuxnEskrGrLjZcnQl7A,4159
 fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fractal_server/app/runner/executors/base_runner.py,sha256=
+fractal_server/app/runner/executors/base_runner.py,sha256=0E3gbSndXdEAxZwFCiZXrUd8tjEmvLa_ztPBGMJXtUw,3742
 fractal_server/app/runner/executors/local/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/local/_local_config.py,sha256=8dyg2Gh8L2FlG_jJRYLMkcMgVHGEY2w7DME9aaKXFFo,3688
-fractal_server/app/runner/executors/local/_submit_setup.py,sha256=
+fractal_server/app/runner/executors/local/_submit_setup.py,sha256=pDc9Q6axXL8_5JAV0byXzGOLOB0bZF88_L9LZykOgwM,1220
 fractal_server/app/runner/executors/local/runner.py,sha256=tEI3qe9UQKgqNoY6gkP1b2O1yRw3VGTiPTDKztrCt2I,7577
 fractal_server/app/runner/executors/slurm_common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm_common/_batching.py,sha256=ZY020JZlDS5mfpgpWTChQkyHU7iLE5kx2HVd57_C6XA,8850
 fractal_server/app/runner/executors/slurm_common/_job_states.py,sha256=nuV-Zba38kDrRESOVB3gaGbrSPZc4q7YGichQaeqTW0,238
-fractal_server/app/runner/executors/slurm_common/_slurm_config.py,sha256=
+fractal_server/app/runner/executors/slurm_common/_slurm_config.py,sha256=fZaFUUXqDH0p3DndCFUpFqTqyD2tMVCuSYgYLAycpVw,15897
 fractal_server/app/runner/executors/slurm_common/_submit_setup.py,sha256=crbfAAvXbxe_9PaokXkkVdPV65lSCFbInZ0RlT6uyHI,2746
-fractal_server/app/runner/executors/slurm_common/get_slurm_config.py,sha256
-fractal_server/app/runner/executors/slurm_common/remote.py,sha256=
+fractal_server/app/runner/executors/slurm_common/get_slurm_config.py,sha256=-fAX1DZMB5RZnyYanIJD72mWOJAPkh21jd4loDXKJw4,5994
+fractal_server/app/runner/executors/slurm_common/remote.py,sha256=iXLu4d-bWzn7qmDaOjKFkcuaSHLjPESAMSLcg6c99fc,5852
 fractal_server/app/runner/executors/slurm_common/utils_executors.py,sha256=naPyJI0I3lD-sYHbSXbMFGUBK4h_SggA5V91Z1Ch1Xg,1416
 fractal_server/app/runner/executors/slurm_ssh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/runner/executors/slurm_ssh/_executor_wait_thread.py,sha256=lnW8dNNPqqbpQvojVBQaNJm4wN3Qkw02RWBZ1w68Hyw,3755
@@ -97,14 +97,14 @@ fractal_server/app/runner/run_subprocess.py,sha256=c3JbYXq3hX2aaflQU19qJ5Xs6J6oX
 fractal_server/app/runner/set_start_and_last_task_index.py,sha256=-q4zVybAj8ek2XlbENKlfOAJ39hT_zoJoZkqzDqiAMY,1254
 fractal_server/app/runner/shutdown.py,sha256=9pfSKHDNdIcm0eY-opgRTi7y0HmvfPmYiu9JR6Idark,2082
 fractal_server/app/runner/task_files.py,sha256=5enzBqiQct1AUGwrGX-rxFCxnhW3SPYIUylMYwyVfrE,2482
-fractal_server/app/runner/v2/__init__.py,sha256=
+fractal_server/app/runner/v2/__init__.py,sha256=llVnhgNGsSuP_eZ_ilQixQTmwst79LWrgjILpC2Xn9o,14247
 fractal_server/app/runner/v2/_local.py,sha256=Zas2RS_f9mfdkXszBpzISHylLX1bX8pFuoLA1fHLFqQ,2945
-fractal_server/app/runner/v2/_slurm_ssh.py,sha256=
-fractal_server/app/runner/v2/_slurm_sudo.py,sha256=
+fractal_server/app/runner/v2/_slurm_ssh.py,sha256=5w_lwQzySx-R3kVg2Bf-21n5JpWjJAgMtYP2BROvWJo,3227
+fractal_server/app/runner/v2/_slurm_sudo.py,sha256=CzWUeC6at_Sj-wU1myjA68ZRKMiLZYBTLv9I9odUxBU,2914
 fractal_server/app/runner/v2/deduplicate_list.py,sha256=IVTE4abBU1bUprFTkxrTfYKnvkNTanWQ-KWh_etiT08,645
 fractal_server/app/runner/v2/merge_outputs.py,sha256=D1L4Taieq9i71SPQyNc1kMokgHh-sV_MqF3bv7QMDBc,907
-fractal_server/app/runner/v2/runner.py,sha256=
-fractal_server/app/runner/v2/runner_functions.py,sha256=
+fractal_server/app/runner/v2/runner.py,sha256=qtxmnrgMdlB3CA5Ayg7BXUv1yETR6H7kMLp70R1faSM,14456
+fractal_server/app/runner/v2/runner_functions.py,sha256=Zvi6sC5krltygLO-fC0K21VJEhmX1XQCm9IzVqf_cB0,9583
 fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=dvvRK7od8iQ8vdPf80uGUxs3i5i0buGjCodBxSjZ7PQ,3671
 fractal_server/app/runner/v2/task_interface.py,sha256=e1GGQSYd0MyBj1EZvEVzqv-HpVE4YffXOq82WLrCaOc,1866
 fractal_server/app/runner/versions.py,sha256=dSaPRWqmFPHjg20kTCHmi_dmGNcCETflDtDLronNanU,852
@@ -120,7 +120,7 @@ fractal_server/app/schemas/v2/dataset.py,sha256=xo7Y3fq5ThMVBp6xDVypdG-EmGfBX_vW
 fractal_server/app/schemas/v2/dumps.py,sha256=2GUjoqeblUvrSoojBz5odoUUf53IABtbY_5GvFZoMVc,1782
 fractal_server/app/schemas/v2/history.py,sha256=OHwRIbOIjBiiTYUNZYsHTdEXJHff17JRizQ8pf1e0vk,601
 fractal_server/app/schemas/v2/job.py,sha256=Dp_RRiC5uvJqq1fAJlBXztAFA-tS5FWuRnUbTnLtL6M,4226
-fractal_server/app/schemas/v2/manifest.py,sha256=
+fractal_server/app/schemas/v2/manifest.py,sha256=tcCvT4PbdtpdC5eU54MKUne6puXpnPlIExZYwLGHEAo,7133
 fractal_server/app/schemas/v2/project.py,sha256=uqBreoS0UAkbVEJJS2HkSdjCCWfFIkv6N70TWk9HgxA,868
 fractal_server/app/schemas/v2/status.py,sha256=SQaUpQkjFq5c5k5J4rOjNhuQaDOEg8lksPhkKmPU5VU,332
 fractal_server/app/schemas/v2/task.py,sha256=OUCNQQUULmWSOdPm8Dz8E0ivG1XOcvO4dxz-osSa9R0,7248
@@ -128,10 +128,10 @@ fractal_server/app/schemas/v2/task_collection.py,sha256=NFIcfTAhFN5LMxmyJCat7CKx
 fractal_server/app/schemas/v2/task_group.py,sha256=vFF850kJRmmcxt2sn7nrhm-OWJHRhYu_XOQP5LNiXyU,3850
 fractal_server/app/schemas/v2/workflow.py,sha256=qmKJZ9xZ6-sN41XdocZ7K6hum_pUfaMuKOJs_TlFCRQ,2211
 fractal_server/app/schemas/v2/workflowtask.py,sha256=qMvwlnFCsnyD8uv8HJ4cFy2-QMm2ETUFlTIbxIFUWxk,8056
-fractal_server/app/security/__init__.py,sha256=
+fractal_server/app/security/__init__.py,sha256=e2cveg5hQpieGD3bSPd5GTOMthvJ-HXH3buSb9WVfEU,14096
 fractal_server/app/security/signup_email.py,sha256=CR1VbsGFNshxsWquLDZPbUAYnGzkCHeJChtncq63RBc,1434
 fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
-fractal_server/config.py,sha256
+fractal_server/config.py,sha256=eYo-c3Zt4rkC45mewLYOeFZSA_7FF4Wmm6zDvX1dpt4,28549
 fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
 fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
 fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
@@ -195,7 +195,7 @@ fractal_server/tasks/v2/ssh/reactivate.py,sha256=8Rnbbny7TjMEAHhboqfgxBVZZK5UNNm
 fractal_server/tasks/v2/templates/1_create_venv.sh,sha256=PK0jdHKtQpda1zULebBaVPORt4t6V17wa4N1ohcj5ac,548
 fractal_server/tasks/v2/templates/2_pip_install.sh,sha256=Gpk2io8u9YaflFUlQu2NgkDQw5AA4m4AOVG1sB4yrHQ,1822
 fractal_server/tasks/v2/templates/3_pip_freeze.sh,sha256=JldREScEBI4cD_qjfX4UK7V4aI-FnX9ZvVNxgpSOBFc,168
-fractal_server/tasks/v2/templates/4_pip_show.sh,sha256=
+fractal_server/tasks/v2/templates/4_pip_show.sh,sha256=qm1vPy6AkKhWDjCJGXS8LqCLYO3KsAyRK325ZsFcF6U,1747
 fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh,sha256=q-6ZUvA6w6FDVEoSd9O63LaJ9tKZc7qAFH72SGPrd_k,284
 fractal_server/tasks/v2/templates/6_pip_install_from_freeze.sh,sha256=A2y8RngEjAcRhG-_owA6P7tAdrS_AszFuGXnaeMV8u0,1122
 fractal_server/tasks/v2/utils_background.py,sha256=W_RvihI1aiYPJNsPo8z4wKuA_bPs0UT2huzLihRpjU4,4248
@@ -206,8 +206,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=07TZpJ0Mh_A4lXVXrrH2o1VLFFGwxe
 fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
 fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
 fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
-fractal_server-2.14.
-fractal_server-2.14.
-fractal_server-2.14.
-fractal_server-2.14.
-fractal_server-2.14.
+fractal_server-2.14.0a3.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.14.0a3.dist-info/METADATA,sha256=F2fjhacHy-6hFP43F8JNom4iV9-WNRiO6CiCye5KzLI,4550
+fractal_server-2.14.0a3.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
+fractal_server-2.14.0a3.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.14.0a3.dist-info/RECORD,,
{fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a3.dist-info}/LICENSE
File without changes
{fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a3.dist-info}/WHEEL
File without changes
{fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a3.dist-info}/entry_points.txt
File without changes