fractal-server 1.3.10__py3-none-any.whl → 1.3.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/api/v1/dataset.py +13 -6
- fractal_server/app/api/v1/job.py +2 -15
- fractal_server/app/api/v1/project.py +0 -2
- fractal_server/app/models/dataset.py +5 -17
- fractal_server/app/models/workflow.py +0 -19
- fractal_server/app/runner/__init__.py +15 -22
- fractal_server/app/runner/_common.py +54 -56
- fractal_server/app/runner/_local/__init__.py +10 -4
- fractal_server/app/runner/_slurm/__init__.py +11 -4
- fractal_server/app/runner/_slurm/_executor_wait_thread.py +2 -0
- fractal_server/app/runner/common.py +4 -0
- fractal_server/app/runner/handle_failed_job.py +6 -5
- fractal_server/app/schemas/__init__.py +2 -0
- fractal_server/app/schemas/applyworkflow.py +0 -1
- fractal_server/app/schemas/dataset.py +37 -0
- fractal_server/app/schemas/project.py +0 -8
- fractal_server/app/schemas/workflow.py +21 -0
- fractal_server/config.py +5 -0
- fractal_server/migrations/versions/4c308bcaea2b_add_task_args_schema_and_task_args_.py +1 -1
- fractal_server/migrations/versions/8f79bd162e35_add_docs_info_and_docs_link_to_task_.py +1 -1
- fractal_server/migrations/versions/99ea79d9e5d2_add_dataset_history.py +36 -0
- fractal_server/migrations/versions/a7f4d6137b53_add_workflow_dump_to_applyworkflow.py +1 -1
- fractal_server/migrations/versions/f384e1c0cf5d_drop_task_default_args_columns.py +1 -1
- fractal_server/tasks/collection.py +2 -2
- {fractal_server-1.3.10.dist-info → fractal_server-1.3.12.dist-info}/METADATA +2 -1
- {fractal_server-1.3.10.dist-info → fractal_server-1.3.12.dist-info}/RECORD +30 -29
- {fractal_server-1.3.10.dist-info → fractal_server-1.3.12.dist-info}/LICENSE +0 -0
- {fractal_server-1.3.10.dist-info → fractal_server-1.3.12.dist-info}/WHEEL +0 -0
- {fractal_server-1.3.10.dist-info → fractal_server-1.3.12.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "1.3.10"
+__VERSION__ = "1.3.12"
fractal_server/app/api/v1/dataset.py
CHANGED
@@ -14,12 +14,12 @@ from ...db import AsyncSession
 from ...db import get_db
 from ...models import ApplyWorkflow
 from ...models import Dataset
-from ...models import DatasetStatusRead
 from ...models import JobStatusType
 from ...models import Resource
-from ...runner._common import
+from ...runner._common import HISTORY_FILENAME
 from ...schemas import DatasetCreate
 from ...schemas import DatasetRead
+from ...schemas import DatasetStatusRead
 from ...schemas import DatasetUpdate
 from ...schemas import ResourceCreate
 from ...schemas import ResourceRead
@@ -100,6 +100,13 @@ async def update_dataset(
     """
     Edit a dataset associated to the current project
     """
+
+    if dataset_update.history is not None:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Cannot modify dataset history.",
+        )
+
     output = await _get_dataset_check_owner(
         project_id=project_id,
         dataset_id=dataset_id,
@@ -350,7 +357,7 @@ async def export_history_as_workflow(
     # means that the history in the DB is up-to-date.

     # Read history from DB
-    history = dataset.
+    history = dataset.history

     # Construct reproducible workflow
     task_list = []
@@ -432,7 +439,7 @@ async def get_workflowtask_status(

     # Lowest priority: read status from DB, which corresponds to jobs that are
     # not running
-    history = dataset.
+    history = dataset.history
     for history_item in history:
         wftask_id = history_item["workflowtask"]["id"]
         wftask_status = history_item["status"]
@@ -457,10 +464,10 @@ async def get_workflowtask_status(
         # Highest priority: Read status updates coming from the running-job
         # temporary file. Note: this file only contains information on
         # WorkflowTask's that ran through successfully
-        tmp_file = Path(running_job.working_dir) /
+        tmp_file = Path(running_job.working_dir) / HISTORY_FILENAME
         try:
             with tmp_file.open("r") as f:
-                history = json.load(f)
+                history = json.load(f)
         except FileNotFoundError:
             history = []
         for history_item in history:
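A minimal sketch of what the new guard in `update_dataset` means for clients, reproduced outside the endpoint; the payload values are illustrative and `DatasetUpdate` is used as re-exported by the schemas package:

```python
# Sketch only: shows which payloads the PATCH handler now rejects with 422.
from fractal_server.app.schemas import DatasetUpdate

for payload in (DatasetUpdate(name="new-name"), DatasetUpdate(history=[])):
    if payload.history is not None:
        # update_dataset now raises HTTPException(422, "Cannot modify dataset history.")
        print("rejected:", payload)
    else:
        print("accepted:", payload)
```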
fractal_server/app/api/v1/job.py
CHANGED
@@ -1,4 +1,3 @@
-import json
 from io import BytesIO
 from pathlib import Path
 from typing import Optional
@@ -17,7 +16,6 @@ from ....syringe import Inject
 from ...db import AsyncSession
 from ...db import get_db
 from ...models import ApplyWorkflow
-from ...runner._common import METADATA_FILENAME
 from ...runner._common import SHUTDOWN_FILENAME
 from ...schemas import ApplyWorkflowRead
 from ...security import current_active_user
@@ -38,7 +36,7 @@ async def read_job(
     job_id: int,
     user: User = Depends(current_active_user),
     db: AsyncSession = Depends(get_db),
-) -> Optional[
+) -> Optional[ApplyWorkflowRead]:
     """
     Return info on an existing job
     """
@@ -51,19 +49,8 @@ async def read_job(
     )
     job = output["job"]

-    job_read = ApplyWorkflowRead(**job.dict())
-
-    # FIXME: this operation is not reading from the DB, but from file
-    try:
-        metadata_file = Path(job_read.working_dir) / METADATA_FILENAME
-        with metadata_file.open("r") as f:
-            metadata = json.load(f)
-        job_read.history = metadata["HISTORY_LEGACY"]
-    except (KeyError, FileNotFoundError):
-        pass
-
     await db.close()
-    return
+    return job


 @router.get(
fractal_server/app/api/v1/project.py
CHANGED
@@ -18,7 +18,6 @@ from ...db import DBSyncSession
 from ...db import get_db
 from ...db import get_sync_db
 from ...models import ApplyWorkflow
-from ...models import Dataset
 from ...models import JobStatusType
 from ...models import LinkUserProject
 from ...models import Project
@@ -76,7 +75,6 @@ async def create_project(
     )

     db_project = Project.from_orm(project)
-    db_project.dataset_list.append(Dataset(name=project.default_dataset_name))
     db_project.user_list.append(user)
     try:
         db.add(db_project)
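Together with the `ProjectCreate` change further down in this diff, this removes the automatically created default dataset. A pydantic-level sketch of what that implies for clients (illustrative values, no HTTP involved):

```python
# Sketch: project creation no longer carries `default_dataset_name`, so a dataset
# is created with a separate DatasetCreate request once the project exists.
from fractal_server.app.schemas import DatasetCreate, ProjectCreate

project_payload = ProjectCreate(name="my-project")
dataset_payload = DatasetCreate(name="input-ds")  # sent in a second request
assert not hasattr(project_payload, "default_dataset_name")
```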
fractal_server/app/models/dataset.py
CHANGED
@@ -1,7 +1,6 @@
 from typing import Any
 from typing import Optional

-from pydantic import BaseModel
 from sqlalchemy import Column
 from sqlalchemy.types import JSON
 from sqlmodel import Field
@@ -10,7 +9,6 @@ from sqlmodel import SQLModel

 from ..schemas.dataset import _DatasetBase
 from ..schemas.dataset import _ResourceBase
-from .workflow import WorkflowTaskStatusType


 class Resource(_ResourceBase, SQLModel, table=True):
@@ -18,20 +16,6 @@ class Resource(_ResourceBase, SQLModel, table=True):
     dataset_id: int = Field(foreign_key="dataset.id")


-class DatasetStatusRead(BaseModel):
-    """
-    Response type for the
-    `/project/{project_id}/dataset/{dataset_id}/status/` endpoint
-    """
-
-    status: Optional[
-        dict[
-            int,
-            WorkflowTaskStatusType,
-        ]
-    ] = None
-
-
 class Dataset(_DatasetBase, SQLModel, table=True):
     """
     Represent a dataset
@@ -43,7 +27,8 @@ class Dataset(_DatasetBase, SQLModel, table=True):
         ID of the project the workflow belongs to.
     meta:
         Metadata of the Dataset
-
+    history:
+        History of the Dataset
     resource_list:
         (Mapper attribute)

@@ -58,6 +43,9 @@ class Dataset(_DatasetBase, SQLModel, table=True):
         }
     )
     meta: dict[str, Any] = Field(sa_column=Column(JSON), default={})
+    history: list[dict[str, Any]] = Field(
+        sa_column=Column(JSON, server_default="[]", nullable=False)
+    )

     class Config:
         arbitrary_types_allowed = True
fractal_server/app/models/workflow.py
CHANGED
@@ -1,4 +1,3 @@
-from enum import Enum
 from typing import Any
 from typing import Optional
 from typing import Union
@@ -175,21 +174,3 @@ class Workflow(_WorkflowBase, SQLModel, table=True):
     @property
     def output_type(self):
         return self.task_list[-1].task.output_type
-
-
-class WorkflowTaskStatusType(str, Enum):
-    """
-    Define the available values for the status of a `WorkflowTask`.
-
-    This kind of status is constructed in the
-    `api/v1/project/{project_id}/dataset/{dataset_id}/status` endpoint.
-
-    Attributes:
-        SUBMITTED: The `WorkflowTask` is part of a running job.
-        DONE: The most-recent execution of this `WorkflowTask` was successful.
-        FAILED: The most-recent execution of this `WorkflowTask` failed.
-    """
-
-    SUBMITTED = "submitted"
-    DONE = "done"
-    FAILED = "failed"
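The enum is not dropped: it reappears in `fractal_server/app/schemas/workflow.py` later in this diff and is re-exported from the schemas package, so only the import path changes. A one-line sketch of the updated import:

```python
# The enum itself is unchanged; it now lives in (and is re-exported from) the schemas package.
from fractal_server.app.schemas import WorkflowTaskStatusType

assert WorkflowTaskStatusType.DONE.value == "done"
```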
fractal_server/app/runner/__init__.py
CHANGED
@@ -4,6 +4,7 @@
 # Original authors:
 # Jacopo Nespolo <jacopo.nespolo@exact-lab.it>
 # Tommaso Comparin <tommaso.comparin@exact-lab.it>
+# Marco Franzon <marco.franzon@exact-lab.it>
 #
 # This file is part of Fractal and was originally developed by eXact lab S.r.l.
 # <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
@@ -225,11 +226,12 @@ async def submit_workflow(
     db_sync = next(DB.get_sync_db())
     db_sync.close()

-
+    output_dataset_meta_hist = await process_workflow(
         workflow=workflow,
         input_paths=input_paths,
         output_path=output_path,
         input_metadata=input_dataset.meta,
+        input_history=input_dataset.history,
         slurm_user=slurm_user,
         user_cache_dir=user_cache_dir,
         workflow_dir=WORKFLOW_DIR,
@@ -249,15 +251,9 @@ async def submit_workflow(
     # Replace output_dataset.meta with output_dataset_meta, while handling
     # the history property in a special way (i.e. appending to and
     # existing entry rather than replacing it)
-
-
-
-            # For non-history keys, replace with new value
-            new_meta[key] = value
-        else:
-            # For history key, append to existing entry
-            new_meta[key] = output_dataset.meta.get(key, []) + value
-    output_dataset.meta = new_meta
+    output_dataset.history = output_dataset_meta_hist.pop("history")
+    output_dataset.meta = output_dataset_meta_hist.pop("metadata")
+
     db_sync.merge(output_dataset)

     # Update job DB entry
@@ -277,19 +273,18 @@ async def submit_workflow(

     # Assemble output_dataset.meta based on the last successful task, i.e.
     # based on METADATA_FILENAME
-
+    output_dataset.meta = assemble_meta_failed_job(job, output_dataset)

     # Assemble new history and assign it to output_dataset.meta
     failed_wftask = db_sync.get(WorkflowTask, e.workflow_task_id)
-
+    output_dataset.history = assemble_history_failed_job(
         job,
         output_dataset,
         workflow,
         logger,
         failed_wftask=failed_wftask,
     )
-
-    output_dataset.meta = new_meta
+
     db_sync.merge(output_dataset)

     job.status = JobStatusType.FAILED
@@ -313,17 +308,16 @@ async def submit_workflow(

     # Assemble output_dataset.meta based on the last successful task, i.e.
     # based on METADATA_FILENAME
-
+    output_dataset.meta = assemble_meta_failed_job(job, output_dataset)

     # Assemble new history and assign it to output_dataset.meta
-
+    output_dataset.history = assemble_history_failed_job(
         job,
         output_dataset,
         workflow,
         logger,
     )
-
-    output_dataset.meta = new_meta
+
     db_sync.merge(output_dataset)

     job.status = JobStatusType.FAILED
@@ -343,17 +337,16 @@ async def submit_workflow(

     # Assemble output_dataset.meta based on the last successful task, i.e.
     # based on METADATA_FILENAME
-
+    output_dataset.meta = assemble_meta_failed_job(job, output_dataset)

     # Assemble new history and assign it to output_dataset.meta
-
+    output_dataset.history = assemble_history_failed_job(
         job,
         output_dataset,
         workflow,
         logger,
     )
-
-    output_dataset.meta = new_meta
+
     db_sync.merge(output_dataset)

     job.status = JobStatusType.FAILED
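The backends' `process_workflow` now hands back both metadata and history, and `submit_workflow` assigns them to separate `Dataset` columns. A minimal sketch of that contract, with illustrative values (not taken from the package):

```python
# Illustrative only: the dict shape returned by process_workflow and how the
# caller splits it, mirroring the assignments in the hunk above.
output_dataset_meta_hist = {
    "metadata": {"some_key": "some_value"},
    "history": [
        {"workflowtask": {"id": 1}, "status": "done", "parallelization": None},
    ],
}
history = output_dataset_meta_hist.pop("history")    # -> Dataset.history
metadata = output_dataset_meta_hist.pop("metadata")  # -> Dataset.meta
```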
fractal_server/app/runner/_common.py
CHANGED
@@ -20,13 +20,13 @@ from typing import Optional

 from ...logger import get_logger
 from ..models import WorkflowTask
-from ..
+from ..schemas import WorkflowTaskStatusType
 from .common import JobExecutionError
 from .common import TaskExecutionError
 from .common import TaskParameters
 from .common import write_args_file

-
+HISTORY_FILENAME = "history.json"
 METADATA_FILENAME = "metadata.json"
 SHUTDOWN_FILENAME = "shutdown"

@@ -259,7 +259,7 @@ def call_single_task(

     # write args file (by assembling task_pars and wftask.args)
     write_args_file(
-        task_pars.dict(),
+        task_pars.dict(exclude={"history"}),
         wftask.args or {},
         path=task_files.args,
     )
@@ -296,17 +296,12 @@ def call_single_task(
     if diff_metadata is None:
         diff_metadata = {}

+    # Prepare updated_metadata
     updated_metadata = task_pars.metadata.copy()
     updated_metadata.update(diff_metadata)

-    #
-
-    try:
-        updated_metadata["HISTORY_LEGACY"].append(HISTORY_LEGACY)
-    except KeyError:
-        updated_metadata["HISTORY_LEGACY"] = [HISTORY_LEGACY]
-
-    # Update history
+    # Prepare updated_history (note: the expected type for history items is
+    # defined in `_DatasetHistoryItem`)
     wftask_dump = wftask.dict(exclude={"task"})
     wftask_dump["task"] = wftask.task.dict()
     new_history_item = dict(
@@ -314,15 +309,15 @@ def call_single_task(
         status=WorkflowTaskStatusType.DONE,
         parallelization=None,
     )
-
-
-    except KeyError:
-        updated_metadata["history"] = [new_history_item]
+    updated_history = task_pars.history.copy()
+    updated_history.append(new_history_item)

+    # Assemble a TaskParameter object
     out_task_parameters = TaskParameters(
         input_paths=[task_pars.output_path],
         output_path=task_pars.output_path,
         metadata=updated_metadata,
+        history=updated_history,
     )

     return out_task_parameters
@@ -335,7 +330,7 @@ def call_single_parallel_task(
     task_pars: TaskParameters,
     workflow_dir: Path,
     workflow_dir_user: Optional[Path] = None,
-) ->
+) -> Any:
     """
     Call a single instance of a parallel task

@@ -366,7 +361,8 @@ def call_single_parallel_task(
         relevant for multi-user executors).

     Returns:
-        The `
+        The `json.load`-ed contents of the metadiff output file, or `None` if
+        the file is missing.

     Raises:
         TaskExecutionError: If the wrapped task raises a task-related error.
@@ -390,7 +386,7 @@ def call_single_parallel_task(

     # write args file (by assembling task_pars, wftask.args and component)
     write_args_file(
-        task_pars.dict(),
+        task_pars.dict(exclude={"history"}),
         wftask.args or {},
         dict(component=component),
         path=task_files.args,
@@ -406,13 +402,21 @@ def call_single_parallel_task(
         _call_command_wrapper(
             cmd, stdout=task_files.out, stderr=task_files.err
         )
-        return task_files.metadiff
     except TaskExecutionError as e:
         e.workflow_task_order = wftask.order
         e.workflow_task_id = wftask.id
         e.task_name = wftask.task.name
         raise e

+    # JSON-load metadiff file and return its contents (or None)
+    try:
+        with task_files.metadiff.open("r") as f:
+            this_meta_update = json.load(f)
+    except FileNotFoundError:
+        this_meta_update = None
+
+    return this_meta_update
+

 def call_parallel_task(
     *,
@@ -431,6 +435,10 @@ def call_parallel_task(
     and return a single TaskParameters instance to be passed on to the
     next task.

+    **NOTE**: this function is executed by the same user that runs
+    `fractal-server`, and therefore may not have access to some of user's
+    files.
+
     Args:
         executor:
             The `concurrent.futures.Executor`-compatible executor that will
@@ -508,62 +516,48 @@ def call_parallel_task(
     # make this call blocking. This is required *also* because otherwise the
     # shutdown of a FractalSlurmExecutor while running map() may not work
     aggregated_metadata_update: dict[str, Any] = {}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            "This feature is experimental and it may change in "
-            "future releases."
-        )
-    except FileNotFoundError as e:
-        logger.error(
-            "Skip collection and aggregation of parallel-task updated "
-            f"metadata. Original error: {str(e)}"
+    for this_meta_update in map_iter:
+        # Cover the case where the task wrote `null`, rather than a
+        # valid dictionary (ref fractal-server issue #878), or where the
+        # metadiff file was missing.
+        if this_meta_update is None:
+            this_meta_update = {}
+        # Include this_meta_update into aggregated_metadata_update
+        for key, val in this_meta_update.items():
+            aggregated_metadata_update.setdefault(key, []).append(val)
+    if aggregated_metadata_update:
+        logger.warning(
+            "Aggregating parallel-taks updated metadata (with keys "
+            f"{list(aggregated_metadata_update.keys())}).\n"
+            "This feature is experimental and it may change in "
+            "future releases."
         )
-        aggregated_metadata_update = {}

-    #
+    # Prepare updated_metadata
     updated_metadata = task_pars_depend.metadata.copy()
     updated_metadata.update(aggregated_metadata_update)

-    #
-
-    try:
-        updated_metadata["HISTORY_LEGACY"].append(HISTORY_LEGACY)
-    except KeyError:
-        updated_metadata["HISTORY_LEGACY"] = [HISTORY_LEGACY]
-
-    # Update history
+    # Prepare updated_history (note: the expected type for history items is
+    # defined in `_DatasetHistoryItem`)
     wftask_dump = wftask.dict(exclude={"task"})
     wftask_dump["task"] = wftask.task.dict()
     new_history_item = dict(
         workflowtask=wftask_dump,
-        status=
+        status=WorkflowTaskStatusType.DONE,
         parallelization=dict(
             parallelization_level=wftask.parallelization_level,
             component_list=component_list,
         ),
     )
-
-
-    except KeyError:
-        updated_metadata["history"] = [new_history_item]
+    updated_history = task_pars_depend.history.copy()
+    updated_history.append(new_history_item)

+    # Assemble a TaskParameter object
     out_task_parameters = TaskParameters(
         input_paths=[task_pars_depend.output_path],
         output_path=task_pars_depend.output_path,
         metadata=updated_metadata,
+        history=updated_history,
     )

     return out_task_parameters
@@ -671,4 +665,8 @@ def execute_tasks(
     with open(workflow_dir / METADATA_FILENAME, "w") as f:
         json.dump(current_task_pars.metadata, f, indent=2)

+    # Write most recent metadata to HISTORY_FILENAME
+    with open(workflow_dir / HISTORY_FILENAME, "w") as f:
+        json.dump(current_task_pars.history, f, indent=2)
+
     return current_task_pars
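For orientation, a self-contained sketch of the bookkeeping these hunks introduce: one history item per executed `WorkflowTask`, appended to the running list and dumped to `history.json`. All field values below are made up; only the item shape and the file dump mirror the diff:

```python
# Self-contained sketch of the per-task history item and the history.json dump.
import json
import tempfile
from pathlib import Path

HISTORY_FILENAME = "history.json"

new_history_item = dict(
    workflowtask={"id": 7, "order": 0, "task": {"name": "some-task"}},
    status="done",
    parallelization=dict(parallelization_level="well", component_list=["B/03", "B/05"]),
)
history = []  # task_pars.history before the first task
history.append(new_history_item)

workflow_dir = Path(tempfile.mkdtemp())  # stand-in for the job working dir
with open(workflow_dir / HISTORY_FILENAME, "w") as f:
    json.dump(history, f, indent=2)
```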
fractal_server/app/runner/_local/__init__.py
CHANGED
@@ -38,6 +38,7 @@ def _process_workflow(
     input_paths: list[Path],
     output_path: Path,
     input_metadata: dict[str, Any],
+    input_history: list[dict[str, Any]],
     logger_name: str,
     workflow_dir: Path,
     first_task_index: int,
@@ -62,14 +63,17 @@ def _process_workflow(
             input_paths=input_paths,
             output_path=output_path,
             metadata=input_metadata,
+            history=input_history,
         ),
         workflow_dir=workflow_dir,
         workflow_dir_user=workflow_dir,
         logger_name=logger_name,
         submit_setup_call=_local_submit_setup,
     )
-
-
+    output_dataset_metadata_history = dict(
+        metadata=output_task_pars.metadata, history=output_task_pars.history
+    )
+    return output_dataset_metadata_history


 async def process_workflow(
@@ -78,6 +82,7 @@ async def process_workflow(
     input_paths: list[Path],
     output_path: Path,
     input_metadata: dict[str, Any],
+    input_history: list[dict[str, Any]],
     logger_name: str,
     workflow_dir: Path,
     workflow_dir_user: Optional[Path] = None,
@@ -160,14 +165,15 @@ async def process_workflow(
         last_task_index=last_task_index,
     )

-
+    output_dataset_metadata_history = await async_wrap(_process_workflow)(
         workflow=workflow,
         input_paths=input_paths,
         output_path=output_path,
         input_metadata=input_metadata,
+        input_history=input_history,
         logger_name=logger_name,
         workflow_dir=workflow_dir,
         first_task_index=first_task_index,
         last_task_index=last_task_index,
     )
-    return
+    return output_dataset_metadata_history
fractal_server/app/runner/_slurm/__init__.py
CHANGED
@@ -4,6 +4,7 @@
 # Original authors:
 # Jacopo Nespolo <jacopo.nespolo@exact-lab.it>
 # Tommaso Comparin <tommaso.comparin@exact-lab.it>
+# Marco Franzon <marco.franzon@exact-lab.it>
 #
 # This file is part of Fractal and was originally developed by eXact lab S.r.l.
 # <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
@@ -35,6 +36,7 @@ def _process_workflow(
     input_paths: list[Path],
     output_path: Path,
     input_metadata: dict[str, Any],
+    input_history: list[dict[str, Any]],
     logger_name: str,
     workflow_dir: Path,
     workflow_dir_user: Path,
@@ -83,14 +85,17 @@ def _process_workflow(
             input_paths=input_paths,
             output_path=output_path,
             metadata=input_metadata,
+            history=input_history,
         ),
         workflow_dir=workflow_dir,
         workflow_dir_user=workflow_dir_user,
         submit_setup_call=_slurm_submit_setup,
         logger_name=logger_name,
     )
-
-
+    output_dataset_metadata_history = dict(
+        metadata=output_task_pars.metadata, history=output_task_pars.history
+    )
+    return output_dataset_metadata_history


 async def process_workflow(
@@ -99,6 +104,7 @@ async def process_workflow(
     input_paths: list[Path],
     output_path: Path,
     input_metadata: dict[str, Any],
+    input_history: list[dict[str, Any]],
     logger_name: str,
     workflow_dir: Path,
     workflow_dir_user: Optional[Path] = None,
@@ -122,11 +128,12 @@ async def process_workflow(
         last_task_index=last_task_index,
     )

-
+    output_dataset_metadata_history = await async_wrap(_process_workflow)(
         workflow=workflow,
         input_paths=input_paths,
         output_path=output_path,
         input_metadata=input_metadata,
+        input_history=input_history,
         logger_name=logger_name,
         workflow_dir=workflow_dir,
         workflow_dir_user=workflow_dir_user,
@@ -136,4 +143,4 @@ async def process_workflow(
         first_task_index=first_task_index,
         last_task_index=last_task_index,
     )
-    return
+    return output_dataset_metadata_history
fractal_server/app/runner/_slurm/_executor_wait_thread.py
CHANGED
@@ -108,6 +108,8 @@ class FractalSlurmWaitThread(FractalFileWaitThread):
     The function is copied from clusterfutures 0.5. Original Copyright: 2022
     Adrian Sampson, released under the MIT licence

+    **Note**: if `self.interval != 1` then this should be modified, but for
+    `clusterfutures` v0.5 `self.interval` is indeed equal to `1`.

     Changed from clusterfutures:
     * Rename `id_to_filename` to `id_to_filenames`
fractal_server/app/runner/common.py
CHANGED
@@ -176,11 +176,15 @@ class TaskParameters(BaseModel):
     metadata:
         Dataset metadata, as found in the input dataset or as updated by
         the previous task.
+    history:
+        Dataset history, as found in the input dataset or as updated by
+        the previous task.
     """

     input_paths: list[Path]
     output_path: Path
     metadata: dict[str, Any]
+    history: list[dict[str, Any]]

     class Config:
         arbitrary_types_allowed = True
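`history` now travels between tasks inside `TaskParameters`, but is stripped before the task args files are written (see the `task_pars.dict(exclude={"history"})` calls earlier in this diff). A small pydantic-v1 sketch of that behaviour, using a simplified stand-in model rather than the real class:

```python
# Simplified stand-in for TaskParameters (str paths instead of Path, no Config);
# only the exclude-on-dump behaviour is the point here.
from typing import Any
from pydantic import BaseModel

class TaskParametersSketch(BaseModel):
    input_paths: list[str]
    output_path: str
    metadata: dict[str, Any]
    history: list[dict[str, Any]]

tp = TaskParametersSketch(input_paths=["/in"], output_path="/out", metadata={}, history=[])
assert "history" not in tp.dict(exclude={"history"})
```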
fractal_server/app/runner/handle_failed_job.py
CHANGED
@@ -3,6 +3,7 @@
 #
 # Original authors:
 # Tommaso Comparin <tommaso.comparin@exact-lab.it>
+# Marco Franzon <marco.franzon@exact-lab.it>
 #
 # This file is part of Fractal and was originally developed by eXact lab S.r.l.
 # <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
@@ -23,6 +24,7 @@ from ..models import Dataset
 from ..models import Workflow
 from ..models import WorkflowTask
 from ..models import WorkflowTaskStatusType
+from ._common import HISTORY_FILENAME
 from ._common import METADATA_FILENAME


@@ -59,14 +61,13 @@ def assemble_history_failed_job(
     # information.

     # Part 1: Read exising history from DB
-    new_history = output_dataset.
+    new_history = output_dataset.history

     # Part 2: Extend history based on tmp_metadata_file
-
+    tmp_history_file = Path(job.working_dir) / HISTORY_FILENAME
     try:
-        with
-
-        tmp_file_history = tmp_file_meta.get("history", [])
+        with tmp_history_file.open("r") as f:
+            tmp_file_history = json.load(f)
         new_history.extend(tmp_file_history)
     except FileNotFoundError:
         tmp_file_history = []
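A compact sketch of what this assembly amounts to for a failed job; note that the final step (appending a FAILED entry for the failing task) is an assumption about the part of `assemble_history_failed_job` that is not shown in this hunk:

```python
# Sketch, not the package code: DB history + whatever the job wrote to
# history.json, optionally followed by a FAILED item for the failing task
# (this last step is assumed, not shown in the hunk above).
import json
from pathlib import Path

def sketch_assemble_history(db_history, working_dir, failed_wftask_dump=None):
    new_history = list(db_history)
    try:
        with (Path(working_dir) / "history.json").open("r") as f:
            new_history.extend(json.load(f))
    except FileNotFoundError:
        pass
    if failed_wftask_dump is not None:
        new_history.append(
            dict(workflowtask=failed_wftask_dump, status="failed", parallelization=None)
        )
    return new_history
```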
fractal_server/app/schemas/__init__.py
CHANGED
@@ -5,6 +5,7 @@ from .applyworkflow import ApplyWorkflowCreate  # noqa: F401
 from .applyworkflow import ApplyWorkflowRead  # noqa: F401
 from .dataset import DatasetCreate  # noqa: F401
 from .dataset import DatasetRead  # noqa: F401
+from .dataset import DatasetStatusRead  # noqa: F401
 from .dataset import DatasetUpdate  # noqa: F401
 from .dataset import ResourceCreate  # noqa: F401
 from .dataset import ResourceRead  # noqa: F401
@@ -32,5 +33,6 @@ from .workflow import WorkflowTaskCreate  # noqa: F401
 from .workflow import WorkflowTaskExport  # noqa: F401
 from .workflow import WorkflowTaskImport  # noqa: F401
 from .workflow import WorkflowTaskRead  # noqa: F401
+from .workflow import WorkflowTaskStatusType  # noqa: F401
 from .workflow import WorkflowTaskUpdate  # noqa: F401
 from .workflow import WorkflowUpdate  # noqa: F401
fractal_server/app/schemas/applyworkflow.py
CHANGED
@@ -107,7 +107,6 @@ class ApplyWorkflowRead(_ApplyWorkflowBase):
     status: str
     log: Optional[str]
     workflow_dump: Optional[dict[str, Any]]
-    history: Optional[list[str]]
     working_dir: Optional[str]
     working_dir_user: Optional[str]
     first_task_index: Optional[int]
fractal_server/app/schemas/dataset.py
CHANGED
@@ -7,6 +7,8 @@ from pydantic import validator

 from ._validators import val_absolute_path
 from ._validators import valstr
+from .workflow import WorkflowTaskRead
+from .workflow import WorkflowTaskStatusType


 __all__ = (
@@ -16,6 +18,7 @@ __all__ = (
     "ResourceCreate",
     "ResourceRead",
     "ResourceUpdate",
+    "DatasetStatusRead",
 )


@@ -61,6 +64,22 @@ class ResourceRead(_ResourceBase):
     dataset_id: int


+class _DatasetHistoryItem(BaseModel):
+    """
+    Class for an item of `Dataset.history`.
+
+    Attributes:
+        workflowtask:
+        status:
+        parallelization: If provided, it includes keys `parallelization_level`
+            and `component_list`.
+    """
+
+    workflowtask: WorkflowTaskRead
+    status: WorkflowTaskStatusType
+    parallelization: Optional[dict]
+
+
 class _DatasetBase(BaseModel):
     """
     Base class for `Dataset`.
@@ -69,12 +88,14 @@ class _DatasetBase(BaseModel):
         name:
         type:
         meta:
+        history:
         read_only:
     """

     name: str
     type: Optional[str]
     meta: dict[str, Any] = Field(default={})
+    history: list[_DatasetHistoryItem] = Field(default=[])
     read_only: bool = False


@@ -85,11 +106,13 @@ class DatasetUpdate(_DatasetBase):
     Attributes:
         name:
         meta:
+        history:
         read_only:
     """

     name: Optional[str]
     meta: Optional[dict[str, Any]] = None
+    history: Optional[list[_DatasetHistoryItem]] = None
     read_only: Optional[bool]

     # Validators
@@ -122,3 +145,17 @@ class DatasetRead(_DatasetBase):
     resource_list: list[ResourceRead]
     project_id: int
     read_only: bool
+
+
+class DatasetStatusRead(BaseModel):
+    """
+    Response type for the
+    `/project/{project_id}/dataset/{dataset_id}/status/` endpoint
+    """
+
+    status: Optional[
+        dict[
+            int,
+            WorkflowTaskStatusType,
+        ]
+    ] = None
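A tiny sketch of the relocated response model in use; the payload values are illustrative and the keys are `WorkflowTask` IDs:

```python
# Illustrative payload; string values are coerced into WorkflowTaskStatusType members.
from fractal_server.app.schemas import DatasetStatusRead

status_read = DatasetStatusRead(status={3: "done", 4: "submitted", 5: "failed"})
assert status_read.status[4].value == "submitted"
```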
fractal_server/app/schemas/project.py
CHANGED
@@ -30,18 +30,10 @@ class _ProjectBase(BaseModel):
 class ProjectCreate(_ProjectBase):
     """
     Class for `Project` creation.
-
-    Attributes:
-        default_dataset_name:
     """

-    default_dataset_name: Optional[str] = "default"
-
     # Validators
     _name = validator("name", allow_reuse=True)(valstr("name"))
-    _default_dataset_name = validator(
-        "default_dataset_name", allow_reuse=True
-    )(valstr("default_dataset_name"))


 class ProjectRead(_ProjectBase):
fractal_server/app/schemas/workflow.py
CHANGED
@@ -1,3 +1,4 @@
+from enum import Enum
 from typing import Any
 from typing import Optional

@@ -22,6 +23,7 @@ __all__ = (
     "WorkflowTaskExport",
     "WorkflowTaskRead",
    "WorkflowTaskUpdate",
+    "WorkflowTaskStatusType",
 )


@@ -185,3 +187,22 @@ class WorkflowExport(_WorkflowBase):
     """

     task_list: list[WorkflowTaskExport]
+
+
+class WorkflowTaskStatusType(str, Enum):
+    """
+    Define the available values for the status of a `WorkflowTask`.
+
+    This model is used within the `Dataset.history` attribute, which is
+    constructed in the runner and then used in the API (e.g. in the
+    `api/v1/project/{project_id}/dataset/{dataset_id}/status` endpoint).
+
+    Attributes:
+        SUBMITTED: The `WorkflowTask` is part of a running job.
+        DONE: The most-recent execution of this `WorkflowTask` was successful.
+        FAILED: The most-recent execution of this `WorkflowTask` failed.
+    """
+
+    SUBMITTED = "submitted"
+    DONE = "done"
+    FAILED = "failed"
fractal_server/config.py
CHANGED
@@ -419,4 +419,9 @@ class Settings(BaseSettings):


 def get_settings(settings=Settings()) -> Settings:
+    logging.debug("Fractal Settings:")
+    for key, value in settings.dict().items():
+        if any(s in key.upper() for s in ["PASSWORD", "SECRET"]):
+            value = "*****"
+        logging.debug(f"{key}: {value}")
     return settings
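The same redaction idea in isolation (a standalone sketch, not the `Settings` class itself): any setting whose name contains PASSWORD or SECRET is masked before being written to the debug log.

```python
# Standalone sketch of the masking logic added to get_settings.
import logging

def log_settings(settings_dict: dict) -> None:
    logging.debug("Fractal Settings:")
    for key, value in settings_dict.items():
        if any(s in key.upper() for s in ["PASSWORD", "SECRET"]):
            value = "*****"  # never log credentials in clear text
        logging.debug(f"{key}: {value}")

log_settings({"DB_ENGINE": "sqlite", "JWT_SECRET_KEY": "super-secret"})
```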
fractal_server/migrations/versions/99ea79d9e5d2_add_dataset_history.py
ADDED
@@ -0,0 +1,36 @@
+"""Add Dataset.history
+
+Revision ID: 99ea79d9e5d2
+Revises: 8f79bd162e35
+Create Date: 2023-10-16 09:45:15.132185
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "99ea79d9e5d2"
+down_revision = "8f79bd162e35"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("dataset", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "history", sa.JSON(), server_default="[]", nullable=False
+            )
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("dataset", schema=None) as batch_op:
+        batch_op.drop_column("history")
+
+    # ### end Alembic commands ###
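The `server_default="[]"` means existing `dataset` rows come out of the migration with an empty history rather than NULL. Applying the revision programmatically with Alembic's Python API would look roughly like this (the ini path is a placeholder; deployments may instead use their usual migration entry point):

```python
# Sketch: apply this revision with Alembic's Python API.
from alembic import command
from alembic.config import Config

cfg = Config("path/to/alembic.ini")  # placeholder path
command.upgrade(cfg, "99ea79d9e5d2")
```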
fractal_server/tasks/collection.py
CHANGED
@@ -83,7 +83,7 @@ def get_absolute_venv_path(venv_path: Path) -> Path:
         package_path = venv_path
     else:
         settings = Inject(get_settings)
-        package_path = settings.FRACTAL_TASKS_DIR / venv_path
+        package_path = settings.FRACTAL_TASKS_DIR / venv_path
     return package_path


@@ -214,7 +214,7 @@ def create_package_dir_pip(
             "with `version=None`."
         )
     package_dir = f"{task_pkg.package}{task_pkg.package_version}"
-    venv_path = settings.FRACTAL_TASKS_DIR / user / package_dir
+    venv_path = settings.FRACTAL_TASKS_DIR / user / package_dir
     if create:
         venv_path.mkdir(exist_ok=False, parents=True)
     return venv_path
{fractal_server-1.3.10.dist-info → fractal_server-1.3.12.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fractal-server
-Version: 1.3.10
+Version: 1.3.12
 Summary: Server component of the Fractal analytics platform
 Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause
@@ -31,6 +31,7 @@ Requires-Dist: python-dotenv (>=0.20.0,<0.21.0)
 Requires-Dist: sqlalchemy (>=1.4,<2.0)
 Requires-Dist: sqlmodel (>=0.0.8,<0.0.9)
 Requires-Dist: uvicorn (>=0.20.0,<0.21.0)
+Project-URL: Changelog, https://github.com/fractal-analytics-platform/fractal-server/blob/main/CHANGELOG.md
 Project-URL: Documentation, https://fractal-analytics-platform.github.io/fractal-server
 Project-URL: Repository, https://github.com/fractal-analytics-platform/fractal-server
 Description-Content-Type: text/markdown
{fractal_server-1.3.10.dist-info → fractal_server-1.3.12.dist-info}/RECORD
CHANGED
@@ -1,76 +1,77 @@
-fractal_server/__init__.py,sha256=
+fractal_server/__init__.py,sha256=qaJUIaR-bHaNPEu4hp4VMdOkjJr1Mo6mAbuEBNVfiF8,23
 fractal_server/__main__.py,sha256=znijcImbcEC4P26ICOhEJ9VY3_5vWdMwQcl-WP25sYA,2202
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/api/__init__.py,sha256=_g30kAzDmakaxQQYwSZwVbid1O-3zMzQqnSuQZOWI3U,1192
 fractal_server/app/api/v1/__init__.py,sha256=2HMymr1YkUk39V8iof7KENyLnre4ghouOSvNZ_kF1ec,24
 fractal_server/app/api/v1/_aux_functions.py,sha256=_-D-iTKvOfH1yufa4lIHvP88Sgic8cvKXq3GmDHC-lk,9659
-fractal_server/app/api/v1/dataset.py,sha256=
-fractal_server/app/api/v1/job.py,sha256=
-fractal_server/app/api/v1/project.py,sha256=
+fractal_server/app/api/v1/dataset.py,sha256=lJTuYoQpOFWyw_f5WhJK88JRKcUWzFiFmPvyUfrv2PM,14500
+fractal_server/app/api/v1/job.py,sha256=k1TxhmU_VMiHb6RxJUsiVaLWcH5GvtwsB44DsnmxNmc,4550
+fractal_server/app/api/v1/project.py,sha256=LlNkkKpJ7WtEcMOQ1KzGwxYqxScUex-wTlu1x1KL4rs,10236
 fractal_server/app/api/v1/task.py,sha256=0MJNhn5f8KZy4XBMUoJNhrk3E6GBQWcVfKoQzP5XXWw,5582
 fractal_server/app/api/v1/task_collection.py,sha256=mY1cSGepWvVz6IJCnFYA8iy4hU-8qsA1HbiQXZjg1OM,11697
 fractal_server/app/api/v1/workflow.py,sha256=A54m9PDD7MhT6s5-8kIrefEJ5uVxigxBohelT7XCLVo,9379
 fractal_server/app/api/v1/workflowtask.py,sha256=TIsCSBFImoRq0rz16ZVlFwTL-Qd9Uqywbq-DT4OxYh0,5421
 fractal_server/app/db/__init__.py,sha256=g2KS-KxWL6CheZqa8nsUI3_Hw5mGVz_geUWCNY102XM,3109
 fractal_server/app/models/__init__.py,sha256=RuxWH8fsmkTWsjLhYjrxSt-mvk74coCilAQlX2Q6OO0,353
-fractal_server/app/models/dataset.py,sha256=
+fractal_server/app/models/dataset.py,sha256=TK2-tnEMfC3lAwSJMMJre-e2Ao6AbEqnhWTcVvcrJxo,1482
 fractal_server/app/models/job.py,sha256=eU1RIWg4C2n9BzjGdvOxqP-eueNNYdo6tpW2RBqZYrE,3753
 fractal_server/app/models/linkuserproject.py,sha256=RVtl25Q_N99uoVDE7wx0IN0SgFjc7Id5XbScsgrjv_E,309
 fractal_server/app/models/project.py,sha256=U3MvLVBestug5dCCw22VA-l8BeUnGoaNye5OPexsqoA,1191
 fractal_server/app/models/security.py,sha256=Dp54Hf7I72oo9PERdyR0_zStw2ppYlFVi5MhFWIE6Lw,2438
 fractal_server/app/models/state.py,sha256=0L4lcaaMyQE43Tk1DVYbvPFcsQ9OZDSzsEwJO-vLrKA,1111
 fractal_server/app/models/task.py,sha256=APndtea9A7EF7TtpVK8kWapBM01a6nk3FFCrQbbioI8,2632
-fractal_server/app/models/workflow.py,sha256=
+fractal_server/app/models/workflow.py,sha256=VlX-MNfqw3z-EVKMvwVR9HbnOFNHRnO-5sDYFQOSijQ,5191
 fractal_server/app/runner/.gitignore,sha256=ytzN_oyHWXrGU7iFAtoHSTUbM6Rn6kG0Zkddg0xZk6s,16
-fractal_server/app/runner/__init__.py,sha256=
-fractal_server/app/runner/_common.py,sha256=
-fractal_server/app/runner/_local/__init__.py,sha256=
+fractal_server/app/runner/__init__.py,sha256=tdRkLDLZmfiMZbDpeDZiMqewIpfP58--IlERMVp33bI,13039
+fractal_server/app/runner/_common.py,sha256=XjyE8DZE6WECeFXI6i0vHVD6JywZQxkJgZrL-ep1USQ,22642
+fractal_server/app/runner/_local/__init__.py,sha256=mSJzpF6u6rgsSYO25szNVr2B296h7_iKD1eqS3o87Qo,6532
 fractal_server/app/runner/_local/_local_config.py,sha256=-oNTsjEUmytHlsYpWfw2CrPvSxDFeEhZSdQvI_wf3Mk,3245
 fractal_server/app/runner/_local/_submit_setup.py,sha256=cP4gjQ_3TFgqglscQacp9dB3aqeXup5tVqqWE7TZl9Q,1631
 fractal_server/app/runner/_local/executor.py,sha256=QrJlD77G6q4WohoJQO7XXbvi2RlCUsNvMnPDEZIoAqA,3620
 fractal_server/app/runner/_slurm/.gitignore,sha256=ytzN_oyHWXrGU7iFAtoHSTUbM6Rn6kG0Zkddg0xZk6s,16
-fractal_server/app/runner/_slurm/__init__.py,sha256=
+fractal_server/app/runner/_slurm/__init__.py,sha256=SjCzSjlf8_aVZ6jw1vUZZVV5GKp92_G7Ui_1RyL6dV0,4694
 fractal_server/app/runner/_slurm/_batching.py,sha256=KE4NrLXRHFZQSLW2vbUyu0X7TE7bTd2WCRrbYhXRTow,8840
-fractal_server/app/runner/_slurm/_executor_wait_thread.py,sha256=
+fractal_server/app/runner/_slurm/_executor_wait_thread.py,sha256=ZGwquq2UHCr84f-b5gH14cmRFgJHB7pYwQSeRkIzxcA,4402
 fractal_server/app/runner/_slurm/_slurm_config.py,sha256=pPyduXQSOzPEy_fJxrE8NE9hzXDLZ3NmWAIhv2OMCH8,20562
 fractal_server/app/runner/_slurm/_submit_setup.py,sha256=JIPmZEqyLRByQ3SgqiyocQlsHjfm0wKCk7W-KRBGu_0,2930
 fractal_server/app/runner/_slurm/_subprocess_run_as_user.py,sha256=so0S9TRxHlVZoOTGbeZtklIErJatGbRRMXcD8F4jcv4,3862
 fractal_server/app/runner/_slurm/executor.py,sha256=ao5YuWtjsIfTYUucE1SvNS8a99Sgzm01RLLu1wLjP0Y,41983
 fractal_server/app/runner/_slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77JRK82g_2hx0iBKTiMghuIo,5852
-fractal_server/app/runner/common.py,sha256=
-fractal_server/app/runner/handle_failed_job.py,sha256=
-fractal_server/app/schemas/__init__.py,sha256=
+fractal_server/app/runner/common.py,sha256=nz0ZuIro0iwZm-OV-e-Y-PrtgKcLK0d7BrzebWyEWEk,9496
+fractal_server/app/runner/handle_failed_job.py,sha256=PKgJARHjXyv33sDsl7oTINdcTu7EwmFmIkp38RqAE3Q,4641
+fractal_server/app/schemas/__init__.py,sha256=Roc1gkp3jsafkgcsY8u5S6nshj9ER9fuAkthplrX4kI,1804
 fractal_server/app/schemas/_validators.py,sha256=7YEbgrnGRpzkLMfZzQNfczEmcNnO__SmVOaBHhzaiXE,1819
-fractal_server/app/schemas/applyworkflow.py,sha256=
-fractal_server/app/schemas/dataset.py,sha256=
+fractal_server/app/schemas/applyworkflow.py,sha256=gGlVSfYZlVdWRhIhQ_0tzBKfuW1y1-ZeIS5UvjpXZTM,2954
+fractal_server/app/schemas/dataset.py,sha256=PPqGTsRQ5JEwkiM4NcjPYFckxnCdi_Zov-bWXDm1LUk,3092
 fractal_server/app/schemas/manifest.py,sha256=xxTd39dAXMK9Ox1y-p3gbyg0zd5udW99pV4JngCUGwM,3819
-fractal_server/app/schemas/project.py,sha256=
+fractal_server/app/schemas/project.py,sha256=GoV1yUPVSJ7eFGXIBrYh_4FOVKYcBRzPbW7ImnBAg-4,1047
 fractal_server/app/schemas/state.py,sha256=CS8Rs5qF21TsnqmyzUHLqRaX1b61Oc6Yra6POYpYSQY,762
 fractal_server/app/schemas/task.py,sha256=2TBE5Ne9tO_-a2-Es0PRXMT8ZddSInTOPMor7u8-gx0,3671
 fractal_server/app/schemas/task_collection.py,sha256=mPk6E1LK2UvnHkhIQWHmTztsVT99iHZn-UZy7mGNjUk,2965
 fractal_server/app/schemas/user.py,sha256=zhB-2WfJ30hNcHaW2V126v5i7rHl66fX_SRmIWCQrjM,1587
-fractal_server/app/schemas/workflow.py,sha256=
+fractal_server/app/schemas/workflow.py,sha256=oFoO62JH5hfMJjKoicdpyC5hd2O9XgqoAm5RN9YjXAI,4238
 fractal_server/app/security/__init__.py,sha256=DCUIaIgzatnmtABAO4bR9jISVSoGowHlIQIHuV5oLUU,10880
-fractal_server/config.py,sha256=
+fractal_server/config.py,sha256=UF9K_IfJU7-VPjCJbMDcdV11_DsmMxqq4ezTNBQASq8,14126
 fractal_server/logger.py,sha256=keri8i960WHT8Zz9Rm2MwfnrA2dw9TsrfCmojqtGDLs,4562
 fractal_server/main.py,sha256=9_T_cMqf0EfbfYwkYhKeU36v9PFi95BoydapKpmaTKc,5932
 fractal_server/migrations/README,sha256=4rQvyDfqodGhpJw74VYijRmgFP49ji5chyEemWGHsuw,59
 fractal_server/migrations/env.py,sha256=05EoWw0p43ojTNiz7UVG4lsl057B4ImSgXiHmiU-M80,2690
 fractal_server/migrations/script.py.mako,sha256=27QIow4iwANuPt6USRnqKXe8NX3POsWO0qT007P1HFU,550
-fractal_server/migrations/versions/4c308bcaea2b_add_task_args_schema_and_task_args_.py,sha256
+fractal_server/migrations/versions/4c308bcaea2b_add_task_args_schema_and_task_args_.py,sha256=-wHe-fOffmYeAm0JXVl_lxZ7hhDkaEVqxgxpHkb_uL8,954
 fractal_server/migrations/versions/50a13d6138fd_initial_schema.py,sha256=zwXegXs9J40eyCWi3w0c_iIBVJjXNn4VdVnQaT3KxDg,8770
 fractal_server/migrations/versions/70e77f1c38b0_add_applyworkflow_first_task_index_and_.py,sha256=Q-DsMzG3IcUV2Ol1dhJWosDvKERamBE6QvA2zzS5zpQ,1632
-fractal_server/migrations/versions/8f79bd162e35_add_docs_info_and_docs_link_to_task_.py,sha256=
-fractal_server/migrations/versions/
-fractal_server/migrations/versions/
+fractal_server/migrations/versions/8f79bd162e35_add_docs_info_and_docs_link_to_task_.py,sha256=6pgODDtyAxevZvAJBj9IJ41inhV1RpwbpZr_qfPPu1A,1115
+fractal_server/migrations/versions/99ea79d9e5d2_add_dataset_history.py,sha256=0im6TxDr53sKKcjiPgeH4ftVRGnRXZSh2lPbRQ1Ir9w,883
+fractal_server/migrations/versions/a7f4d6137b53_add_workflow_dump_to_applyworkflow.py,sha256=ekDUML7ILpmdoqEclKbEUdyLi4uw9HSG_sTjG2hp_JE,867
+fractal_server/migrations/versions/f384e1c0cf5d_drop_task_default_args_columns.py,sha256=9BwqUS9Gf7UW_KjrzHbtViC880qhD452KAytkHWWZyk,746
 fractal_server/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/syringe.py,sha256=3qSMW3YaMKKnLdgnooAINOPxnCOxP7y2jeAQYB21Gdo,2786
 fractal_server/tasks/__init__.py,sha256=Wzuxf5EoH1v0fYzRpAZHG_S-Z9f6DmbIsuSvllBCGvc,72
-fractal_server/tasks/collection.py,sha256=
+fractal_server/tasks/collection.py,sha256=POKvQyS5G5ySybH0r0v21I_ZQ5AREe9kAqr_uFfGyaU,17627
 fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
-fractal_server-1.3.
-fractal_server-1.3.
-fractal_server-1.3.
-fractal_server-1.3.
-fractal_server-1.3.
+fractal_server-1.3.12.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-1.3.12.dist-info/METADATA,sha256=Ak6e_L9Tth8H31dMN-PU3lbagRjYwMcD0YImfn4LhmI,3836
+fractal_server-1.3.12.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+fractal_server-1.3.12.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-1.3.12.dist-info/RECORD,,
{fractal_server-1.3.10.dist-info → fractal_server-1.3.12.dist-info}/LICENSE
File without changes
{fractal_server-1.3.10.dist-info → fractal_server-1.3.12.dist-info}/WHEEL
File without changes
{fractal_server-1.3.10.dist-info → fractal_server-1.3.12.dist-info}/entry_points.txt
File without changes