fractal-server 1.4.0a0__py3-none-any.whl → 1.4.0a1__py3-none-any.whl
This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published in their public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/api/__init__.py +1 -0
- fractal_server/app/api/v1/dataset.py +1 -1
- fractal_server/app/api/v1/job.py +43 -3
- fractal_server/app/api/v1/monitoring.py +150 -0
- fractal_server/app/api/v1/project.py +2 -2
- fractal_server/app/models/job.py +2 -27
- fractal_server/app/runner/__init__.py +1 -1
- fractal_server/app/schemas/__init__.py +1 -0
- fractal_server/app/schemas/applyworkflow.py +26 -0
- fractal_server/main.py +5 -0
- {fractal_server-1.4.0a0.dist-info → fractal_server-1.4.0a1.dist-info}/METADATA +1 -1
- {fractal_server-1.4.0a0.dist-info → fractal_server-1.4.0a1.dist-info}/RECORD +16 -15
- {fractal_server-1.4.0a0.dist-info → fractal_server-1.4.0a1.dist-info}/LICENSE +0 -0
- {fractal_server-1.4.0a0.dist-info → fractal_server-1.4.0a1.dist-info}/WHEEL +0 -0
- {fractal_server-1.4.0a0.dist-info → fractal_server-1.4.0a1.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "1.4.0a0"
+__VERSION__ = "1.4.0a1"
fractal_server/app/api/__init__.py
CHANGED
@@ -7,6 +7,7 @@ from ...config import get_settings
 from ...syringe import Inject
 from .v1.dataset import router as dataset_router
 from .v1.job import router as job_router
+from .v1.monitoring import router as router_monitoring  # noqa
 from .v1.project import router as project_router
 from .v1.task import router as task_router
 from .v1.task_collection import router as taskcollection_router
fractal_server/app/api/v1/dataset.py
CHANGED
@@ -13,13 +13,13 @@ from ...db import AsyncSession
 from ...db import get_db
 from ...models import ApplyWorkflow
 from ...models import Dataset
-from ...models import JobStatusType
 from ...models import Resource
 from ...runner._common import HISTORY_FILENAME
 from ...schemas import DatasetCreate
 from ...schemas import DatasetRead
 from ...schemas import DatasetStatusRead
 from ...schemas import DatasetUpdate
+from ...schemas import JobStatusType
 from ...schemas import ResourceCreate
 from ...schemas import ResourceRead
 from ...schemas import ResourceUpdate
fractal_server/app/api/v1/job.py
CHANGED
@@ -7,6 +7,7 @@ from zipfile import ZipFile
 from fastapi import APIRouter
 from fastapi import Depends
 from fastapi import HTTPException
+from fastapi import Response
 from fastapi import status
 from fastapi.responses import StreamingResponse
 from sqlmodel import select
@@ -22,11 +23,50 @@ from ...security import current_active_user
 from ...security import User
 from ._aux_functions import _get_job_check_owner
 from ._aux_functions import _get_project_check_owner
+from ._aux_functions import _get_workflow_check_owner


 router = APIRouter()


+@router.get("/project/job/", response_model=list[ApplyWorkflowRead])
+async def get_user_jobs(
+    user: User = Depends(current_active_user),
+) -> list[ApplyWorkflowRead]:
+    """
+    Returns all the jobs of the current user
+    """
+
+    job_list = [
+        job for project in user.project_list for job in project.job_list
+    ]
+
+    return job_list
+
+
+@router.get(
+    "/project/{project_id}/workflow/{workflow_id}/job/",
+    response_model=list[ApplyWorkflowRead],
+)
+async def get_workflow_jobs(
+    project_id: int,
+    workflow_id: int,
+    user: User = Depends(current_active_user),
+    db: AsyncSession = Depends(get_db),
+) -> Optional[list[ApplyWorkflowRead]]:
+    """
+    Returns all the jobs related to a specific workflow
+    """
+
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
+    )
+    job_list = workflow.job_list
+    await db.close()
+
+    return job_list
+
+
 @router.get(
     "/project/{project_id}/job/{job_id}",
     response_model=ApplyWorkflowRead,
@@ -119,14 +159,14 @@ async def get_job_list(

 @router.get(
     "/project/{project_id}/job/{job_id}/stop/",
-    status_code=
+    status_code=204,
 )
 async def stop_job(
     project_id: int,
     job_id: int,
     user: User = Depends(current_active_user),
     db: AsyncSession = Depends(get_db),
-) ->
+) -> Response:
     """
     Stop execution of a workflow job (only available for slurm backend)
     """
@@ -161,4 +201,4 @@ async def stop_job(
     with shutdown_file.open("w") as f:
         f.write(f"Trigger executor shutdown for {job.id=}, {project_id=}.")

-    return
+    return Response(status_code=status.HTTP_204_NO_CONTENT)
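For orientation, here is a hypothetical client-side sketch of the two new job-listing endpoints and the updated stop endpoint. The base URL, bearer token, and numeric IDs are illustrative assumptions, not part of this diff; the paths follow from the routes above, which main.py mounts under /api/v1.

# Hypothetical usage sketch; base URL, token, and IDs are assumptions.
import httpx

BASE = "http://localhost:8000/api/v1"  # assumed deployment address
HEADERS = {"Authorization": "Bearer <token>"}  # assumed auth scheme

with httpx.Client(base_url=BASE, headers=HEADERS) as client:
    # New in 1.4.0a1: all jobs across the current user's projects
    user_jobs = client.get("/project/job/").json()

    # New in 1.4.0a1: jobs of one workflow within a project
    wf_jobs = client.get("/project/1/workflow/2/job/").json()

    # Changed in 1.4.0a1: stopping a job now returns 204 No Content
    resp = client.get("/project/1/job/3/stop/")
    assert resp.status_code == 204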
fractal_server/app/api/v1/monitoring.py
ADDED
@@ -0,0 +1,150 @@
+from datetime import datetime
+from typing import Optional
+
+from fastapi import APIRouter
+from fastapi import Depends
+from sqlalchemy import func
+from sqlmodel import select
+
+from ...db import AsyncSession
+from ...db import get_db
+from ...models import ApplyWorkflow
+from ...models import Dataset
+from ...models import JobStatusType
+from ...models import Project
+from ...models import Workflow
+from ...schemas import ApplyWorkflowRead
+from ...schemas import DatasetRead
+from ...schemas import ProjectRead
+from ...schemas import WorkflowRead
+from ...security import current_active_superuser
+from ...security import User
+
+
+router = APIRouter()
+
+
+@router.get("/project/", response_model=list[ProjectRead])
+async def monitor_project(
+    id: Optional[int] = None,
+    user_id: Optional[int] = None,
+    user: User = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_db),
+) -> list[ProjectRead]:
+
+    stm = select(Project)
+
+    if id is not None:
+        stm = stm.where(Project.id == id)
+
+    if user_id is not None:
+        stm = stm.where(Project.user_list.any(User.id == user_id))
+
+    res = await db.execute(stm)
+    project_list = res.scalars().all()
+    await db.close()
+
+    return project_list
+
+
+@router.get("/workflow/", response_model=list[WorkflowRead])
+async def monitor_workflow(
+    id: Optional[int] = None,
+    project_id: Optional[int] = None,
+    name_contains: Optional[str] = None,
+    user: User = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_db),
+) -> list[WorkflowRead]:
+    stm = select(Workflow)
+
+    if id is not None:
+        stm = stm.where(Workflow.id == id)
+    if project_id is not None:
+        stm = stm.where(Workflow.project_id == project_id)
+    if name_contains is not None:
+        # SQLAlchemy2: use icontains
+        stm = stm.where(
+            func.lower(Workflow.name).contains(name_contains.lower())
+        )
+
+    res = await db.execute(stm)
+    workflow_list = res.scalars().all()
+    await db.close()
+
+    return workflow_list
+
+
+@router.get("/dataset/", response_model=list[DatasetRead])
+async def monitor_dataset(
+    id: Optional[int] = None,
+    project_id: Optional[int] = None,
+    name_contains: Optional[str] = None,
+    type: Optional[str] = None,
+    user: User = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_db),
+) -> list[DatasetRead]:
+    stm = select(Dataset)
+
+    if id is not None:
+        stm = stm.where(Dataset.id == id)
+    if project_id is not None:
+        stm = stm.where(Dataset.project_id == project_id)
+    if name_contains is not None:
+        # SQLAlchemy2: use icontains
+        stm = stm.where(
+            func.lower(Dataset.name).contains(name_contains.lower())
+        )
+    if type is not None:
+        stm = stm.where(Dataset.type == type)
+
+    res = await db.execute(stm)
+    dataset_list = res.scalars().all()
+    await db.close()
+
+    return dataset_list
+
+
+@router.get("/job/", response_model=list[ApplyWorkflowRead])
+async def monitor_job(
+    id: Optional[int] = None,
+    project_id: Optional[int] = None,
+    input_dataset_id: Optional[int] = None,
+    output_dataset_id: Optional[int] = None,
+    workflow_id: Optional[int] = None,
+    status: Optional[JobStatusType] = None,
+    start_timestamp_min: Optional[datetime] = None,
+    start_timestamp_max: Optional[datetime] = None,
+    end_timestamp_min: Optional[datetime] = None,
+    end_timestamp_max: Optional[datetime] = None,
+    user: User = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_db),
+) -> list[ApplyWorkflowRead]:

+    stm = select(ApplyWorkflow)
+
+    if id is not None:
+        stm = stm.where(ApplyWorkflow.id == id)
+    if project_id is not None:
+        stm = stm.where(ApplyWorkflow.project_id == project_id)
+    if input_dataset_id is not None:
+        stm = stm.where(ApplyWorkflow.input_dataset_id == input_dataset_id)
+    if output_dataset_id is not None:
+        stm = stm.where(ApplyWorkflow.output_dataset_id == output_dataset_id)
+    if workflow_id is not None:
+        stm = stm.where(ApplyWorkflow.workflow_id == workflow_id)
+    if status is not None:
+        stm = stm.where(ApplyWorkflow.status == status)
+    if start_timestamp_min is not None:
+        stm = stm.where(ApplyWorkflow.start_timestamp >= start_timestamp_min)
+    if start_timestamp_max is not None:
+        stm = stm.where(ApplyWorkflow.start_timestamp <= start_timestamp_max)
+    if end_timestamp_min is not None:
+        stm = stm.where(ApplyWorkflow.end_timestamp >= end_timestamp_min)
+    if end_timestamp_max is not None:
+        stm = stm.where(ApplyWorkflow.end_timestamp <= end_timestamp_max)
+
+    res = await db.execute(stm)
+    job_list = res.scalars().all()
+    await db.close()
+
+    return job_list
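A hypothetical sketch of querying these new endpoints; the base URL and token are assumptions. All four routes require a superuser account (current_active_superuser) and, per the main.py change below, are mounted under the /monitoring prefix.

# Hypothetical usage sketch; base URL and token are assumptions.
import httpx

BASE = "http://localhost:8000/monitoring"  # assumed deployment address
HEADERS = {"Authorization": "Bearer <superuser-token>"}  # assumed auth scheme

with httpx.Client(base_url=BASE, headers=HEADERS) as client:
    # Projects of a given user
    projects = client.get("/project/", params={"user_id": 1}).json()

    # Case-insensitive substring match on workflow names
    workflows = client.get("/workflow/", params={"name_contains": "mip"}).json()

    # Failed jobs of one workflow, submitted after a given time
    jobs = client.get(
        "/job/",
        params={
            "workflow_id": 2,
            "status": "failed",
            "start_timestamp_min": "2023-10-01T00:00:00",
        },
    ).json()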
fractal_server/app/api/v1/project.py
CHANGED
@@ -18,7 +18,6 @@ from ...db import DBSyncSession
 from ...db import get_db
 from ...db import get_sync_db
 from ...models import ApplyWorkflow
-from ...models import JobStatusType
 from ...models import LinkUserProject
 from ...models import Project
 from ...runner import submit_workflow
@@ -26,6 +25,7 @@ from ...runner import validate_workflow_compatibility
 from ...runner.common import set_start_and_last_task_index
 from ...schemas import ApplyWorkflowCreate
 from ...schemas import ApplyWorkflowRead
+from ...schemas import JobStatusType
 from ...schemas import ProjectCreate
 from ...schemas import ProjectRead
 from ...schemas import ProjectUpdate
@@ -308,7 +308,7 @@ async def apply_workflow(
         workflow_dump=dict(
             workflow.dict(exclude={"task_list"}),
             task_list=[
-                dict(wf_task.
+                dict(wf_task.dict(exclude={"task"}), task=wf_task.task.dict())
                 for wf_task in workflow.task_list
             ],
         ),
fractal_server/app/models/job.py
CHANGED
@@ -1,5 +1,4 @@
 from datetime import datetime
-from enum import Enum
 from typing import Any
 from typing import Optional

@@ -10,34 +9,10 @@ from sqlmodel import Field
 from sqlmodel import SQLModel

 from ...utils import get_timestamp
+from ..schemas import JobStatusType
 from ..schemas.applyworkflow import _ApplyWorkflowBase


-class JobStatusType(str, Enum):
-    """
-    Define the job status available
-
-    Attributes:
-        SUBMITTED:
-            The workflow has been applied but not yet scheduled with an
-            executor. In this phase, due diligence takes place, such as
-            creating working directory, assemblying arguments, etc.
-        RUNNING:
-            The workflow was scheduled with an executor. Note that it might not
-            yet be running within the executor, e.g., jobs could still be
-            pending within a SLURM executor.
-        DONE:
-            The workflow was applied successfully
-        FAILED:
-            The workflow terminated with an error.
-    """
-
-    SUBMITTED = "submitted"
-    RUNNING = "running"
-    DONE = "done"
-    FAILED = "failed"
-
-
 class ApplyWorkflow(_ApplyWorkflowBase, SQLModel, table=True):
     """
     Represent a workflow run
@@ -115,5 +90,5 @@ class ApplyWorkflow(_ApplyWorkflowBase, SQLModel, table=True):
     end_timestamp: Optional[datetime] = Field(
         default=None, sa_column=Column(DateTime(timezone=True))
     )
-    status:
+    status: str = JobStatusType.SUBMITTED
     log: Optional[str] = None
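A minimal, self-contained sketch (not code from the package) of why the model can declare status as str while defaulting to an enum member: JobStatusType subclasses str, so its members compare and store as plain strings, which suits a string database column.

# Standalone demonstration; mirrors the enum defined in schemas/applyworkflow.py.
from enum import Enum


class JobStatusType(str, Enum):
    SUBMITTED = "submitted"
    RUNNING = "running"
    DONE = "done"
    FAILED = "failed"


status: str = JobStatusType.SUBMITTED
assert status == "submitted"    # members compare equal to their string values
assert isinstance(status, str)  # valid value for a str-typed field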
fractal_server/app/runner/__init__.py
CHANGED
@@ -30,9 +30,9 @@ from ...utils import get_timestamp
 from ..db import DB
 from ..models import ApplyWorkflow
 from ..models import Dataset
-from ..models import JobStatusType
 from ..models import Workflow
 from ..models import WorkflowTask
+from ..schemas import JobStatusType
 from ._local import process_workflow as local_process_workflow
 from .common import close_job_logger
 from .common import JobExecutionError
fractal_server/app/schemas/__init__.py
CHANGED
@@ -3,6 +3,7 @@ Schemas for API request/response bodies
 """
 from .applyworkflow import ApplyWorkflowCreate  # noqa: F401
 from .applyworkflow import ApplyWorkflowRead  # noqa: F401
+from .applyworkflow import JobStatusType  # noqa: F401
 from .dataset import DatasetCreate  # noqa: F401
 from .dataset import DatasetRead  # noqa: F401
 from .dataset import DatasetStatusRead  # noqa: F401
fractal_server/app/schemas/applyworkflow.py
CHANGED
@@ -1,4 +1,5 @@
 from datetime import datetime
+from enum import Enum
 from typing import Any
 from typing import Optional

@@ -14,6 +15,31 @@ __all__ = (
 )


+class JobStatusType(str, Enum):
+    """
+    Define the available job statuses
+
+    Attributes:
+        SUBMITTED:
+            The workflow has been applied but not yet scheduled with an
+            executor. In this phase, due diligence takes place, such as
+            creating working directory, assemblying arguments, etc.
+        RUNNING:
+            The workflow was scheduled with an executor. Note that it might not
+            yet be running within the executor, e.g., jobs could still be
+            pending within a SLURM executor.
+        DONE:
+            The workflow was applied successfully
+        FAILED:
+            The workflow terminated with an error.
+    """
+
+    SUBMITTED = "submitted"
+    RUNNING = "running"
+    DONE = "done"
+    FAILED = "failed"
+
+
 class _ApplyWorkflowBase(BaseModel):
     """
     Base class for `ApplyWorkflow`.
fractal_server/main.py
CHANGED
@@ -48,10 +48,15 @@ def collect_routers(app: FastAPI) -> None:
     """
     from .app.api import router_default
     from .app.api import router_v1
+    from .app.api import router_monitoring
+
     from .app.security import auth_router

     app.include_router(router_default, prefix="/api")
     app.include_router(router_v1, prefix="/api/v1")
+    app.include_router(
+        router_monitoring, prefix="/monitoring", tags=["Monitoring"]
+    )
     app.include_router(auth_router, prefix="/auth", tags=["auth"])


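A minimal, self-contained sketch (assumed names, not the package's own app setup) showing how include_router composes the final paths: an endpoint registered at /job/ on the monitoring router becomes reachable at /monitoring/job/.

# Standalone illustration of prefix composition with assumed names.
from fastapi import APIRouter, FastAPI
from fastapi.testclient import TestClient

router_monitoring = APIRouter()


@router_monitoring.get("/job/")
async def monitor_job() -> list:
    return []


app = FastAPI()
app.include_router(router_monitoring, prefix="/monitoring", tags=["Monitoring"])

client = TestClient(app)
assert client.get("/monitoring/job/").status_code == 200  # composed path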
{fractal_server-1.4.0a0.dist-info → fractal_server-1.4.0a1.dist-info}/RECORD
CHANGED
@@ -1,13 +1,14 @@
-fractal_server/__init__.py,sha256=
+fractal_server/__init__.py,sha256=FghpTUvsrKafSXB-cqMEGar7ZwCBi90wwLD4GOQ4kfs,24
 fractal_server/__main__.py,sha256=znijcImbcEC4P26ICOhEJ9VY3_5vWdMwQcl-WP25sYA,2202
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fractal_server/app/api/__init__.py,sha256=
+fractal_server/app/api/__init__.py,sha256=zYFP-qSzNrgPYwqBza-NYGjSm-PyAnHXftmJsd1SOlo,1255
 fractal_server/app/api/v1/__init__.py,sha256=2HMymr1YkUk39V8iof7KENyLnre4ghouOSvNZ_kF1ec,24
 fractal_server/app/api/v1/_aux_functions.py,sha256=_-D-iTKvOfH1yufa4lIHvP88Sgic8cvKXq3GmDHC-lk,9659
-fractal_server/app/api/v1/dataset.py,sha256=
-fractal_server/app/api/v1/job.py,sha256=
-fractal_server/app/api/v1/
+fractal_server/app/api/v1/dataset.py,sha256=sCtOPfKWiuhCzM2w2gPx2UsKQKDQizTZojw1biqi7qw,13952
+fractal_server/app/api/v1/job.py,sha256=APac5RLanwCf_3oY1PbWpKASQP4i1CJWq-bFtnEKpxg,5613
+fractal_server/app/api/v1/monitoring.py,sha256=LN9RI0iST_NshPfmnOa7vFW_wKyZbcA3gBDjFY-9ffw,4736
+fractal_server/app/api/v1/project.py,sha256=ZJCZi1CYqkdBXI83jzqZ8W3qPutYGxrh538TLfNfZA8,10368
 fractal_server/app/api/v1/task.py,sha256=0MJNhn5f8KZy4XBMUoJNhrk3E6GBQWcVfKoQzP5XXWw,5582
 fractal_server/app/api/v1/task_collection.py,sha256=mY1cSGepWvVz6IJCnFYA8iy4hU-8qsA1HbiQXZjg1OM,11697
 fractal_server/app/api/v1/workflow.py,sha256=TwSRMGHgLJZxwgHI24q8Z7-ONu2CL1YVpxG8yBYE_yk,8858
@@ -15,7 +16,7 @@ fractal_server/app/api/v1/workflowtask.py,sha256=TIsCSBFImoRq0rz16ZVlFwTL-Qd9Uqy
 fractal_server/app/db/__init__.py,sha256=4DP-jOZDLawKwPTToOVTR0vO2bc-R7s0lwZUXFv7k5s,3464
 fractal_server/app/models/__init__.py,sha256=RuxWH8fsmkTWsjLhYjrxSt-mvk74coCilAQlX2Q6OO0,353
 fractal_server/app/models/dataset.py,sha256=fcZkb2y7PXlFJAyZtndJ7Gf4c8VpkWjZe47iMybv1aE,2109
-fractal_server/app/models/job.py,sha256=
+fractal_server/app/models/job.py,sha256=DbX7OMx88eC-232C_OdYOpNeyN0tma7p8J3x7HB43os,2768
 fractal_server/app/models/linkuserproject.py,sha256=RVtl25Q_N99uoVDE7wx0IN0SgFjc7Id5XbScsgrjv_E,309
 fractal_server/app/models/project.py,sha256=SNY8CCDHjd_iv1S7theFaRuVy87BuppTrByHFkJqcpE,1137
 fractal_server/app/models/security.py,sha256=Dp54Hf7I72oo9PERdyR0_zStw2ppYlFVi5MhFWIE6Lw,2438
@@ -23,7 +24,7 @@ fractal_server/app/models/state.py,sha256=rSTjYPfPZntEfdQudKp6yu5vsdyfHA7nMYNRIB
 fractal_server/app/models/task.py,sha256=APndtea9A7EF7TtpVK8kWapBM01a6nk3FFCrQbbioI8,2632
 fractal_server/app/models/workflow.py,sha256=r_bdKxzGgRjiwJW2fMYV9pvqcAs2swh1xg_q8pvXSbE,5328
 fractal_server/app/runner/.gitignore,sha256=ytzN_oyHWXrGU7iFAtoHSTUbM6Rn6kG0Zkddg0xZk6s,16
-fractal_server/app/runner/__init__.py,sha256=
+fractal_server/app/runner/__init__.py,sha256=WNMxT9XFEocPOdJRN6FfMek-LecOpBYXfJxmxupMRUE,13597
 fractal_server/app/runner/_common.py,sha256=XjyE8DZE6WECeFXI6i0vHVD6JywZQxkJgZrL-ep1USQ,22642
 fractal_server/app/runner/_local/__init__.py,sha256=mSJzpF6u6rgsSYO25szNVr2B296h7_iKD1eqS3o87Qo,6532
 fractal_server/app/runner/_local/_local_config.py,sha256=-oNTsjEUmytHlsYpWfw2CrPvSxDFeEhZSdQvI_wf3Mk,3245
@@ -40,9 +41,9 @@ fractal_server/app/runner/_slurm/executor.py,sha256=ao5YuWtjsIfTYUucE1SvNS8a99Sg
 fractal_server/app/runner/_slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77JRK82g_2hx0iBKTiMghuIo,5852
 fractal_server/app/runner/common.py,sha256=nz0ZuIro0iwZm-OV-e-Y-PrtgKcLK0d7BrzebWyEWEk,9496
 fractal_server/app/runner/handle_failed_job.py,sha256=PKgJARHjXyv33sDsl7oTINdcTu7EwmFmIkp38RqAE3Q,4641
-fractal_server/app/schemas/__init__.py,sha256=
+fractal_server/app/schemas/__init__.py,sha256=th51Dzbe94xatOIMVheqlq0rVFy8oI1CHRfzCbjiTSg,1859
 fractal_server/app/schemas/_validators.py,sha256=7YEbgrnGRpzkLMfZzQNfczEmcNnO__SmVOaBHhzaiXE,1819
-fractal_server/app/schemas/applyworkflow.py,sha256=
+fractal_server/app/schemas/applyworkflow.py,sha256=iT7FBWgb9EfMSUdFCSv6vPy6223nsPqdQCLTYHrM8Uw,3871
 fractal_server/app/schemas/dataset.py,sha256=PPqGTsRQ5JEwkiM4NcjPYFckxnCdi_Zov-bWXDm1LUk,3092
 fractal_server/app/schemas/json_schemas/manifest.json,sha256=yXYKHbYXPYSkSXMTLfTpfCUGBtmQuPTk1xuSXscdba4,1787
 fractal_server/app/schemas/manifest.py,sha256=xxTd39dAXMK9Ox1y-p3gbyg0zd5udW99pV4JngCUGwM,3819
@@ -55,7 +56,7 @@ fractal_server/app/schemas/workflow.py,sha256=oFoO62JH5hfMJjKoicdpyC5hd2O9XgqoAm
 fractal_server/app/security/__init__.py,sha256=qjTt5vvtIq6eMIMBCDewrQK9A6Lw2DW2HnifWuxeDmA,10933
 fractal_server/config.py,sha256=zekTDA_FhQG_RYgOWEIxT6KyJKRpMge-pB-iYRr4sIY,14447
 fractal_server/logger.py,sha256=keri8i960WHT8Zz9Rm2MwfnrA2dw9TsrfCmojqtGDLs,4562
-fractal_server/main.py,sha256=
+fractal_server/main.py,sha256=JR-Q7QBFq97Wy0FTrXy9YxiyusWNuCFJMDyvJ_Vatko,6075
 fractal_server/migrations/README,sha256=4rQvyDfqodGhpJw74VYijRmgFP49ji5chyEemWGHsuw,59
 fractal_server/migrations/env.py,sha256=05EoWw0p43ojTNiz7UVG4lsl057B4ImSgXiHmiU-M80,2690
 fractal_server/migrations/script.py.mako,sha256=oMXw9LC3zRbinWWPPDgeZ4z9FJrV2zhRWiYdS5YgNbI,526
@@ -73,8 +74,8 @@ fractal_server/syringe.py,sha256=3qSMW3YaMKKnLdgnooAINOPxnCOxP7y2jeAQYB21Gdo,278
 fractal_server/tasks/__init__.py,sha256=Wzuxf5EoH1v0fYzRpAZHG_S-Z9f6DmbIsuSvllBCGvc,72
 fractal_server/tasks/collection.py,sha256=POKvQyS5G5ySybH0r0v21I_ZQ5AREe9kAqr_uFfGyaU,17627
 fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
-fractal_server-1.4.
-fractal_server-1.4.
-fractal_server-1.4.
-fractal_server-1.4.
-fractal_server-1.4.
+fractal_server-1.4.0a1.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-1.4.0a1.dist-info/METADATA,sha256=2drgsD8Zsk0dNZIT_CkCRCdqCWqBcIuZAwFMfvuAYhw,3773
+fractal_server-1.4.0a1.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+fractal_server-1.4.0a1.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-1.4.0a1.dist-info/RECORD,,
{fractal_server-1.4.0a0.dist-info → fractal_server-1.4.0a1.dist-info}/LICENSE
File without changes
{fractal_server-1.4.0a0.dist-info → fractal_server-1.4.0a1.dist-info}/WHEEL
File without changes
{fractal_server-1.4.0a0.dist-info → fractal_server-1.4.0a1.dist-info}/entry_points.txt
File without changes