fractal-server 1.4.3a0__py3-none-any.whl → 1.4.3a1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
- fractal_server/__init__.py +1 -1
- fractal_server/app/db/__init__.py +36 -25
- fractal_server/app/routes/admin.py +8 -8
- fractal_server/app/routes/api/v1/_aux_functions.py +3 -5
- fractal_server/app/routes/api/v1/dataset.py +24 -23
- fractal_server/app/routes/api/v1/job.py +7 -7
- fractal_server/app/routes/api/v1/project.py +14 -19
- fractal_server/app/routes/api/v1/task.py +6 -6
- fractal_server/app/routes/api/v1/task_collection.py +12 -126
- fractal_server/app/routes/api/v1/workflow.py +13 -13
- fractal_server/app/routes/api/v1/workflowtask.py +5 -5
- fractal_server/app/routes/auth.py +2 -2
- fractal_server/app/runner/__init__.py +0 -1
- fractal_server/app/schemas/__init__.py +1 -0
- fractal_server/app/schemas/applyworkflow.py +5 -9
- fractal_server/app/schemas/task_collection.py +2 -10
- fractal_server/app/security/__init__.py +3 -3
- fractal_server/config.py +14 -0
- fractal_server/tasks/_TaskCollectPip.py +103 -0
- fractal_server/tasks/__init__.py +3 -1
- fractal_server/tasks/background_operations.py +384 -0
- fractal_server/tasks/endpoint_operations.py +167 -0
- fractal_server/tasks/utils.py +86 -0
- {fractal_server-1.4.3a0.dist-info → fractal_server-1.4.3a1.dist-info}/METADATA +1 -1
- {fractal_server-1.4.3a0.dist-info → fractal_server-1.4.3a1.dist-info}/RECORD +28 -25
- fractal_server/tasks/collection.py +0 -556
- {fractal_server-1.4.3a0.dist-info → fractal_server-1.4.3a1.dist-info}/LICENSE +0 -0
- {fractal_server-1.4.3a0.dist-info → fractal_server-1.4.3a1.dist-info}/WHEEL +0 -0
- {fractal_server-1.4.3a0.dist-info → fractal_server-1.4.3a1.dist-info}/entry_points.txt +0 -0
fractal_server/app/routes/api/v1/task_collection.py
CHANGED
@@ -1,7 +1,5 @@
-import json
 from pathlib import Path
 from shutil import copy as shell_copy
-from shutil import rmtree as shell_rmtree
 from tempfile import TemporaryDirectory
 
 from fastapi import APIRouter
@@ -17,27 +15,21 @@ from .....config import get_settings
 from .....logger import close_logger
 from .....logger import set_logger
 from .....syringe import Inject
-from .....tasks.
-from .....tasks.
-from .....tasks.
-from .....tasks.
-from .....tasks.
-from .....tasks.
-from .....tasks.
-from .....tasks.
-from .....tasks.collection import inspect_package
-from .....tasks.collection import slugify_task_name
+from .....tasks._TaskCollectPip import _TaskCollectPip
+from .....tasks.background_operations import background_collect_pip
+from .....tasks.endpoint_operations import create_package_dir_pip
+from .....tasks.endpoint_operations import download_package
+from .....tasks.endpoint_operations import get_collection_data
+from .....tasks.endpoint_operations import inspect_package
+from .....tasks.utils import get_collection_log
+from .....tasks.utils import slugify_task_name
 from ....db import AsyncSession
-from ....db import DBSyncSession
-from ....db import get_db
-from ....db import get_sync_db
+from ....db import get_async_db
 from ....models import State
 from ....models import Task
 from ....schemas import StateRead
 from ....schemas import TaskCollectPip
 from ....schemas import TaskCollectStatus
-from ....schemas import TaskCreate
-from ....schemas import TaskRead
 from ....security import current_active_user
 from ....security import current_active_verified_user
 from ....security import User
@@ -47,112 +39,6 @@ router = APIRouter()
 logger = set_logger(__name__)
 
 
-async def _background_collect_pip(
-    state_id: int,
-    venv_path: Path,
-    task_pkg: _TaskCollectPip,
-) -> None:
-    """
-    Install package and collect tasks
-
-    Install a python package and collect the tasks it provides according to
-    the manifest.
-
-    In case of error, copy the log into the state and delete the package
-    directory.
-    """
-    logger_name = task_pkg.package.replace("/", "_")
-    logger = set_logger(
-        logger_name=logger_name,
-        log_file_path=get_log_path(venv_path),
-    )
-    logger.debug("Start background task collection")
-    for key, value in task_pkg.dict(exclude={"package_manifest"}).items():
-        logger.debug(f"{key}: {value}")
-
-    with next(get_sync_db()) as db:
-        state: State = db.get(State, state_id)
-        data = TaskCollectStatus(**state.data)
-        data.info = None
-
-        try:
-            # install
-            logger.debug("Task-collection status: installing")
-            data.status = "installing"
-
-            state.data = data.sanitised_dict()
-            db.merge(state)
-            db.commit()
-            task_list = await create_package_environment_pip(
-                venv_path=venv_path,
-                task_pkg=task_pkg,
-                logger_name=logger_name,
-            )
-
-            # collect
-            logger.debug("Task-collection status: collecting")
-            data.status = "collecting"
-            state.data = data.sanitised_dict()
-            db.merge(state)
-            db.commit()
-            tasks = await _insert_tasks(task_list=task_list, db=db)
-
-            # finalise
-            logger.debug("Task-collection status: finalising")
-            collection_path = get_collection_path(venv_path)
-            data.task_list = [TaskRead(**task.model_dump()) for task in tasks]
-            with collection_path.open("w") as f:
-                json.dump(data.sanitised_dict(), f)
-
-            # Update DB
-            data.status = "OK"
-            data.log = get_collection_log(venv_path)
-            state.data = data.sanitised_dict()
-            db.add(state)
-            db.merge(state)
-            db.commit()
-
-            # Write last logs to file
-            logger.debug("Task-collection status: OK")
-            logger.info("Background task collection completed successfully")
-            close_logger(logger)
-            db.close()
-
-        except Exception as e:
-            # Write last logs to file
-            logger.debug("Task-collection status: fail")
-            logger.info(f"Background collection failed. Original error: {e}")
-            close_logger(logger)
-
-            # Update db
-            data.status = "fail"
-            data.info = f"Original error: {e}"
-            data.log = get_collection_log(venv_path)
-            state.data = data.sanitised_dict()
-            db.merge(state)
-            db.commit()
-            db.close()
-
-            # Delete corrupted package dir
-            shell_rmtree(venv_path)
-
-
-async def _insert_tasks(
-    task_list: list[TaskCreate],
-    db: DBSyncSession,
-) -> list[Task]:
-    """
-    Insert tasks into database
-    """
-    task_db_list = [Task(**t.dict()) for t in task_list]
-    db.add_all(task_db_list)
-    db.commit()
-    for t in task_db_list:
-        db.refresh(t)
-    db.close()
-    return task_db_list
-
-
 @router.post(
     "/collect/pip/",
     response_model=StateRead,
@@ -175,7 +61,7 @@ async def collect_tasks_pip(
     background_tasks: BackgroundTasks,
     response: Response,
     user: User = Depends(current_active_verified_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> StateRead:  # State[TaskCollectStatus]
     """
     Task collection endpoint
@@ -289,7 +175,7 @@ async def collect_tasks_pip(
     await db.refresh(state)
 
     background_tasks.add_task(
-        _background_collect_pip,
+        background_collect_pip,
         state_id=state.id,
         venv_path=venv_path,
         task_pkg=task_pkg,
@@ -314,7 +200,7 @@ async def check_collection_status(
     state_id: int,
     user: User = Depends(current_active_user),
     verbose: bool = False,
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> StateRead:  # State[TaskCollectStatus]
     """
     Check status of background task collection
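Note: the body of the removed `_background_collect_pip` (and of `_insert_tasks`) did not disappear; per the file list above, it moved into the new `fractal_server/tasks/background_operations.py` module, and the endpoint schedules it through the same FastAPI mechanism as before. A minimal, self-contained sketch of that `BackgroundTasks` pattern (the endpoint path and the `do_collect` stand-in are illustrative, not fractal-server names):

    from fastapi import BackgroundTasks, FastAPI

    app = FastAPI()

    def do_collect(state_id: int) -> None:
        # Stand-in for background_collect_pip: runs after the response is sent
        print(f"collecting tasks for state {state_id}")

    @app.post("/collect/")
    async def collect(background_tasks: BackgroundTasks):
        # add_task registers the callable with its arguments; FastAPI invokes
        # it once the response has been returned to the client
        background_tasks.add_task(do_collect, state_id=1)
        return {"status": "scheduled"}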
fractal_server/app/routes/api/v1/workflow.py
CHANGED
@@ -22,7 +22,7 @@ from sqlmodel import select
 from .....logger import close_logger
 from .....logger import set_logger
 from ....db import AsyncSession
-from ....db import get_db
+from ....db import get_async_db
 from ....models import ApplyWorkflow
 from ....models import Project
 from ....models import Task
@@ -36,8 +36,8 @@ from ....schemas import WorkflowUpdate
 from ....security import current_active_user
 from ....security import User
 from ._aux_functions import _check_workflow_exists
-from ._aux_functions import _get_active_jobs_statement
 from ._aux_functions import _get_project_check_owner
+from ._aux_functions import _get_submitted_jobs_statement
 from ._aux_functions import _get_workflow_check_owner
 from ._aux_functions import _workflow_insert_task
 
@@ -52,7 +52,7 @@ router = APIRouter()
 async def get_workflow_list(
     project_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[list[WorkflowRead]]:
     """
     Get workflow list for given project
@@ -79,7 +79,7 @@ async def create_workflow(
     project_id: int,
     workflow: WorkflowCreate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[WorkflowRead]:
     """
     Create a workflow, associate to a project
@@ -109,7 +109,7 @@ async def read_workflow(
     project_id: int,
     workflow_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[WorkflowRead]:
     """
     Get info on an existing workflow
@@ -131,7 +131,7 @@ async def update_workflow(
     workflow_id: int,
     patch: WorkflowUpdate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[WorkflowRead]:
     """
     Edit a workflow
@@ -182,7 +182,7 @@ async def delete_workflow(
     project_id: int,
     workflow_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Response:
     """
     Delete a workflow
@@ -192,9 +192,9 @@ async def delete_workflow(
         project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
     )
 
-    # Fail if there exist jobs that are
-    #
-    stm = _get_active_jobs_statement().where(
+    # Fail if there exist jobs that are submitted and in relation with the
+    # current workflow.
+    stm = _get_submitted_jobs_statement().where(
         ApplyWorkflow.workflow_id == workflow.id
     )
     res = await db.execute(stm)
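The helper rename (`_get_active_jobs_statement` → `_get_submitted_jobs_statement`) tracks the removal of `JobStatusType.RUNNING` further down in this release: "submitted" is now the only non-terminal job status. The `_aux_functions.py` diff is not shown on this page, so the following is only a plausible sketch of such a helper, with `ApplyWorkflow` and `JobStatusType` assumed to be in scope:

    # Hypothetical sketch; the real implementation lives in _aux_functions.py.
    from sqlmodel import select

    def _get_submitted_jobs_statement():
        # Statement selecting all jobs still in the SUBMITTED state; callers
        # chain further .where() clauses, as in delete_workflow above.
        return select(ApplyWorkflow).where(
            ApplyWorkflow.status == JobStatusType.SUBMITTED
        )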
@@ -234,7 +234,7 @@ async def export_worfklow(
     project_id: int,
     workflow_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[WorkflowExport]:
     """
     Export an existing workflow, after stripping all IDs
@@ -267,7 +267,7 @@ async def import_workflow(
     project_id: int,
     workflow: WorkflowImport,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[WorkflowRead]:
     """
     Import an existing workflow into a project
@@ -338,7 +338,7 @@
 @router.get("/workflow/", response_model=list[WorkflowRead])
 async def get_user_workflows(
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[WorkflowRead]:
     """
     Returns all the workflows of the current user
fractal_server/app/routes/api/v1/workflowtask.py
CHANGED
@@ -21,7 +21,7 @@ from fastapi import Response
 from fastapi import status
 
 from ....db import AsyncSession
-from ....db import get_db
+from ....db import get_async_db
 from ....models import Task
 from ....schemas import WorkflowTaskCreate
 from ....schemas import WorkflowTaskRead
@@ -46,7 +46,7 @@ async def create_workflowtask(
     task_id: int,
     new_task: WorkflowTaskCreate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[WorkflowTaskRead]:
     """
     Add a WorkflowTask to a Workflow
@@ -85,7 +85,7 @@ async def read_workflowtask(
     workflow_id: int,
     workflow_task_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ):
     workflow_task, _ = await _get_workflow_task_check_owner(
         project_id=project_id,
@@ -107,7 +107,7 @@ async def update_workflowtask(
     workflow_task_id: int,
     workflow_task_update: WorkflowTaskUpdate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[WorkflowTaskRead]:
     """
     Edit a WorkflowTask of a Workflow
@@ -162,7 +162,7 @@ async def delete_workflowtask(
     workflow_id: int,
     workflow_task_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Response:
     """
     Delete a WorkflowTask of a Workflow
fractal_server/app/routes/auth.py
CHANGED
@@ -13,7 +13,7 @@ from sqlmodel import select
 
 from ...config import get_settings
 from ...syringe import Inject
-from ..db import get_db
+from ..db import get_async_db
 from ..models.security import UserOAuth as User
 from ..schemas.user import UserCreate
 from ..schemas.user import UserRead
@@ -97,7 +97,7 @@ async def get_current_user(user: User = Depends(current_active_user)):
 @router_auth.get("/users/", response_model=list[UserRead])
 async def list_users(
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ):
     """
     Return list of all users
fractal_server/app/schemas/__init__.py
CHANGED
@@ -26,6 +26,7 @@ from .task import TaskUpdate  # noqa: F401
 from .task_collection import TaskCollectPip  # noqa: F401
 from .task_collection import TaskCollectStatus  # noqa: F401
 from .user import UserCreate  # noqa: F401
+from .user import UserRead  # noqa: F401
 from .user import UserUpdate  # noqa: F401
 from .user import UserUpdateStrict  # noqa: F401
 from .workflow import WorkflowCreate  # noqa: F401
fractal_server/app/schemas/applyworkflow.py
CHANGED
@@ -25,21 +25,17 @@ class JobStatusType(str, Enum):
 
     Attributes:
         SUBMITTED:
-            The
-
-
-        RUNNING:
-            The workflow was scheduled with an executor. Note that it might not
-            yet be running within the executor, e.g., jobs could still be
-            pending within a SLURM executor.
+            The job was created. This does not guarantee that it was also
+            submitted to an executor (e.g. other errors could have prevented
+            this), nor that it is actually running (e.g. SLURM jobs could be
+            still in the queue).
         DONE:
-            The
+            The job successfully reached its end.
         FAILED:
             The workflow terminated with an error.
     """
 
     SUBMITTED = "submitted"
-    RUNNING = "running"
     DONE = "done"
     FAILED = "failed"
 
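With `RUNNING` gone, any caller that used to distinguish "submitted" from "running" now sees a single pre-terminal state. A hedged example of the kind of downstream adjustment this implies (`is_active` is a hypothetical helper, not part of fractal-server):

    from fractal_server.app.schemas.applyworkflow import JobStatusType

    def is_active(status: JobStatusType) -> bool:
        # Before this release this would have checked: status in
        # {JobStatusType.SUBMITTED, JobStatusType.RUNNING}
        return status == JobStatusType.SUBMITTED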
fractal_server/app/schemas/task_collection.py
CHANGED
@@ -15,15 +15,7 @@ __all__ = (
 )
 
 
-class _TaskCollectBase(BaseModel):
-    """
-    Base class for `TaskCollectPip`.
-    """
-
-    pass
-
-
-class TaskCollectPip(_TaskCollectBase):
+class TaskCollectPip(BaseModel):
     """
     TaskCollectPip class
 
@@ -89,7 +81,7 @@ class TaskCollectPip(_TaskCollectBase):
         return v
 
 
-class TaskCollectStatus(_TaskCollectBase):
+class TaskCollectStatus(BaseModel):
     """
     TaskCollectStatus class
 
fractal_server/app/security/__init__.py
CHANGED
@@ -56,7 +56,7 @@ from sqlmodel import select
 
 from ...config import get_settings
 from ...syringe import Inject
-from ..db import get_db
+from ..db import get_async_db
 from ..models.security import OAuthAccount
 from ..models.security import UserOAuth as User
 from fractal_server.app.models.security import UserOAuth
@@ -175,7 +175,7 @@ class SQLModelUserDatabaseAsync(Generic[UP, ID], BaseUserDatabase[UP, ID]):
 
 
 async def get_user_db(
-    session: AsyncSession = Depends(get_db),
+    session: AsyncSession = Depends(get_async_db),
 ) -> AsyncGenerator[SQLModelUserDatabaseAsync, None]:
     yield SQLModelUserDatabaseAsync(session, User, OAuthAccount)
 
@@ -247,7 +247,7 @@ current_active_superuser = fastapi_users.current_user(
     active=True, superuser=True
 )
 
-get_async_session_context = contextlib.asynccontextmanager(get_db)
+get_async_session_context = contextlib.asynccontextmanager(get_async_db)
 get_user_db_context = contextlib.asynccontextmanager(get_user_db)
 get_user_manager_context = contextlib.asynccontextmanager(get_user_manager)
 
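The `contextlib.asynccontextmanager` wrapper is what lets a FastAPI dependency (an async generator such as `get_async_db`) be reused outside of request handling, e.g. in scripts or startup hooks. A self-contained toy version of the pattern, with a string standing in for the real `AsyncSession`:

    import asyncio
    import contextlib
    from typing import AsyncGenerator

    async def get_async_db() -> AsyncGenerator[str, None]:
        session = "session"  # the real dependency yields an AsyncSession
        try:
            yield session
        finally:
            pass  # the real dependency closes the session here

    get_async_session_context = contextlib.asynccontextmanager(get_async_db)

    async def main() -> None:
        async with get_async_session_context() as session:
            print(session)

    asyncio.run(main())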
fractal_server/config.py
CHANGED
@@ -360,6 +360,14 @@ class Settings(BaseSettings):
                 raise FractalConfigurationError(
                     "POSTGRES_DB cannot be None when DB_ENGINE=postgres."
                 )
+            try:
+                import psycopg2  # noqa: F401
+                import asyncpg  # noqa: F401
+            except ModuleNotFoundError:
+                raise FractalConfigurationError(
+                    "DB engine is `postgres` but `psycopg2` or `asyncpg` "
+                    "are not available"
+                )
         else:
             if not self.SQLITE_PATH:
                 raise FractalConfigurationError(
@@ -375,6 +383,12 @@
 
         info = f"FRACTAL_RUNNER_BACKEND={self.FRACTAL_RUNNER_BACKEND}"
         if self.FRACTAL_RUNNER_BACKEND == "slurm":
+            try:
+                import cfut  # noqa: F401
+            except ModuleNotFoundError:
+                raise FractalConfigurationError(
+                    f"{info} but `clusterfutures` is not available"
+                )
             if not self.FRACTAL_SLURM_CONFIG_FILE:
                 raise FractalConfigurationError(
                     f"Must set FRACTAL_SLURM_CONFIG_FILE when {info}"
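Both additions follow the same fail-fast pattern: when an optional backend is selected, its optional dependencies are import-probed once at configuration time, so a misconfigured deployment errors out at startup rather than at first use. A self-contained sketch of the pattern (the error class and function name are illustrative, not fractal-server's):

    class ConfigError(RuntimeError):
        pass

    def check_slurm_backend() -> None:
        # Import probe: we only care whether the module is installed
        try:
            import cfut  # noqa: F401  # shipped by the `clusterfutures` package
        except ModuleNotFoundError:
            raise ConfigError("slurm backend requires `clusterfutures`")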
fractal_server/tasks/_TaskCollectPip.py
ADDED
@@ -0,0 +1,103 @@
+from pathlib import Path
+from typing import Optional
+
+from pydantic import root_validator
+
+from fractal_server.app.schemas import ManifestV1
+from fractal_server.app.schemas import TaskCollectPip
+
+
+class _TaskCollectPip(TaskCollectPip):
+    """
+    Internal TaskCollectPip schema
+
+    Differences with its parent class (`TaskCollectPip`):
+
+    1. We check if the package corresponds to a path in the filesystem, and
+       whether it exists (via new validator `check_local_package`, new
+       method `is_local_package` and new attribute `package_path`).
+    2. We include an additional `package_manifest` attribute.
+    3. We expose an additional attribute `package_name`, which is filled
+       during task collection.
+    """
+
+    package_name: Optional[str] = None
+    package_path: Optional[Path] = None
+    package_manifest: Optional[ManifestV1] = None
+
+    @property
+    def is_local_package(self) -> bool:
+        return bool(self.package_path)
+
+    @root_validator(pre=True)
+    def check_local_package(cls, values):
+        """
+        Checks if package corresponds to an existing path on the filesystem
+
+        In this case, the user is providing directly a package file, rather
+        than a remote one from PyPI. We set the `package_path` attribute and
+        get the actual package name and version from the package file name.
+        """
+        if "/" in values["package"]:
+            package_path = Path(values["package"])
+            if not package_path.is_absolute():
+                raise ValueError("Package path must be absolute")
+            if package_path.exists():
+                values["package_path"] = package_path
+                (
+                    values["package"],
+                    values["version"],
+                    *_,
+                ) = package_path.name.split("-")
+            else:
+                raise ValueError(f"Package {package_path} does not exist.")
+        return values
+
+    @property
+    def package_source(self) -> str:
+        """
+        NOTE: As of PR #1188 in `fractal-server`, the attribute
+        `self.package_name` is normalized; this means e.g. that `_` is
+        replaced by `-`. To guarantee backwards compatibility with
+        `Task.source` attributes created before this change, we still replace
+        `-` with `_` upon generation of the `source` attribute, in this
+        method.
+        """
+        if not self.package_name or not self.package_version:
+            raise ValueError(
+                "Cannot construct `package_source` property with "
+                f"{self.package_name=} and {self.package_version=}."
+            )
+        if self.is_local_package:
+            collection_type = "pip_local"
+        else:
+            collection_type = "pip_remote"
+
+        package_extras = self.package_extras or ""
+        if self.python_version:
+            python_version = f"py{self.python_version}"
+        else:
+            python_version = ""  # FIXME: can we allow this?
+
+        source = ":".join(
+            (
+                collection_type,
+                self.package_name.replace("-", "_"),  # see method docstring
+                self.package_version,
+                package_extras,
+                python_version,
+            )
+        )
+        return source
+
+    def check(self):
+        """
+        Verify that the package has all attributes that are needed to continue
+        with task collection
+        """
+        if not self.package_name:
+            raise ValueError("`package_name` attribute is not set")
+        if not self.package_version:
+            raise ValueError("`package_version` attribute is not set")
+        if not self.package_manifest:
+            raise ValueError("`package_manifest` attribute is not set")