fractal-server 1.4.10__py3-none-any.whl → 2.0.0a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/__init__.py +3 -7
- fractal_server/app/models/linkuserproject.py +9 -0
- fractal_server/app/models/security.py +6 -0
- fractal_server/app/models/state.py +1 -1
- fractal_server/app/models/v1/__init__.py +11 -0
- fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
- fractal_server/app/models/{job.py → v1/job.py} +5 -5
- fractal_server/app/models/{project.py → v1/project.py} +5 -5
- fractal_server/app/models/{task.py → v1/task.py} +7 -2
- fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
- fractal_server/app/models/v2/__init__.py +20 -0
- fractal_server/app/models/v2/dataset.py +55 -0
- fractal_server/app/models/v2/job.py +51 -0
- fractal_server/app/models/v2/project.py +31 -0
- fractal_server/app/models/v2/task.py +93 -0
- fractal_server/app/models/v2/workflow.py +43 -0
- fractal_server/app/models/v2/workflowtask.py +90 -0
- fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
- fractal_server/app/routes/admin/v2.py +274 -0
- fractal_server/app/routes/api/v1/__init__.py +7 -7
- fractal_server/app/routes/api/v1/_aux_functions.py +2 -2
- fractal_server/app/routes/api/v1/dataset.py +37 -37
- fractal_server/app/routes/api/v1/job.py +14 -14
- fractal_server/app/routes/api/v1/project.py +23 -21
- fractal_server/app/routes/api/v1/task.py +24 -14
- fractal_server/app/routes/api/v1/task_collection.py +16 -14
- fractal_server/app/routes/api/v1/workflow.py +24 -24
- fractal_server/app/routes/api/v1/workflowtask.py +10 -10
- fractal_server/app/routes/api/v2/__init__.py +28 -0
- fractal_server/app/routes/api/v2/_aux_functions.py +497 -0
- fractal_server/app/routes/api/v2/dataset.py +309 -0
- fractal_server/app/routes/api/v2/images.py +207 -0
- fractal_server/app/routes/api/v2/job.py +200 -0
- fractal_server/app/routes/api/v2/project.py +202 -0
- fractal_server/app/routes/api/v2/submit.py +220 -0
- fractal_server/app/routes/api/v2/task.py +222 -0
- fractal_server/app/routes/api/v2/task_collection.py +229 -0
- fractal_server/app/routes/api/v2/workflow.py +397 -0
- fractal_server/app/routes/api/v2/workflowtask.py +269 -0
- fractal_server/app/routes/aux/_job.py +1 -1
- fractal_server/app/runner/async_wrap.py +27 -0
- fractal_server/app/runner/components.py +5 -0
- fractal_server/app/runner/exceptions.py +129 -0
- fractal_server/app/runner/executors/slurm/__init__.py +3 -0
- fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
- fractal_server/app/runner/{_slurm → executors/slurm}/_check_jobs_status.py +1 -1
- fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +1 -1
- fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
- fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
- fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +32 -19
- fractal_server/app/runner/filenames.py +6 -0
- fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
- fractal_server/app/runner/task_files.py +103 -0
- fractal_server/app/runner/{__init__.py → v1/__init__.py} +22 -20
- fractal_server/app/runner/{_common.py → v1/_common.py} +13 -120
- fractal_server/app/runner/{_local → v1/_local}/__init__.py +5 -5
- fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
- fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
- fractal_server/app/runner/v1/_slurm/__init__.py +310 -0
- fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +3 -9
- fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
- fractal_server/app/runner/v1/common.py +117 -0
- fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
- fractal_server/app/runner/v2/__init__.py +336 -0
- fractal_server/app/runner/v2/_local/__init__.py +167 -0
- fractal_server/app/runner/v2/_local/_local_config.py +118 -0
- fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
- fractal_server/app/runner/v2/_local/executor.py +100 -0
- fractal_server/app/runner/{_slurm → v2/_slurm}/__init__.py +34 -45
- fractal_server/app/runner/v2/_slurm/_submit_setup.py +83 -0
- fractal_server/app/runner/v2/_slurm/get_slurm_config.py +179 -0
- fractal_server/app/runner/v2/deduplicate_list.py +22 -0
- fractal_server/app/runner/v2/handle_failed_job.py +156 -0
- fractal_server/app/runner/v2/merge_outputs.py +38 -0
- fractal_server/app/runner/v2/runner.py +267 -0
- fractal_server/app/runner/v2/runner_functions.py +341 -0
- fractal_server/app/runner/v2/runner_functions_low_level.py +134 -0
- fractal_server/app/runner/v2/task_interface.py +43 -0
- fractal_server/app/runner/v2/v1_compat.py +21 -0
- fractal_server/app/schemas/__init__.py +4 -42
- fractal_server/app/schemas/v1/__init__.py +42 -0
- fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
- fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
- fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
- fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
- fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
- fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
- fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
- fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
- fractal_server/app/schemas/v2/__init__.py +34 -0
- fractal_server/app/schemas/v2/dataset.py +89 -0
- fractal_server/app/schemas/v2/dumps.py +87 -0
- fractal_server/app/schemas/v2/job.py +114 -0
- fractal_server/app/schemas/v2/manifest.py +159 -0
- fractal_server/app/schemas/v2/project.py +37 -0
- fractal_server/app/schemas/v2/task.py +120 -0
- fractal_server/app/schemas/v2/task_collection.py +105 -0
- fractal_server/app/schemas/v2/workflow.py +79 -0
- fractal_server/app/schemas/v2/workflowtask.py +119 -0
- fractal_server/config.py +5 -4
- fractal_server/images/__init__.py +2 -0
- fractal_server/images/models.py +50 -0
- fractal_server/images/tools.py +85 -0
- fractal_server/main.py +11 -3
- fractal_server/migrations/env.py +0 -2
- fractal_server/migrations/versions/d71e732236cd_v2.py +239 -0
- fractal_server/tasks/__init__.py +0 -5
- fractal_server/tasks/endpoint_operations.py +13 -19
- fractal_server/tasks/utils.py +35 -0
- fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
- fractal_server/tasks/{background_operations.py → v1/background_operations.py} +18 -50
- fractal_server/tasks/v1/get_collection_data.py +14 -0
- fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
- fractal_server/tasks/v2/background_operations.py +381 -0
- fractal_server/tasks/v2/get_collection_data.py +14 -0
- {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/METADATA +1 -1
- fractal_server-2.0.0a1.dist-info/RECORD +160 -0
- fractal_server/app/runner/_slurm/.gitignore +0 -2
- fractal_server/app/runner/common.py +0 -311
- fractal_server-1.4.10.dist-info/RECORD +0 -98
- /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
- /fractal_server/app/runner/{_local → v1/_local}/executor.py +0 -0
- {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/LICENSE +0 -0
- {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/WHEEL +0 -0
- {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/entry_points.txt +0 -0
fractal_server/images/tools.py
ADDED
@@ -0,0 +1,85 @@
+from copy import copy
+from typing import Any
+from typing import Literal
+from typing import Optional
+from typing import Union
+
+from fractal_server.images import Filters
+
+
+ImageSearch = dict[Literal["image", "index"], Union[int, dict[str, Any]]]
+
+
+def find_image_by_path(
+    *,
+    images: list[dict[str, Any]],
+    path: str,
+) -> Optional[ImageSearch]:
+    """
+    Return a copy of the image with a given path, and its positional index.
+
+    Arguments:
+        images: List of images.
+        path: Path that the returned image must have.
+
+    Returns:
+        The first image from `images` which has path equal to `path`.
+    """
+    image_paths = [img["path"] for img in images]
+    try:
+        ind = image_paths.index(path)
+    except ValueError:
+        return None
+    return dict(image=copy(images[ind]), index=ind)
+
+
+# FIXME: what is filters
+def match_filter(image: dict[str, Any], filters: Filters) -> bool:
+    """
+    Find whether an image matches a filter set.
+
+    Arguments:
+        image: A single image.
+        filters: A set of filters.
+
+    Returns:
+        Whether the image matches the filter set.
+    """
+    # Verify match with types (using a False default)
+    for key, value in filters.types.items():
+        if image["types"].get(key, False) != value:
+            return False
+    # Verify match with attributes (only for non-None filters)
+    for key, value in filters.attributes.items():
+        if value is None:
+            continue
+        if image["attributes"].get(key) != value:
+            return False
+    return True
+
+
+def filter_image_list(
+    images: list[dict[str, Any]],
+    filters: Filters,
+) -> list[dict[str, Any]]:
+    """
+    Compute a sublist with images that match a filter set.
+
+    Arguments:
+        images: A list of images.
+        filters: A set of filters.
+
+    Returns:
+        List of the `images` elements which match the filter set.
+    """
+
+    # When no filter is provided, return all images
+    if filters.attributes == {} and filters.types == {}:
+        return images
+
+    filtered_images = [
+        copy(this_image)
+        for this_image in images
+        if match_filter(this_image, filters=filters)
+    ]
+    return filtered_images
fractal_server/main.py
CHANGED
@@ -32,13 +32,21 @@ def collect_routers(app: FastAPI) -> None:
     """
     from .app.routes.api import router_api
     from .app.routes.api.v1 import router_api_v1
-    from .app.routes.
+    from .app.routes.api.v2 import router_api_v2
+    from .app.routes.admin.v1 import router_admin_v1
+    from .app.routes.admin.v2 import router_admin_v2
     from .app.routes.auth import router_auth
 
     app.include_router(router_api, prefix="/api")
     app.include_router(router_api_v1, prefix="/api/v1")
-    app.include_router(
-    app.include_router(
+    app.include_router(router_api_v2, prefix="/api/v2")
+    app.include_router(
+        router_admin_v1, prefix="/admin/v1", tags=["V1 Admin area"]
+    )
+    app.include_router(
+        router_admin_v2, prefix="/admin/v2", tags=["V2 Admin area"]
+    )
+    app.include_router(router_auth, prefix="/auth", tags=["Authentication"])
 
 
 def check_settings() -> None:
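A hedged sketch (not part of the diff) of what the new router layout gives you at runtime: calling `collect_routers` on a bare FastAPI app mounts the V1 and V2 APIs side by side under `/api/v1`, `/api/v2`, `/admin/v1` and `/admin/v2`. It assumes fractal-server 2.0.0a1 is installed and that importing `fractal_server.main` works in your environment.

```python
# Sketch only: mounts the fractal-server routers on a bare FastAPI app in
# order to inspect the resulting route prefixes.
from fastapi import FastAPI

from fractal_server.main import collect_routers

app = FastAPI()
collect_routers(app)

v2_api = [r.path for r in app.routes if r.path.startswith("/api/v2")]
v2_admin = [r.path for r in app.routes if r.path.startswith("/admin/v2")]
print(f"{len(v2_api)} /api/v2 routes, {len(v2_admin)} /admin/v2 routes")
```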
fractal_server/migrations/env.py
CHANGED
@@ -58,7 +58,6 @@ def run_migrations_offline() -> None:
         target_metadata=target_metadata,
         literal_binds=True,
         dialect_opts={"paramstyle": "named"},
-        render_as_batch=True,
     )
 
     with context.begin_transaction():
@@ -69,7 +68,6 @@ def do_run_migrations(connection: Connection) -> None:
     context.configure(
         connection=connection,
         target_metadata=target_metadata,
-        render_as_batch=True,
     )
 
     with context.begin_transaction():
fractal_server/migrations/versions/d71e732236cd_v2.py
ADDED
@@ -0,0 +1,239 @@
+"""v2
+
+Revision ID: d71e732236cd
+Revises: 9fd26a2b0de4
+Create Date: 2024-04-05 11:09:17.639183
+
+"""
+import sqlalchemy as sa
+import sqlmodel
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "d71e732236cd"
+down_revision = "9fd26a2b0de4"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        "projectv2",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+        sa.Column("read_only", sa.Boolean(), nullable=False),
+        sa.Column(
+            "timestamp_created", sa.DateTime(timezone=True), nullable=False
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "taskv2",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+        sa.Column("type", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+        sa.Column(
+            "command_non_parallel",
+            sqlmodel.sql.sqltypes.AutoString(),
+            nullable=True,
+        ),
+        sa.Column(
+            "command_parallel",
+            sqlmodel.sql.sqltypes.AutoString(),
+            nullable=True,
+        ),
+        sa.Column(
+            "source", sqlmodel.sql.sqltypes.AutoString(), nullable=False
+        ),
+        sa.Column(
+            "meta_non_parallel", sa.JSON(), server_default="{}", nullable=False
+        ),
+        sa.Column(
+            "meta_parallel", sa.JSON(), server_default="{}", nullable=False
+        ),
+        sa.Column("owner", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+        sa.Column(
+            "version", sqlmodel.sql.sqltypes.AutoString(), nullable=True
+        ),
+        sa.Column("args_schema_non_parallel", sa.JSON(), nullable=True),
+        sa.Column("args_schema_parallel", sa.JSON(), nullable=True),
+        sa.Column(
+            "args_schema_version",
+            sqlmodel.sql.sqltypes.AutoString(),
+            nullable=True,
+        ),
+        sa.Column(
+            "docs_info", sqlmodel.sql.sqltypes.AutoString(), nullable=True
+        ),
+        sa.Column(
+            "docs_link", sqlmodel.sql.sqltypes.AutoString(), nullable=True
+        ),
+        sa.Column("input_types", sa.JSON(), nullable=True),
+        sa.Column("output_types", sa.JSON(), nullable=True),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("source"),
+    )
+    op.create_table(
+        "datasetv2",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+        sa.Column("project_id", sa.Integer(), nullable=False),
+        sa.Column("history", sa.JSON(), server_default="[]", nullable=False),
+        sa.Column("read_only", sa.Boolean(), nullable=False),
+        sa.Column(
+            "timestamp_created", sa.DateTime(timezone=True), nullable=False
+        ),
+        sa.Column(
+            "zarr_dir", sqlmodel.sql.sqltypes.AutoString(), nullable=False
+        ),
+        sa.Column("images", sa.JSON(), server_default="[]", nullable=False),
+        sa.Column(
+            "filters",
+            sa.JSON(),
+            server_default='{"attributes": {}, "types": {}}',
+            nullable=False,
+        ),
+        sa.ForeignKeyConstraint(
+            ["project_id"],
+            ["projectv2.id"],
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "linkuserprojectv2",
+        sa.Column("project_id", sa.Integer(), nullable=False),
+        sa.Column("user_id", sa.Integer(), nullable=False),
+        sa.ForeignKeyConstraint(
+            ["project_id"],
+            ["projectv2.id"],
+        ),
+        sa.ForeignKeyConstraint(
+            ["user_id"],
+            ["user_oauth.id"],
+        ),
+        sa.PrimaryKeyConstraint("project_id", "user_id"),
+    )
+    op.create_table(
+        "workflowv2",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+        sa.Column("project_id", sa.Integer(), nullable=False),
+        sa.Column(
+            "timestamp_created", sa.DateTime(timezone=True), nullable=False
+        ),
+        sa.ForeignKeyConstraint(
+            ["project_id"],
+            ["projectv2.id"],
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "jobv2",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("project_id", sa.Integer(), nullable=True),
+        sa.Column("workflow_id", sa.Integer(), nullable=True),
+        sa.Column("dataset_id", sa.Integer(), nullable=True),
+        sa.Column(
+            "user_email", sqlmodel.sql.sqltypes.AutoString(), nullable=False
+        ),
+        sa.Column(
+            "slurm_account", sqlmodel.sql.sqltypes.AutoString(), nullable=True
+        ),
+        sa.Column("dataset_dump", sa.JSON(), nullable=False),
+        sa.Column("workflow_dump", sa.JSON(), nullable=False),
+        sa.Column("project_dump", sa.JSON(), nullable=False),
+        sa.Column(
+            "worker_init", sqlmodel.sql.sqltypes.AutoString(), nullable=True
+        ),
+        sa.Column(
+            "working_dir", sqlmodel.sql.sqltypes.AutoString(), nullable=True
+        ),
+        sa.Column(
+            "working_dir_user",
+            sqlmodel.sql.sqltypes.AutoString(),
+            nullable=True,
+        ),
+        sa.Column("first_task_index", sa.Integer(), nullable=False),
+        sa.Column("last_task_index", sa.Integer(), nullable=False),
+        sa.Column(
+            "start_timestamp", sa.DateTime(timezone=True), nullable=False
+        ),
+        sa.Column("end_timestamp", sa.DateTime(timezone=True), nullable=True),
+        sa.Column(
+            "status", sqlmodel.sql.sqltypes.AutoString(), nullable=False
+        ),
+        sa.Column("log", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["dataset_id"],
+            ["datasetv2.id"],
+        ),
+        sa.ForeignKeyConstraint(
+            ["project_id"],
+            ["projectv2.id"],
+        ),
+        sa.ForeignKeyConstraint(
+            ["workflow_id"],
+            ["workflowv2.id"],
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "workflowtaskv2",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("workflow_id", sa.Integer(), nullable=False),
+        sa.Column("order", sa.Integer(), nullable=True),
+        sa.Column("meta_parallel", sa.JSON(), nullable=True),
+        sa.Column("meta_non_parallel", sa.JSON(), nullable=True),
+        sa.Column("args_parallel", sa.JSON(), nullable=True),
+        sa.Column("args_non_parallel", sa.JSON(), nullable=True),
+        sa.Column(
+            "input_filters",
+            sa.JSON(),
+            server_default='{"attributes": {}, "types": {}}',
+            nullable=False,
+        ),
+        sa.Column("is_legacy_task", sa.Boolean(), nullable=False),
+        sa.Column(
+            "task_type", sqlmodel.sql.sqltypes.AutoString(), nullable=False
+        ),
+        sa.Column("task_id", sa.Integer(), nullable=True),
+        sa.Column("task_legacy_id", sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["task_id"],
+            ["taskv2.id"],
+        ),
+        sa.ForeignKeyConstraint(
+            ["task_legacy_id"],
+            ["task.id"],
+        ),
+        sa.ForeignKeyConstraint(
+            ["workflow_id"],
+            ["workflowv2.id"],
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.add_column(
+        "task",
+        sa.Column(
+            "is_v2_compatible",
+            sa.Boolean(),
+            server_default=sa.text("false"),
+            nullable=False,
+        ),
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column("task", "is_v2_compatible")
+    op.drop_table("workflowtaskv2")
+    op.drop_table("jobv2")
+    op.drop_table("workflowv2")
+    op.drop_table("linkuserprojectv2")
+    op.drop_table("datasetv2")
+    op.drop_table("taskv2")
+    op.drop_table("projectv2")
+    # ### end Alembic commands ###
fractal_server/tasks/endpoint_operations.py
CHANGED
@@ -4,35 +4,26 @@ from typing import Optional
 from typing import Union
 from zipfile import ZipFile
 
-from
-from
+from .utils import _normalize_package_name
+from .utils import get_python_interpreter
+from .v1._TaskCollectPip import _TaskCollectPip as _TaskCollectPipV1
+from .v2._TaskCollectPip import _TaskCollectPip as _TaskCollectPipV2
+from fractal_server.app.schemas.v1 import ManifestV1
+from fractal_server.app.schemas.v2 import ManifestV2
 from fractal_server.config import get_settings
 from fractal_server.logger import get_logger
 from fractal_server.syringe import Inject
-from fractal_server.tasks._TaskCollectPip import _TaskCollectPip
-from fractal_server.tasks.utils import _normalize_package_name
-from fractal_server.tasks.utils import get_absolute_venv_path
-from fractal_server.tasks.utils import get_collection_path
-from fractal_server.tasks.utils import get_python_interpreter
 from fractal_server.utils import execute_command
 
 
 FRACTAL_PUBLIC_TASK_SUBDIR = ".fractal"
 
 
-def get_collection_data(venv_path: Path) -> TaskCollectStatus:
-    package_path = get_absolute_venv_path(venv_path)
-    collection_path = get_collection_path(package_path)
-    with collection_path.open() as f:
-        data = json.load(f)
-    return TaskCollectStatus(**data)
-
-
 async def download_package(
     *,
-    task_pkg:
+    task_pkg: Union[_TaskCollectPipV1, _TaskCollectPipV2],
     dest: Union[str, Path],
-):
+) -> Path:
     """
     Download package to destination
     """
@@ -52,7 +43,7 @@ async def download_package(
 
 def _load_manifest_from_wheel(
     path: Path, wheel: ZipFile, logger_name: Optional[str] = None
-) -> ManifestV1:
+) -> Union[ManifestV1, ManifestV2]:
     logger = get_logger(logger_name)
     namelist = wheel.namelist()
     try:
@@ -69,6 +60,9 @@ def _load_manifest_from_wheel(
     if manifest_version == "1":
         pkg_manifest = ManifestV1(**manifest_dict)
         return pkg_manifest
+    elif manifest_version == "2":
+        pkg_manifest = ManifestV2(**manifest_dict)
+        return pkg_manifest
     else:
         msg = f"Manifest version {manifest_version=} not supported"
         logger.error(msg)
@@ -146,7 +140,7 @@ def inspect_package(path: Path, logger_name: Optional[str] = None) -> dict:
 
 def create_package_dir_pip(
     *,
-    task_pkg:
+    task_pkg: Union[_TaskCollectPipV1, _TaskCollectPipV2],
     create: bool = True,
 ) -> Path:
     """
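A hedged sketch of the new manifest-version dispatch in `_load_manifest_from_wheel`, which now returns either `ManifestV1` or `ManifestV2`. The wheel path is hypothetical, and the import location is an assumption (the helper is defined in the module diffed above).

```python
# Sketch only: the wheel path is hypothetical and the import location is an
# assumption; the helper raises/logs if the manifest version is unsupported.
from pathlib import Path
from zipfile import ZipFile

from fractal_server.app.schemas.v2 import ManifestV2
from fractal_server.tasks.endpoint_operations import _load_manifest_from_wheel

wheel_path = Path("/tmp/my_tasks-1.0.0-py3-none-any.whl")
with ZipFile(wheel_path) as wheel:
    manifest = _load_manifest_from_wheel(wheel_path, wheel)

# Branch on the returned schema to tell V1 and V2 task packages apart.
if isinstance(manifest, ManifestV2):
    print("wheel ships a V2 task manifest")
else:
    print("wheel ships a V1 task manifest")
```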
fractal_server/tasks/utils.py
CHANGED
@@ -5,7 +5,9 @@ from pathlib import Path
 from typing import Optional
 
 from fractal_server.config import get_settings
+from fractal_server.logger import get_logger
 from fractal_server.syringe import Inject
+from fractal_server.utils import execute_command
 
 COLLECTION_FILENAME = "collection.json"
 COLLECTION_LOG_FILENAME = "collection.log"
@@ -84,3 +86,36 @@ def _normalize_package_name(name: str) -> str:
         The normalized package name.
     """
     return re.sub(r"[-_.]+", "-", name).lower()
+
+
+async def _init_venv(
+    *,
+    path: Path,
+    python_version: Optional[str] = None,
+    logger_name: str,
+) -> Path:
+    """
+    Set a virtual environment at `path/venv`
+
+    Args:
+        path : Path
+            path to directory in which to set up the virtual environment
+        python_version : default=None
+            Python version the virtual environment will be based upon
+
+    Returns:
+        python_bin : Path
+            path to python interpreter
+    """
+    logger = get_logger(logger_name)
+    logger.debug(f"[_init_venv] {path=}")
+    interpreter = get_python_interpreter(version=python_version)
+    logger.debug(f"[_init_venv] {interpreter=}")
+    await execute_command(
+        cwd=path,
+        command=f"{interpreter} -m venv venv",
+        logger_name=logger_name,
+    )
+    python_bin = path / "venv/bin/python"
+    logger.debug(f"[_init_venv] {python_bin=}")
+    return python_bin
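`_init_venv` is now a shared helper in `fractal_server/tasks/utils.py`. A minimal asyncio sketch of driving it directly; the target directory and logger name are hypothetical, and a configured fractal-server settings environment is assumed so that `get_python_interpreter` can resolve an interpreter.

```python
# Sketch only: the target directory and logger name are hypothetical; a
# configured fractal-server environment is assumed.
import asyncio
from pathlib import Path

from fractal_server.tasks.utils import _init_venv


async def main() -> None:
    venv_parent = Path("/tmp/fractal-task-env")
    venv_parent.mkdir(parents=True, exist_ok=True)
    # Creates `/tmp/fractal-task-env/venv` and returns its python binary.
    python_bin = await _init_venv(
        path=venv_parent,
        python_version=None,  # fall back to the configured default interpreter
        logger_name="task_collection_example",
    )
    print(python_bin)


asyncio.run(main())
```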
fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py}
RENAMED
@@ -3,11 +3,11 @@ from typing import Optional
 
 from pydantic import root_validator
 
-from fractal_server.app.schemas import ManifestV1
-from fractal_server.app.schemas import
+from fractal_server.app.schemas.v1 import ManifestV1
+from fractal_server.app.schemas.v1 import TaskCollectPipV1
 
 
-class _TaskCollectPip(
+class _TaskCollectPip(TaskCollectPipV1):
     """
     Internal TaskCollectPip schema
 
fractal_server/tasks/{background_operations.py → v1/background_operations.py}
RENAMED
@@ -5,61 +5,27 @@ is used as a background task for the task-collection endpoint.
 import json
 from pathlib import Path
 from shutil import rmtree as shell_rmtree
-from typing import Optional
 
+from ..utils import _init_venv
+from ..utils import _normalize_package_name
+from ..utils import get_collection_log
+from ..utils import get_collection_path
+from ..utils import get_log_path
+from ..utils import slugify_task_name
+from ._TaskCollectPip import _TaskCollectPip
 from fractal_server.app.db import DBSyncSession
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.models import State
 from fractal_server.app.models import Task
-from fractal_server.app.schemas import
-from fractal_server.app.schemas import
-from fractal_server.app.schemas import
+from fractal_server.app.schemas.v1 import TaskCollectStatusV1
+from fractal_server.app.schemas.v1 import TaskCreateV1
+from fractal_server.app.schemas.v1 import TaskReadV1
 from fractal_server.logger import close_logger
 from fractal_server.logger import get_logger
 from fractal_server.logger import set_logger
-from fractal_server.tasks._TaskCollectPip import _TaskCollectPip
-from fractal_server.tasks.utils import _normalize_package_name
-from fractal_server.tasks.utils import get_collection_log
-from fractal_server.tasks.utils import get_collection_path
-from fractal_server.tasks.utils import get_log_path
-from fractal_server.tasks.utils import get_python_interpreter
-from fractal_server.tasks.utils import slugify_task_name
 from fractal_server.utils import execute_command
 
 
-async def _init_venv(
-    *,
-    path: Path,
-    python_version: Optional[str] = None,
-    logger_name: str,
-) -> Path:
-    """
-    Set a virtual environment at `path/venv`
-
-    Args:
-        path : Path
-            path to directory in which to set up the virtual environment
-        python_version : default=None
-            Python version the virtual environment will be based upon
-
-    Returns:
-        python_bin : Path
-            path to python interpreter
-    """
-    logger = get_logger(logger_name)
-    logger.debug(f"[_init_venv] {path=}")
-    interpreter = get_python_interpreter(version=python_version)
-    logger.debug(f"[_init_venv] {interpreter=}")
-    await execute_command(
-        cwd=path,
-        command=f"{interpreter} -m venv venv",
-        logger_name=logger_name,
-    )
-    python_bin = path / "venv/bin/python"
-    logger.debug(f"[_init_venv] {python_bin=}")
-    return python_bin
-
-
 async def _pip_install(
     venv_path: Path,
     task_pkg: _TaskCollectPip,
@@ -218,7 +184,7 @@ async def create_package_environment_pip(
     task_pkg: _TaskCollectPip,
     venv_path: Path,
     logger_name: str,
-) -> list[
+) -> list[TaskCreateV1]:
     """
     Create environment, install package, and prepare task list
     """
@@ -263,7 +229,7 @@ async def create_package_environment_pip(
             )
         else:
             additional_attrs = {}
-        this_task =
+        this_task = TaskCreateV1(
            **t.dict(),
            command=cmd,
            version=task_pkg.package_version,
@@ -279,7 +245,7 @@ async def create_package_environment_pip(
 
 
 async def _insert_tasks(
-    task_list: list[
+    task_list: list[TaskCreateV1],
     db: DBSyncSession,
 ) -> list[Task]:
     """
@@ -319,7 +285,7 @@ async def background_collect_pip(
 
     with next(get_sync_db()) as db:
         state: State = db.get(State, state_id)
-        data =
+        data = TaskCollectStatusV1(**state.data)
         data.info = None
 
         try:
@@ -347,9 +313,11 @@ async def background_collect_pip(
             # finalise
             logger.debug("Task-collection status: finalising")
             collection_path = get_collection_path(venv_path)
-            data.task_list = [
+            data.task_list = [
+                TaskReadV1(**task.model_dump()) for task in tasks
+            ]
             with collection_path.open("w") as f:
-                json.dump(data.sanitised_dict(), f)
+                json.dump(data.sanitised_dict(), f, indent=2)
 
             # Update DB
             data.status = "OK"
fractal_server/tasks/v1/get_collection_data.py
ADDED
@@ -0,0 +1,14 @@
+import json
+from pathlib import Path
+
+from fractal_server.app.schemas.v1 import TaskCollectStatusV1
+from fractal_server.tasks.utils import get_absolute_venv_path
+from fractal_server.tasks.utils import get_collection_path
+
+
+def get_collection_data(venv_path: Path) -> TaskCollectStatusV1:
+    package_path = get_absolute_venv_path(venv_path)
+    collection_path = get_collection_path(package_path)
+    with collection_path.open() as f:
+        data = json.load(f)
+    return TaskCollectStatusV1(**data)