fractal-server 1.4.2a5__py3-none-any.whl → 1.4.3a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/db/__init__.py +36 -25
- fractal_server/app/models/dataset.py +8 -0
- fractal_server/app/models/workflow.py +9 -55
- fractal_server/app/routes/admin.py +8 -8
- fractal_server/app/routes/api/v1/_aux_functions.py +64 -5
- fractal_server/app/routes/api/v1/dataset.py +24 -23
- fractal_server/app/routes/api/v1/job.py +7 -7
- fractal_server/app/routes/api/v1/project.py +29 -26
- fractal_server/app/routes/api/v1/task.py +6 -6
- fractal_server/app/routes/api/v1/task_collection.py +12 -126
- fractal_server/app/routes/api/v1/workflow.py +16 -14
- fractal_server/app/routes/api/v1/workflowtask.py +8 -6
- fractal_server/app/routes/auth.py +2 -2
- fractal_server/app/runner/__init__.py +0 -1
- fractal_server/app/schemas/__init__.py +1 -0
- fractal_server/app/schemas/applyworkflow.py +9 -13
- fractal_server/app/schemas/dataset.py +2 -0
- fractal_server/app/schemas/dumps.py +2 -0
- fractal_server/app/schemas/task_collection.py +2 -10
- fractal_server/app/schemas/user.py +7 -3
- fractal_server/app/schemas/workflow.py +2 -0
- fractal_server/app/security/__init__.py +3 -3
- fractal_server/config.py +14 -0
- fractal_server/migrations/versions/4cedeb448a53_workflowtask_foreign_keys_not_nullables.py +42 -0
- fractal_server/migrations/versions/9fd26a2b0de4_add_workflow_timestamp_created.py +60 -0
- fractal_server/tasks/_TaskCollectPip.py +103 -0
- fractal_server/tasks/__init__.py +3 -1
- fractal_server/tasks/background_operations.py +384 -0
- fractal_server/tasks/endpoint_operations.py +167 -0
- fractal_server/tasks/utils.py +86 -0
- {fractal_server-1.4.2a5.dist-info → fractal_server-1.4.3a1.dist-info}/METADATA +2 -2
- {fractal_server-1.4.2a5.dist-info → fractal_server-1.4.3a1.dist-info}/RECORD +36 -31
- fractal_server/tasks/collection.py +0 -556
- {fractal_server-1.4.2a5.dist-info → fractal_server-1.4.3a1.dist-info}/LICENSE +0 -0
- {fractal_server-1.4.2a5.dist-info → fractal_server-1.4.3a1.dist-info}/WHEEL +0 -0
- {fractal_server-1.4.2a5.dist-info → fractal_server-1.4.3a1.dist-info}/entry_points.txt +0 -0
fractal_server/app/schemas/task_collection.py
CHANGED
@@ -15,15 +15,7 @@ __all__ = (
 )
 
 
-class _TaskCollectBase(BaseModel):
-    """
-    Base class for `TaskCollectPip`.
-    """
-
-    pass
-
-
-class TaskCollectPip(_TaskCollectBase):
+class TaskCollectPip(BaseModel):
     """
     TaskCollectPip class
 
@@ -89,7 +81,7 @@ class TaskCollectPip(_TaskCollectBase):
         return v
 
 
-class TaskCollectStatus(_TaskCollectBase):
+class TaskCollectStatus(BaseModel):
     """
     TaskCollectStatus class
 
fractal_server/app/schemas/user.py
CHANGED
@@ -118,9 +118,13 @@ class UserCreate(schemas.BaseUserCreate):
 
     # Validators
 
-
-
-
+    @validator("slurm_accounts")
+    def slurm_accounts_validator(cls, value):
+        for i, element in enumerate(value):
+            value[i] = valstr(attribute=f"slurm_accounts[{i}]")(element)
+        val_unique_list("slurm_accounts")(value)
+        return value
+
     _slurm_user = validator("slurm_user", allow_reuse=True)(
         valstr("slurm_user")
     )
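The new `slurm_accounts` validator checks each list element individually and then enforces uniqueness of the whole list. Below is a minimal, self-contained sketch of that pattern, assuming pydantic v1-style validators; `valstr` and `val_unique_list` here are simplified stand-ins for fractal-server's helpers, not the actual implementations.

from typing import Callable
from typing import List

from pydantic import BaseModel
from pydantic import validator


def valstr(attribute: str) -> Callable[[str], str]:
    # Simplified stand-in: strip whitespace and reject empty strings.
    def _val(value: str) -> str:
        value = value.strip()
        if not value:
            raise ValueError(f"{attribute} cannot be empty")
        return value

    return _val


def val_unique_list(attribute: str) -> Callable[[list], list]:
    # Simplified stand-in: reject lists containing duplicate items.
    def _val(values: list) -> list:
        if len(values) != len(set(values)):
            raise ValueError(f"{attribute} must contain unique items")
        return values

    return _val


class UserCreateSketch(BaseModel):
    slurm_accounts: List[str] = []

    @validator("slurm_accounts")
    def slurm_accounts_validator(cls, value):
        # Validate each element, then enforce uniqueness on the whole list.
        for i, element in enumerate(value):
            value[i] = valstr(attribute=f"slurm_accounts[{i}]")(element)
        val_unique_list("slurm_accounts")(value)
        return value


print(UserCreateSketch(slurm_accounts=[" a1 ", "a2"]).slurm_accounts)
# ['a1', 'a2']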
fractal_server/app/schemas/workflow.py
CHANGED
@@ -1,3 +1,4 @@
+from datetime import datetime
 from enum import Enum
 from typing import Any
 from typing import Optional
@@ -131,6 +132,7 @@ class WorkflowRead(_WorkflowBase):
     project_id: int
     task_list: list[WorkflowTaskRead]
     project: ProjectRead
+    timestamp_created: datetime
 
 
 class WorkflowCreate(_WorkflowBase):
fractal_server/app/security/__init__.py
CHANGED
@@ -56,7 +56,7 @@ from sqlmodel import select
 
 from ...config import get_settings
 from ...syringe import Inject
-from ..db import get_db
+from ..db import get_async_db
 from ..models.security import OAuthAccount
 from ..models.security import UserOAuth as User
 from fractal_server.app.models.security import UserOAuth
@@ -175,7 +175,7 @@ class SQLModelUserDatabaseAsync(Generic[UP, ID], BaseUserDatabase[UP, ID]):
 
 
 async def get_user_db(
-    session: AsyncSession = Depends(get_db),
+    session: AsyncSession = Depends(get_async_db),
 ) -> AsyncGenerator[SQLModelUserDatabaseAsync, None]:
     yield SQLModelUserDatabaseAsync(session, User, OAuthAccount)
 
@@ -247,7 +247,7 @@ current_active_superuser = fastapi_users.current_user(
     active=True, superuser=True
 )
 
-get_async_session_context = contextlib.asynccontextmanager(get_db)
+get_async_session_context = contextlib.asynccontextmanager(get_async_db)
 get_user_db_context = contextlib.asynccontextmanager(get_user_db)
 get_user_manager_context = contextlib.asynccontextmanager(get_user_manager)
 
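The renamed DB dependency is also wrapped with `contextlib.asynccontextmanager`, so the same async-generator function can serve both as a FastAPI dependency and as an `async with` context manager outside of request handling. A minimal sketch of that pattern, using a dummy stand-in for `get_async_db` rather than fractal-server's implementation:

import asyncio
import contextlib
from typing import AsyncGenerator


async def get_async_db() -> AsyncGenerator[str, None]:
    # Dummy stand-in for a function that yields a database session.
    session = "fake-session"
    try:
        yield session
    finally:
        pass  # a real implementation would close the session here


# Wrapping the async generator turns it into an async context manager.
get_async_session_context = contextlib.asynccontextmanager(get_async_db)


async def main() -> None:
    async with get_async_session_context() as session:
        print(f"using {session}")


asyncio.run(main())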
fractal_server/config.py
CHANGED
@@ -360,6 +360,14 @@ class Settings(BaseSettings):
                 raise FractalConfigurationError(
                     "POSTGRES_DB cannot be None when DB_ENGINE=postgres."
                 )
+            try:
+                import psycopg2  # noqa: F401
+                import asyncpg  # noqa: F401
+            except ModuleNotFoundError:
+                raise FractalConfigurationError(
+                    "DB engine is `postgres` but `psycopg2` or `asyncpg` "
+                    "are not available"
+                )
         else:
             if not self.SQLITE_PATH:
                 raise FractalConfigurationError(
@@ -375,6 +383,12 @@ class Settings(BaseSettings):
 
         info = f"FRACTAL_RUNNER_BACKEND={self.FRACTAL_RUNNER_BACKEND}"
         if self.FRACTAL_RUNNER_BACKEND == "slurm":
+            try:
+                import cfut  # noqa: F401
+            except ModuleNotFoundError:
+                raise FractalConfigurationError(
+                    f"{info} but `clusterfutures` is not available"
+                )
             if not self.FRACTAL_SLURM_CONFIG_FILE:
                 raise FractalConfigurationError(
                     f"Must set FRACTAL_SLURM_CONFIG_FILE when {info}"
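Both additions follow the same fail-fast idea: at configuration-check time, try to import the optional dependencies required by the selected backend and raise a configuration error if they are missing, instead of failing later at runtime. A generic sketch of the pattern (the function and error names below are illustrative, not fractal-server's API):

import importlib


class ConfigurationError(RuntimeError):
    pass


def require_modules(*module_names: str, context: str) -> None:
    # Raise a configuration error if any required module cannot be imported.
    missing = []
    for name in module_names:
        try:
            importlib.import_module(name)
        except ModuleNotFoundError:
            missing.append(name)
    if missing:
        raise ConfigurationError(f"{context} but {missing} are not available")


# Example: mirror the postgres check (requires psycopg2 and asyncpg).
try:
    require_modules("psycopg2", "asyncpg", context="DB_ENGINE=postgres")
except ConfigurationError as e:
    print(e)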
fractal_server/migrations/versions/4cedeb448a53_workflowtask_foreign_keys_not_nullables.py
ADDED
@@ -0,0 +1,42 @@
+"""WorkflowTask foreign keys not nullables
+
+Revision ID: 4cedeb448a53
+Revises: efa89c30e0a4
+Create Date: 2024-01-16 13:57:47.891931
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "4cedeb448a53"
+down_revision = "efa89c30e0a4"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("workflowtask", schema=None) as batch_op:
+        batch_op.alter_column(
+            "workflow_id", existing_type=sa.INTEGER(), nullable=False
+        )
+        batch_op.alter_column(
+            "task_id", existing_type=sa.INTEGER(), nullable=False
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("workflowtask", schema=None) as batch_op:
+        batch_op.alter_column(
+            "task_id", existing_type=sa.INTEGER(), nullable=True
+        )
+        batch_op.alter_column(
+            "workflow_id", existing_type=sa.INTEGER(), nullable=True
+        )
+
+    # ### end Alembic commands ###
fractal_server/migrations/versions/9fd26a2b0de4_add_workflow_timestamp_created.py
ADDED
@@ -0,0 +1,60 @@
+"""add Workflow.timestamp_created and Dataset.timestamp_created
+
+Revision ID: 9fd26a2b0de4
+Revises: efa89c30e0a4
+Create Date: 2024-01-11 09:31:20.950090
+
+"""
+from datetime import datetime
+from datetime import timezone
+
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "9fd26a2b0de4"
+down_revision = "4cedeb448a53"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("workflow", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "timestamp_created",
+                sa.DateTime(timezone=True),
+                nullable=False,
+                server_default=str(datetime(2000, 1, 1, tzinfo=timezone.utc)),
+            )
+        )
+
+    with op.batch_alter_table("workflow", schema=None) as batch_op:
+        batch_op.alter_column("timestamp_created", server_default=None)
+
+    with op.batch_alter_table("dataset", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "timestamp_created",
+                sa.DateTime(timezone=True),
+                nullable=False,
+                server_default=str(datetime(2000, 1, 1, tzinfo=timezone.utc)),
+            )
+        )
+
+    with op.batch_alter_table("dataset", schema=None) as batch_op:
+        batch_op.alter_column("timestamp_created", server_default=None)
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("dataset", schema=None) as batch_op:
+        batch_op.drop_column("timestamp_created")
+
+    with op.batch_alter_table("workflow", schema=None) as batch_op:
+        batch_op.drop_column("timestamp_created")
+
+    # ### end Alembic commands ###
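The migration adds each NOT NULL `timestamp_created` column in two steps: first with a temporary `server_default` (a placeholder timestamp) so existing rows are backfilled, then with the default removed so new rows must set the value explicitly. The sketch below demonstrates only the backfill step, on a throwaway SQLite table with illustrative names:

import sqlalchemy as sa

engine = sa.create_engine("sqlite://")
with engine.begin() as conn:
    conn.execute(sa.text("CREATE TABLE workflow (id INTEGER PRIMARY KEY)"))
    conn.execute(sa.text("INSERT INTO workflow (id) VALUES (1)"))
    # Step 1: add the NOT NULL column with a server default, which backfills
    # the pre-existing row with the placeholder timestamp.
    conn.execute(
        sa.text(
            "ALTER TABLE workflow ADD COLUMN timestamp_created DATETIME "
            "NOT NULL DEFAULT '2000-01-01 00:00:00+00:00'"
        )
    )
    rows = conn.execute(sa.text("SELECT * FROM workflow")).all()
    print(rows)  # existing row now carries the placeholder timestamp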
fractal_server/tasks/_TaskCollectPip.py
ADDED
@@ -0,0 +1,103 @@
+from pathlib import Path
+from typing import Optional
+
+from pydantic import root_validator
+
+from fractal_server.app.schemas import ManifestV1
+from fractal_server.app.schemas import TaskCollectPip
+
+
+class _TaskCollectPip(TaskCollectPip):
+    """
+    Internal TaskCollectPip schema
+
+    Differences with its parent class (`TaskCollectPip`):
+
+    1. We check if the package corresponds to a path in the filesystem, and
+       whether it exists (via new validator `check_local_package`, new
+       method `is_local_package` and new attribute `package_path`).
+    2. We include an additional `package_manifest` attribute.
+    3. We expose an additional attribute `package_name`, which is filled
+       during task collection.
+    """
+
+    package_name: Optional[str] = None
+    package_path: Optional[Path] = None
+    package_manifest: Optional[ManifestV1] = None
+
+    @property
+    def is_local_package(self) -> bool:
+        return bool(self.package_path)
+
+    @root_validator(pre=True)
+    def check_local_package(cls, values):
+        """
+        Checks if package corresponds to an existing path on the filesystem
+
+        In this case, the user is providing directly a package file, rather
+        than a remote one from PyPI. We set the `package_path` attribute and
+        get the actual package name and version from the package file name.
+        """
+        if "/" in values["package"]:
+            package_path = Path(values["package"])
+            if not package_path.is_absolute():
+                raise ValueError("Package path must be absolute")
+            if package_path.exists():
+                values["package_path"] = package_path
+                (
+                    values["package"],
+                    values["version"],
+                    *_,
+                ) = package_path.name.split("-")
+            else:
+                raise ValueError(f"Package {package_path} does not exist.")
+        return values
+
+    @property
+    def package_source(self) -> str:
+        """
+        NOTE: As of PR #1188 in `fractal-server`, the attribute
+        `self.package_name` is normalized; this means e.g. that `_` is
+        replaced by `-`. To guarantee backwards compatibility with
+        `Task.source` attributes created before this change, we still replace
+        `-` with `_` upon generation of the `source` attribute, in this
+        method.
+        """
+        if not self.package_name or not self.package_version:
+            raise ValueError(
+                "Cannot construct `package_source` property with "
+                f"{self.package_name=} and {self.package_version=}."
+            )
+        if self.is_local_package:
+            collection_type = "pip_local"
+        else:
+            collection_type = "pip_remote"
+
+        package_extras = self.package_extras or ""
+        if self.python_version:
+            python_version = f"py{self.python_version}"
+        else:
+            python_version = ""  # FIXME: can we allow this?
+
+        source = ":".join(
+            (
+                collection_type,
+                self.package_name.replace("-", "_"),  # see method docstring
+                self.package_version,
+                package_extras,
+                python_version,
+            )
+        )
+        return source
+
+    def check(self):
+        """
+        Verify that the package has all attributes that are needed to continue
+        with task collection
+        """
+        if not self.package_name:
+            raise ValueError("`package_name` attribute is not set")
+        if not self.package_version:
+            raise ValueError("`package_version` attribute is not set")
+        if not self.package_manifest:
+            raise ValueError("`package_manifest` attribute is not set")