fractal-server 1.3.12a4__py3-none-any.whl → 1.3.13__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- fractal_server/__init__.py +1 -1
- fractal_server/app/db/__init__.py +11 -3
- fractal_server/app/models/dataset.py +3 -1
- fractal_server/app/runner/__init__.py +2 -3
- fractal_server/config.py +5 -0
- fractal_server/migrations/script.py.mako +0 -1
- fractal_server/migrations/versions/99ea79d9e5d2_add_dataset_history.py +36 -0
- fractal_server/tasks/collection.py +2 -2
- {fractal_server-1.3.12a4.dist-info → fractal_server-1.3.13.dist-info}/METADATA +2 -4
- {fractal_server-1.3.12a4.dist-info → fractal_server-1.3.13.dist-info}/RECORD +13 -13
- fractal_server/migrations/versions/5c4ff7526508_add_dataset_history.py +0 -35
- {fractal_server-1.3.12a4.dist-info → fractal_server-1.3.13.dist-info}/LICENSE +0 -0
- {fractal_server-1.3.12a4.dist-info → fractal_server-1.3.13.dist-info}/WHEEL +0 -0
- {fractal_server-1.3.12a4.dist-info → fractal_server-1.3.13.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
```diff
@@ -1 +1 @@
-__VERSION__ = "1.3.12a4"
+__VERSION__ = "1.3.13"
```
fractal_server/app/db/__init__.py
CHANGED
```diff
@@ -6,6 +6,7 @@ from typing import AsyncGenerator
 from typing import Generator
 
 from sqlalchemy import create_engine
+from sqlalchemy import event
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.ext.asyncio import create_async_engine
 from sqlalchemy.orm import Session as DBSyncSession
@@ -49,9 +50,9 @@ class DB:
 
         if settings.DB_ENGINE == "sqlite":
             logger.warning(
-                "SQLite is supported
-                "partial support for ForeignKey
-                "
+                "SQLite is supported (for version >=3.37) but discouraged "
+                "in production. Given its partial support for ForeignKey "
+                "constraints, database consistency cannot be guaranteed."
             )
 
         # Set some sqlite-specific options
@@ -86,6 +87,13 @@ class DB:
             bind=cls._engine_sync, autocommit=False, autoflush=False
         )
 
+        @event.listens_for(cls._engine_sync, "connect")
+        def set_sqlite_pragma(dbapi_connection, connection_record):
+            if settings.DB_ENGINE == "sqlite":
+                cursor = dbapi_connection.cursor()
+                cursor.execute("PRAGMA journal_mode=WAL")
+                cursor.close()
+
     @classmethod
     async def get_db(cls) -> AsyncGenerator[AsyncSession, None]:
         """
```
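For context, the new `event.listens_for(..., "connect")` hook switches SQLite to write-ahead-log journaling on every new DBAPI connection. A minimal, self-contained sketch of the same pattern; the engine URL and database file name here are illustrative, not taken from fractal-server:

```python
from sqlalchemy import create_engine, event

# Illustrative engine; fractal-server builds its engines from its Settings object.
engine = create_engine("sqlite:///example.db")


@event.listens_for(engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
    # Runs once per new DBAPI connection; WAL allows concurrent readers
    # while a single writer holds the database.
    cursor = dbapi_connection.cursor()
    cursor.execute("PRAGMA journal_mode=WAL")
    cursor.close()


with engine.connect() as conn:
    # journal_mode persists for the database file once set.
    print(conn.exec_driver_sql("PRAGMA journal_mode").scalar())  # -> "wal"
```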
fractal_server/app/models/dataset.py
CHANGED
```diff
@@ -43,7 +43,9 @@ class Dataset(_DatasetBase, SQLModel, table=True):
         }
     )
     meta: dict[str, Any] = Field(sa_column=Column(JSON), default={})
-    history: list[dict[str, Any]] = Field(
+    history: list[dict[str, Any]] = Field(
+        sa_column=Column(JSON, server_default="[]", nullable=False)
+    )
 
     class Config:
         arbitrary_types_allowed = True
```
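For reference, a standalone sketch of an SQLModel field backed by a JSON column with a server-side default, in the spirit of the change above; the model name is illustrative, not the actual fractal-server class:

```python
from typing import Any, Optional

from sqlalchemy import JSON, Column
from sqlmodel import Field, SQLModel


class ExampleDataset(SQLModel, table=True):
    # Illustrative stand-in for the real Dataset model.
    id: Optional[int] = Field(default=None, primary_key=True)
    # server_default="[]" lets the database fill rows with an empty JSON list
    # when no value is supplied, and nullable=False forbids NULL history.
    history: list[dict[str, Any]] = Field(
        sa_column=Column(JSON, server_default="[]", nullable=False)
    )
```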
fractal_server/app/runner/__init__.py
CHANGED
```diff
@@ -248,9 +248,8 @@ async def submit_workflow(
     )
     logger.debug(f'END workflow "{workflow.name}"')
 
-    # Replace output_dataset.meta
-    #
-    # existing entry rather than replacing it)
+    # Replace output_dataset.meta and output_dataset.history with their
+    # up-to-date versions, obtained within process_workflow
     output_dataset.history = output_dataset_meta_hist.pop("history")
     output_dataset.meta = output_dataset_meta_hist.pop("metadata")
 
```
fractal_server/config.py
CHANGED
```diff
@@ -419,4 +419,9 @@ class Settings(BaseSettings):
 
 
 def get_settings(settings=Settings()) -> Settings:
+    logging.debug("Fractal Settings:")
+    for key, value in settings.dict().items():
+        if any(s in key.upper() for s in ["PASSWORD", "SECRET"]):
+            value = "*****"
+        logging.debug(f"{key}: {value}")
     return settings
```
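The new block logs every setting at DEBUG level while redacting anything that looks like a credential. The same masking rule as a standalone sketch; the settings keys and values below are made up for illustration:

```python
import logging

logging.basicConfig(level=logging.DEBUG)

# Hypothetical settings values, just to exercise the masking rule.
settings_dict = {
    "DB_ENGINE": "sqlite",
    "JWT_SECRET_KEY": "super-secret",
    "POSTGRES_PASSWORD": "hunter2",
}

logging.debug("Fractal Settings:")
for key, value in settings_dict.items():
    # Any key containing PASSWORD or SECRET is redacted before logging.
    if any(s in key.upper() for s in ["PASSWORD", "SECRET"]):
        value = "*****"
    logging.debug(f"{key}: {value}")
```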
fractal_server/migrations/versions/99ea79d9e5d2_add_dataset_history.py
ADDED
```diff
@@ -0,0 +1,36 @@
+"""Add Dataset.history
+
+Revision ID: 99ea79d9e5d2
+Revises: 8f79bd162e35
+Create Date: 2023-10-16 09:45:15.132185
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "99ea79d9e5d2"
+down_revision = "8f79bd162e35"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("dataset", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "history", sa.JSON(), server_default="[]", nullable=False
+            )
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("dataset", schema=None) as batch_op:
+        batch_op.drop_column("history")
+
+    # ### end Alembic commands ###
```
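Like any Alembic revision, this migration is applied by upgrading the database to head. A generic sketch of doing so programmatically; the `alembic.ini` path is illustrative, and fractal-server may wrap this step in its own tooling:

```python
from alembic import command
from alembic.config import Config

# Illustrative config path; point this at the project's alembic.ini.
alembic_cfg = Config("alembic.ini")

# Apply all pending revisions, including 99ea79d9e5d2 (Add Dataset.history).
command.upgrade(alembic_cfg, "head")
```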
fractal_server/tasks/collection.py
CHANGED
```diff
@@ -83,7 +83,7 @@ def get_absolute_venv_path(venv_path: Path) -> Path:
         package_path = venv_path
     else:
         settings = Inject(get_settings)
-        package_path = settings.FRACTAL_TASKS_DIR / venv_path
+        package_path = settings.FRACTAL_TASKS_DIR / venv_path
     return package_path
 
 
@@ -214,7 +214,7 @@ def create_package_dir_pip(
         "with `version=None`."
     )
     package_dir = f"{task_pkg.package}{task_pkg.package_version}"
-    venv_path = settings.FRACTAL_TASKS_DIR / user / package_dir
+    venv_path = settings.FRACTAL_TASKS_DIR / user / package_dir
     if create:
         venv_path.mkdir(exist_ok=False, parents=True)
     return venv_path
```
{fractal_server-1.3.12a4.dist-info → fractal_server-1.3.13.dist-info}/METADATA
CHANGED
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fractal-server
-Version: 1.3.12a4
+Version: 1.3.13
 Summary: Server component of the Fractal analytics platform
 Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause
@@ -15,7 +15,6 @@ Classifier: Programming Language :: Python :: 3.11
 Provides-Extra: gunicorn
 Provides-Extra: postgres
 Provides-Extra: slurm
-Requires-Dist: SQLAlchemy-Utils (>=0.38.3,<0.39.0)
 Requires-Dist: aiosqlite (>=0.17.0,<0.18.0)
 Requires-Dist: alembic (>=1.9.1,<2.0.0)
 Requires-Dist: asyncpg (>=0.27.0,<0.28.0) ; extra == "postgres"
@@ -23,12 +22,11 @@ Requires-Dist: cloudpickle (>=2.2.1,<2.3.0) ; extra == "slurm"
 Requires-Dist: clusterfutures (>=0.5,<0.6) ; extra == "slurm"
 Requires-Dist: fastapi (>=0.103.0,<0.104.0)
 Requires-Dist: fastapi-users[oauth] (>=12.1.0,<13.0.0)
-Requires-Dist: greenlet
 Requires-Dist: gunicorn (>=20.1.0,<21.0.0) ; extra == "gunicorn"
 Requires-Dist: psycopg2 (>=2.9.5,<3.0.0) ; extra == "postgres"
 Requires-Dist: pydantic (>=1.10.8,<2)
 Requires-Dist: python-dotenv (>=0.20.0,<0.21.0)
-Requires-Dist: sqlalchemy (>=1.4,<2.0)
+Requires-Dist: sqlalchemy[asyncio] (>=1.4,<2.0)
 Requires-Dist: sqlmodel (>=0.0.8,<0.0.9)
 Requires-Dist: uvicorn (>=0.20.0,<0.21.0)
 Project-URL: Changelog, https://github.com/fractal-analytics-platform/fractal-server/blob/main/CHANGELOG.md
```
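The move from a bare `sqlalchemy` pin to `sqlalchemy[asyncio]` declares the async extra (which provides greenlet, previously listed explicitly) as part of the dependency. A minimal sketch of the async-engine usage that this extra supports, assuming the `aiosqlite` driver already listed above; the database URL is illustrative, not how fractal-server builds its own:

```python
import asyncio

from sqlalchemy.ext.asyncio import create_async_engine


async def main() -> None:
    # Illustrative in-memory database; fractal-server derives URLs from Settings.
    engine = create_async_engine("sqlite+aiosqlite:///:memory:")
    async with engine.connect() as conn:
        result = await conn.exec_driver_sql("SELECT 1")
        print(result.scalar())  # -> 1
    await engine.dispose()


asyncio.run(main())
```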
{fractal_server-1.3.12a4.dist-info → fractal_server-1.3.13.dist-info}/RECORD
CHANGED
```diff
@@ -1,4 +1,4 @@
-fractal_server/__init__.py,sha256=
+fractal_server/__init__.py,sha256=aodRNVuLLZcEWJQyyHbOzUEaiJFHqb1hYcHjrIJCw6A,23
 fractal_server/__main__.py,sha256=znijcImbcEC4P26ICOhEJ9VY3_5vWdMwQcl-WP25sYA,2202
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -12,9 +12,9 @@ fractal_server/app/api/v1/task.py,sha256=0MJNhn5f8KZy4XBMUoJNhrk3E6GBQWcVfKoQzP5
 fractal_server/app/api/v1/task_collection.py,sha256=mY1cSGepWvVz6IJCnFYA8iy4hU-8qsA1HbiQXZjg1OM,11697
 fractal_server/app/api/v1/workflow.py,sha256=A54m9PDD7MhT6s5-8kIrefEJ5uVxigxBohelT7XCLVo,9379
 fractal_server/app/api/v1/workflowtask.py,sha256=TIsCSBFImoRq0rz16ZVlFwTL-Qd9Uqywbq-DT4OxYh0,5421
-fractal_server/app/db/__init__.py,sha256=
+fractal_server/app/db/__init__.py,sha256=4DP-jOZDLawKwPTToOVTR0vO2bc-R7s0lwZUXFv7k5s,3464
 fractal_server/app/models/__init__.py,sha256=RuxWH8fsmkTWsjLhYjrxSt-mvk74coCilAQlX2Q6OO0,353
-fractal_server/app/models/dataset.py,sha256=
+fractal_server/app/models/dataset.py,sha256=TK2-tnEMfC3lAwSJMMJre-e2Ao6AbEqnhWTcVvcrJxo,1482
 fractal_server/app/models/job.py,sha256=eU1RIWg4C2n9BzjGdvOxqP-eueNNYdo6tpW2RBqZYrE,3753
 fractal_server/app/models/linkuserproject.py,sha256=RVtl25Q_N99uoVDE7wx0IN0SgFjc7Id5XbScsgrjv_E,309
 fractal_server/app/models/project.py,sha256=U3MvLVBestug5dCCw22VA-l8BeUnGoaNye5OPexsqoA,1191
@@ -23,7 +23,7 @@ fractal_server/app/models/state.py,sha256=0L4lcaaMyQE43Tk1DVYbvPFcsQ9OZDSzsEwJO-
 fractal_server/app/models/task.py,sha256=APndtea9A7EF7TtpVK8kWapBM01a6nk3FFCrQbbioI8,2632
 fractal_server/app/models/workflow.py,sha256=VlX-MNfqw3z-EVKMvwVR9HbnOFNHRnO-5sDYFQOSijQ,5191
 fractal_server/app/runner/.gitignore,sha256=ytzN_oyHWXrGU7iFAtoHSTUbM6Rn6kG0Zkddg0xZk6s,16
-fractal_server/app/runner/__init__.py,sha256=
+fractal_server/app/runner/__init__.py,sha256=Y-B9lpw3Hbuc9uTCG0qcDDz5uMvnWXyxd3B5nfpbEdw,12978
 fractal_server/app/runner/_common.py,sha256=XjyE8DZE6WECeFXI6i0vHVD6JywZQxkJgZrL-ep1USQ,22642
 fractal_server/app/runner/_local/__init__.py,sha256=mSJzpF6u6rgsSYO25szNVr2B296h7_iKD1eqS3o87Qo,6532
 fractal_server/app/runner/_local/_local_config.py,sha256=-oNTsjEUmytHlsYpWfw2CrPvSxDFeEhZSdQvI_wf3Mk,3245
@@ -52,26 +52,26 @@ fractal_server/app/schemas/task_collection.py,sha256=mPk6E1LK2UvnHkhIQWHmTztsVT9
 fractal_server/app/schemas/user.py,sha256=zhB-2WfJ30hNcHaW2V126v5i7rHl66fX_SRmIWCQrjM,1587
 fractal_server/app/schemas/workflow.py,sha256=oFoO62JH5hfMJjKoicdpyC5hd2O9XgqoAm5RN9YjXAI,4238
 fractal_server/app/security/__init__.py,sha256=DCUIaIgzatnmtABAO4bR9jISVSoGowHlIQIHuV5oLUU,10880
-fractal_server/config.py,sha256=
+fractal_server/config.py,sha256=UF9K_IfJU7-VPjCJbMDcdV11_DsmMxqq4ezTNBQASq8,14126
 fractal_server/logger.py,sha256=keri8i960WHT8Zz9Rm2MwfnrA2dw9TsrfCmojqtGDLs,4562
 fractal_server/main.py,sha256=9_T_cMqf0EfbfYwkYhKeU36v9PFi95BoydapKpmaTKc,5932
 fractal_server/migrations/README,sha256=4rQvyDfqodGhpJw74VYijRmgFP49ji5chyEemWGHsuw,59
 fractal_server/migrations/env.py,sha256=05EoWw0p43ojTNiz7UVG4lsl057B4ImSgXiHmiU-M80,2690
-fractal_server/migrations/script.py.mako,sha256=
+fractal_server/migrations/script.py.mako,sha256=oMXw9LC3zRbinWWPPDgeZ4z9FJrV2zhRWiYdS5YgNbI,526
 fractal_server/migrations/versions/4c308bcaea2b_add_task_args_schema_and_task_args_.py,sha256=-wHe-fOffmYeAm0JXVl_lxZ7hhDkaEVqxgxpHkb_uL8,954
 fractal_server/migrations/versions/50a13d6138fd_initial_schema.py,sha256=zwXegXs9J40eyCWi3w0c_iIBVJjXNn4VdVnQaT3KxDg,8770
-fractal_server/migrations/versions/5c4ff7526508_add_dataset_history.py,sha256=92f4Cial0SuGqoZrjW9j3U5jg5gWmw2Z2-SGOzWL-TM,990
 fractal_server/migrations/versions/70e77f1c38b0_add_applyworkflow_first_task_index_and_.py,sha256=Q-DsMzG3IcUV2Ol1dhJWosDvKERamBE6QvA2zzS5zpQ,1632
 fractal_server/migrations/versions/8f79bd162e35_add_docs_info_and_docs_link_to_task_.py,sha256=6pgODDtyAxevZvAJBj9IJ41inhV1RpwbpZr_qfPPu1A,1115
+fractal_server/migrations/versions/99ea79d9e5d2_add_dataset_history.py,sha256=0im6TxDr53sKKcjiPgeH4ftVRGnRXZSh2lPbRQ1Ir9w,883
 fractal_server/migrations/versions/a7f4d6137b53_add_workflow_dump_to_applyworkflow.py,sha256=ekDUML7ILpmdoqEclKbEUdyLi4uw9HSG_sTjG2hp_JE,867
 fractal_server/migrations/versions/f384e1c0cf5d_drop_task_default_args_columns.py,sha256=9BwqUS9Gf7UW_KjrzHbtViC880qhD452KAytkHWWZyk,746
 fractal_server/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/syringe.py,sha256=3qSMW3YaMKKnLdgnooAINOPxnCOxP7y2jeAQYB21Gdo,2786
 fractal_server/tasks/__init__.py,sha256=Wzuxf5EoH1v0fYzRpAZHG_S-Z9f6DmbIsuSvllBCGvc,72
-fractal_server/tasks/collection.py,sha256=
+fractal_server/tasks/collection.py,sha256=POKvQyS5G5ySybH0r0v21I_ZQ5AREe9kAqr_uFfGyaU,17627
 fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
-fractal_server-1.3.12a4.dist-info/LICENSE,sha256=
-fractal_server-1.3.12a4.dist-info/METADATA,sha256=
-fractal_server-1.3.12a4.dist-info/WHEEL,sha256=
-fractal_server-1.3.12a4.dist-info/entry_points.txt,sha256=
-fractal_server-1.3.12a4.dist-info/RECORD,,
+fractal_server-1.3.13.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-1.3.13.dist-info/METADATA,sha256=iMmIl4MHkNcyk9UrwvRy4Qk9zOjEJP_s-6JxPGLg-7w,3770
+fractal_server-1.3.13.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+fractal_server-1.3.13.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-1.3.13.dist-info/RECORD,,
```
fractal_server/migrations/versions/5c4ff7526508_add_dataset_history.py
DELETED
```diff
@@ -1,35 +0,0 @@
-"""Add Dataset.history
-
-Revision ID: 5c4ff7526508
-Revises: 8f79bd162e35
-Create Date: 2023-10-13 14:26:07.955329
-
-"""
-import sqlalchemy as sa
-from alembic import op
-from sqlalchemy.sql import column
-from sqlalchemy.sql import table
-
-
-# revision identifiers, used by Alembic.
-revision = "5c4ff7526508"
-down_revision = "8f79bd162e35"
-branch_labels = None
-depends_on = None
-
-
-def upgrade() -> None:
-    with op.batch_alter_table("dataset", schema=None) as batch_op:
-        batch_op.add_column(sa.Column("history", sa.JSON(), nullable=True))
-
-    with op.batch_alter_table("dataset", schema=None) as batch_op:
-        dataset = table("dataset", column("history"))
-        batch_op.execute(dataset.update().values(history=sa.JSON([])))
-
-    with op.batch_alter_table("dataset", schema=None) as batch_op:
-        batch_op.alter_column("dataset", nullable=False)
-
-
-def downgrade() -> None:
-    with op.batch_alter_table("dataset", schema=None) as batch_op:
-        batch_op.drop_column("history")
```
{fractal_server-1.3.12a4.dist-info → fractal_server-1.3.13.dist-info}/LICENSE
File without changes

{fractal_server-1.3.12a4.dist-info → fractal_server-1.3.13.dist-info}/WHEEL
File without changes

{fractal_server-1.3.12a4.dist-info → fractal_server-1.3.13.dist-info}/entry_points.txt
File without changes