fractal-server 2.13.1__py3-none-any.whl → 2.14.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/__main__.py +3 -1
- fractal_server/app/models/linkusergroup.py +6 -2
- fractal_server/app/models/v2/__init__.py +7 -1
- fractal_server/app/models/v2/dataset.py +1 -11
- fractal_server/app/models/v2/history.py +78 -0
- fractal_server/app/models/v2/job.py +10 -3
- fractal_server/app/models/v2/task_group.py +2 -2
- fractal_server/app/models/v2/workflow.py +1 -1
- fractal_server/app/models/v2/workflowtask.py +1 -1
- fractal_server/app/routes/admin/v2/accounting.py +18 -28
- fractal_server/app/routes/admin/v2/task.py +1 -1
- fractal_server/app/routes/admin/v2/task_group.py +0 -17
- fractal_server/app/routes/api/__init__.py +1 -1
- fractal_server/app/routes/api/v2/__init__.py +8 -2
- fractal_server/app/routes/api/v2/_aux_functions.py +66 -0
- fractal_server/app/routes/api/v2/_aux_functions_history.py +166 -0
- fractal_server/app/routes/api/v2/dataset.py +0 -17
- fractal_server/app/routes/api/v2/history.py +544 -0
- fractal_server/app/routes/api/v2/images.py +31 -43
- fractal_server/app/routes/api/v2/job.py +30 -0
- fractal_server/app/routes/api/v2/project.py +1 -53
- fractal_server/app/routes/api/v2/{status.py → status_legacy.py} +6 -6
- fractal_server/app/routes/api/v2/submit.py +16 -14
- fractal_server/app/routes/api/v2/task.py +3 -10
- fractal_server/app/routes/api/v2/task_collection_custom.py +4 -9
- fractal_server/app/routes/api/v2/task_group.py +0 -17
- fractal_server/app/routes/api/v2/verify_image_types.py +61 -0
- fractal_server/app/routes/api/v2/workflow.py +28 -69
- fractal_server/app/routes/api/v2/workflowtask.py +53 -50
- fractal_server/app/routes/auth/group.py +0 -16
- fractal_server/app/routes/auth/oauth.py +5 -3
- fractal_server/app/routes/pagination.py +47 -0
- fractal_server/app/runner/components.py +0 -3
- fractal_server/app/runner/compress_folder.py +57 -29
- fractal_server/app/runner/exceptions.py +4 -0
- fractal_server/app/runner/executors/base_runner.py +157 -0
- fractal_server/app/runner/{v2/_local/_local_config.py → executors/local/get_local_config.py} +7 -9
- fractal_server/app/runner/executors/local/runner.py +248 -0
- fractal_server/app/runner/executors/{slurm → slurm_common}/_batching.py +1 -1
- fractal_server/app/runner/executors/{slurm → slurm_common}/_slurm_config.py +9 -7
- fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +868 -0
- fractal_server/app/runner/{v2/_slurm_common → executors/slurm_common}/get_slurm_config.py +48 -17
- fractal_server/app/runner/executors/{slurm → slurm_common}/remote.py +36 -47
- fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py +134 -0
- fractal_server/app/runner/executors/slurm_ssh/runner.py +268 -0
- fractal_server/app/runner/executors/slurm_sudo/__init__.py +0 -0
- fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_subprocess_run_as_user.py +2 -83
- fractal_server/app/runner/executors/slurm_sudo/runner.py +193 -0
- fractal_server/app/runner/extract_archive.py +1 -3
- fractal_server/app/runner/task_files.py +134 -87
- fractal_server/app/runner/v2/__init__.py +0 -399
- fractal_server/app/runner/v2/_local.py +88 -0
- fractal_server/app/runner/v2/{_slurm_ssh/__init__.py → _slurm_ssh.py} +20 -19
- fractal_server/app/runner/v2/{_slurm_sudo/__init__.py → _slurm_sudo.py} +17 -15
- fractal_server/app/runner/v2/db_tools.py +119 -0
- fractal_server/app/runner/v2/runner.py +206 -95
- fractal_server/app/runner/v2/runner_functions.py +488 -187
- fractal_server/app/runner/v2/runner_functions_low_level.py +40 -43
- fractal_server/app/runner/v2/submit_workflow.py +358 -0
- fractal_server/app/runner/v2/task_interface.py +31 -0
- fractal_server/app/schemas/_validators.py +13 -24
- fractal_server/app/schemas/user.py +10 -7
- fractal_server/app/schemas/user_settings.py +9 -21
- fractal_server/app/schemas/v2/__init__.py +9 -1
- fractal_server/app/schemas/v2/dataset.py +12 -94
- fractal_server/app/schemas/v2/dumps.py +26 -9
- fractal_server/app/schemas/v2/history.py +80 -0
- fractal_server/app/schemas/v2/job.py +15 -8
- fractal_server/app/schemas/v2/manifest.py +14 -7
- fractal_server/app/schemas/v2/project.py +9 -7
- fractal_server/app/schemas/v2/status_legacy.py +35 -0
- fractal_server/app/schemas/v2/task.py +72 -77
- fractal_server/app/schemas/v2/task_collection.py +14 -32
- fractal_server/app/schemas/v2/task_group.py +10 -9
- fractal_server/app/schemas/v2/workflow.py +10 -11
- fractal_server/app/schemas/v2/workflowtask.py +2 -21
- fractal_server/app/security/__init__.py +3 -3
- fractal_server/app/security/signup_email.py +2 -2
- fractal_server/config.py +41 -46
- fractal_server/images/tools.py +23 -0
- fractal_server/migrations/versions/47351f8c7ebc_drop_dataset_filters.py +50 -0
- fractal_server/migrations/versions/9db60297b8b2_set_ondelete.py +250 -0
- fractal_server/migrations/versions/c90a7c76e996_job_id_in_history_run.py +41 -0
- fractal_server/migrations/versions/e81103413827_add_job_type_filters.py +36 -0
- fractal_server/migrations/versions/f37aceb45062_make_historyunit_logfile_required.py +39 -0
- fractal_server/migrations/versions/fbce16ff4e47_new_history_items.py +120 -0
- fractal_server/ssh/_fabric.py +28 -14
- fractal_server/tasks/v2/local/collect.py +2 -2
- fractal_server/tasks/v2/ssh/collect.py +2 -2
- fractal_server/tasks/v2/templates/2_pip_install.sh +1 -1
- fractal_server/tasks/v2/templates/4_pip_show.sh +1 -1
- fractal_server/tasks/v2/utils_background.py +0 -19
- fractal_server/tasks/v2/utils_database.py +30 -17
- fractal_server/tasks/v2/utils_templates.py +6 -0
- {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/METADATA +4 -4
- {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/RECORD +106 -96
- {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/WHEEL +1 -1
- fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py +0 -126
- fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py +0 -116
- fractal_server/app/runner/executors/slurm/ssh/executor.py +0 -1386
- fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py +0 -71
- fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py +0 -130
- fractal_server/app/runner/executors/slurm/sudo/executor.py +0 -1281
- fractal_server/app/runner/v2/_local/__init__.py +0 -132
- fractal_server/app/runner/v2/_local/_submit_setup.py +0 -52
- fractal_server/app/runner/v2/_local/executor.py +0 -100
- fractal_server/app/runner/v2/_slurm_ssh/_submit_setup.py +0 -83
- fractal_server/app/runner/v2/_slurm_sudo/_submit_setup.py +0 -83
- fractal_server/app/runner/v2/handle_failed_job.py +0 -59
- fractal_server/app/schemas/v2/status.py +0 -16
- /fractal_server/app/{runner/executors/slurm → history}/__init__.py +0 -0
- /fractal_server/app/runner/executors/{slurm/ssh → local}/__init__.py +0 -0
- /fractal_server/app/runner/executors/{slurm/sudo → slurm_common}/__init__.py +0 -0
- /fractal_server/app/runner/executors/{_job_states.py → slurm_common/_job_states.py} +0 -0
- /fractal_server/app/runner/executors/{slurm → slurm_common}/utils_executors.py +0 -0
- /fractal_server/app/runner/{v2/_slurm_common → executors/slurm_ssh}/__init__.py +0 -0
- {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/LICENSE +0 -0
- {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/entry_points.txt +0 -0
fractal_server/config.py
CHANGED
@@ -28,6 +28,7 @@ from pydantic import EmailStr
 from pydantic import Field
 from pydantic import field_validator
 from pydantic import model_validator
+from pydantic import SecretStr
 from pydantic_settings import BaseSettings
 from pydantic_settings import SettingsConfigDict
 from sqlalchemy.engine import URL
@@ -54,8 +55,8 @@ class MailSettings(BaseModel):
     recipients: list[EmailStr] = Field(min_length=1)
     smtp_server: str
     port: int
-    encrypted_password: Optional[
-    encryption_key: Optional[
+    encrypted_password: Optional[SecretStr] = None
+    encryption_key: Optional[SecretStr] = None
     instance_name: str
     use_starttls: bool
     use_login: bool
@@ -97,7 +98,7 @@ class OAuthClientConfig(BaseModel):
 
     CLIENT_NAME: str
     CLIENT_ID: str
-    CLIENT_SECRET:
+    CLIENT_SECRET: SecretStr
     OIDC_CONFIGURATION_ENDPOINT: Optional[str] = None
     REDIRECT_URL: Optional[str] = None
 
@@ -137,7 +138,7 @@ class Settings(BaseSettings):
     JWT token lifetime, in seconds.
     """
 
-    JWT_SECRET_KEY: Optional[
+    JWT_SECRET_KEY: Optional[SecretStr] = None
     """
     JWT secret
 
@@ -204,7 +205,7 @@ class Settings(BaseSettings):
     """
     User to use when connecting to the PostgreSQL database.
     """
-    POSTGRES_PASSWORD: Optional[
+    POSTGRES_PASSWORD: Optional[SecretStr] = None
     """
     Password to use when connecting to the PostgreSQL database.
     """
@@ -223,10 +224,15 @@ class Settings(BaseSettings):
 
     @property
     def DATABASE_ASYNC_URL(self) -> URL:
+        if self.POSTGRES_PASSWORD is None:
+            password = None
+        else:
+            password = self.POSTGRES_PASSWORD.get_secret_value()
+
         url = URL.create(
             drivername="postgresql+psycopg",
             username=self.POSTGRES_USER,
-            password=
+            password=password,
             host=self.POSTGRES_HOST,
             port=self.POSTGRES_PORT,
             database=self.POSTGRES_DB,
@@ -250,7 +256,7 @@ class Settings(BaseSettings):
     default admin credentials.
     """
 
-    FRACTAL_DEFAULT_ADMIN_PASSWORD:
+    FRACTAL_DEFAULT_ADMIN_PASSWORD: SecretStr = "1234"
     """
     Admin default password, used upon creation of the first superuser during
     server startup.
@@ -483,8 +489,12 @@ class Settings(BaseSettings):
     FRACTAL_SLURM_POLL_INTERVAL: int = 5
     """
     Interval to wait (in seconds) before checking whether unfinished job are
-    still running on SLURM
-
+    still running on SLURM.
+    """
+
+    FRACTAL_SLURM_INTERVAL_BEFORE_RETRIEVAL: int = 2
+    """
+    NOTE: see issue 2444
     """
 
     FRACTAL_SLURM_SBATCH_SLEEP: float = 0
@@ -502,14 +512,6 @@ class Settings(BaseSettings):
     `JobExecutionError`.
     """
 
-    FRACTAL_RUNNER_TASKS_INCLUDE_IMAGE: str = (
-        "Copy OME-Zarr structure;Convert Metadata Components from 2D to 3D"
-    )
-    """
-    `;`-separated list of names for task that require the `metadata["image"]`
-    attribute in their input-arguments JSON file.
-    """
-
     FRACTAL_PIP_CACHE_DIR: Optional[str] = None
     """
     Absolute path to the cache directory for `pip`; if unset,
@@ -587,21 +589,21 @@ class Settings(BaseSettings):
     """
     Address of the OAuth-signup email sender.
     """
-    FRACTAL_EMAIL_PASSWORD: Optional[
+    FRACTAL_EMAIL_PASSWORD: Optional[SecretStr] = None
     """
     Password for the OAuth-signup email sender.
     """
-    FRACTAL_EMAIL_PASSWORD_KEY: Optional[
+    FRACTAL_EMAIL_PASSWORD_KEY: Optional[SecretStr] = None
     """
     Key value for `cryptography.fernet` decrypt
     """
     FRACTAL_EMAIL_SMTP_SERVER: Optional[str] = None
     """
-
+    SMTP server for the OAuth-signup emails.
     """
     FRACTAL_EMAIL_SMTP_PORT: Optional[int] = None
     """
-
+    SMTP server port for the OAuth-signup emails.
     """
     FRACTAL_EMAIL_INSTANCE_NAME: Optional[str] = None
     """
@@ -661,8 +663,12 @@ class Settings(BaseSettings):
             )
             try:
                 (
-                    Fernet(
-
+                    Fernet(
+                        self.FRACTAL_EMAIL_PASSWORD_KEY.get_secret_value()
+                    )
+                    .decrypt(
+                        self.FRACTAL_EMAIL_PASSWORD.get_secret_value()
+                    )
                     .decode("utf-8")
                 )
             except Exception as e:
@@ -671,14 +677,22 @@ class Settings(BaseSettings):
                     "FRACTAL_EMAIL_PASSWORD_KEY). "
                     f"Original error: {str(e)}."
                 )
+            password = self.FRACTAL_EMAIL_PASSWORD.get_secret_value()
+        else:
+            password = None
+
+        if self.FRACTAL_EMAIL_PASSWORD_KEY is not None:
+            key = self.FRACTAL_EMAIL_PASSWORD_KEY.get_secret_value()
+        else:
+            key = None
 
         self.email_settings = MailSettings(
             sender=self.FRACTAL_EMAIL_SENDER,
             recipients=self.FRACTAL_EMAIL_RECIPIENTS.split(","),
             smtp_server=self.FRACTAL_EMAIL_SMTP_SERVER,
             port=self.FRACTAL_EMAIL_SMTP_PORT,
-            encrypted_password=
-            encryption_key=
+            encrypted_password=password,
+            encryption_key=key,
             instance_name=self.FRACTAL_EMAIL_INSTANCE_NAME,
             use_starttls=use_starttls,
             use_login=use_login,
@@ -705,7 +719,7 @@ class Settings(BaseSettings):
 
         info = f"FRACTAL_RUNNER_BACKEND={self.FRACTAL_RUNNER_BACKEND}"
         if self.FRACTAL_RUNNER_BACKEND == "slurm":
-            from fractal_server.app.runner.executors.
+            from fractal_server.app.runner.executors.slurm_common._slurm_config import (  # noqa: E501
                 load_slurm_config_file,
             )
 
@@ -735,7 +749,7 @@ class Settings(BaseSettings):
                     f"Must set FRACTAL_SLURM_WORKER_PYTHON when {info}"
                 )
 
-            from fractal_server.app.runner.executors.
+            from fractal_server.app.runner.executors.slurm_common._slurm_config import (  # noqa: E501
                 load_slurm_config_file,
             )
 
@@ -796,25 +810,6 @@ class Settings(BaseSettings):
         self.check_db()
         self.check_runner()
 
-    def get_sanitized(self) -> dict:
-        def _must_be_sanitized(string) -> bool:
-            if not string.upper().startswith("FRACTAL") or any(
-                s in string.upper()
-                for s in ["PASSWORD", "SECRET", "PWD", "TOKEN", "KEY"]
-            ):
-                return True
-            else:
-                return False
-
-        sanitized_settings = {}
-        for k, v in self.model_dump().items():
-            if _must_be_sanitized(k):
-                sanitized_settings[k] = "***"
-            else:
-                sanitized_settings[k] = v
-
-        return sanitized_settings
-
 
 def get_settings(settings=Settings()) -> Settings:
     return settings
fractal_server/images/tools.py
CHANGED
@@ -121,3 +121,26 @@ def merge_type_filters(
     merged_dict = task_input_types
     merged_dict.update(wftask_type_filters)
     return merged_dict
+
+
+def aggregate_attributes(images: list[dict[str, Any]]) -> dict[str, list[Any]]:
+    """
+    Given a list of images, this function returns a dictionary of all image
+    attributes, each mapped to a list of present values.
+    """
+    attributes = {}
+    for image in images:
+        for k, v in image["attributes"].items():
+            attributes.setdefault(k, []).append(v)
+    for k, v in attributes.items():
+        attributes[k] = list(set(v))
+    return attributes
+
+
+def aggregate_types(images: list[dict[str, Any]]) -> list[str]:
+    """
+    Given a list of images, this function returns a list of all image types.
+    """
+    return list(
+        set(type for image in images for type in image["types"].keys())
+    )
fractal_server/migrations/versions/47351f8c7ebc_drop_dataset_filters.py
ADDED
@@ -0,0 +1,50 @@
+"""Drop dataset filters
+
+Revision ID: 47351f8c7ebc
+Revises: fbce16ff4e47
+Create Date: 2025-03-26 11:10:17.869028
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "47351f8c7ebc"
+down_revision = "fbce16ff4e47"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.drop_column("type_filters")
+        batch_op.drop_column("attribute_filters")
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "attribute_filters",
+                postgresql.JSON(astext_type=sa.Text()),
+                server_default=sa.text("'{}'::json"),
+                autoincrement=False,
+                nullable=False,
+            )
+        )
+        batch_op.add_column(
+            sa.Column(
+                "type_filters",
+                postgresql.JSON(astext_type=sa.Text()),
+                server_default=sa.text("'{}'::json"),
+                autoincrement=False,
+                nullable=False,
+            )
+        )
+
+    # ### end Alembic commands ###
fractal_server/migrations/versions/9db60297b8b2_set_ondelete.py
ADDED
@@ -0,0 +1,250 @@
+"""Set ondelete
+
+Revision ID: 9db60297b8b2
+Revises: e81103413827
+Create Date: 2025-04-07 13:13:14.596394
+
+"""
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "9db60297b8b2"
+down_revision = "e81103413827"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            "fk_datasetv2_project_id_projectv2", type_="foreignkey"
+        )
+        batch_op.create_foreign_key(
+            batch_op.f("fk_datasetv2_project_id_projectv2"),
+            "projectv2",
+            ["project_id"],
+            ["id"],
+            ondelete="CASCADE",
+        )
+
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            "fk_jobv2_dataset_id_datasetv2", type_="foreignkey"
+        )
+        batch_op.drop_constraint(
+            "fk_jobv2_project_id_projectv2", type_="foreignkey"
+        )
+        batch_op.drop_constraint(
+            "fk_jobv2_workflow_id_workflowv2", type_="foreignkey"
+        )
+        batch_op.create_foreign_key(
+            batch_op.f("fk_jobv2_workflow_id_workflowv2"),
+            "workflowv2",
+            ["workflow_id"],
+            ["id"],
+            ondelete="SET NULL",
+        )
+        batch_op.create_foreign_key(
+            batch_op.f("fk_jobv2_dataset_id_datasetv2"),
+            "datasetv2",
+            ["dataset_id"],
+            ["id"],
+            ondelete="SET NULL",
+        )
+        batch_op.create_foreign_key(
+            batch_op.f("fk_jobv2_project_id_projectv2"),
+            "projectv2",
+            ["project_id"],
+            ["id"],
+            ondelete="SET NULL",
+        )
+
+    with op.batch_alter_table("linkusergroup", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            "fk_linkusergroup_user_id_user_oauth", type_="foreignkey"
+        )
+        batch_op.drop_constraint(
+            "fk_linkusergroup_group_id_usergroup", type_="foreignkey"
+        )
+        batch_op.create_foreign_key(
+            batch_op.f("fk_linkusergroup_group_id_usergroup"),
+            "usergroup",
+            ["group_id"],
+            ["id"],
+            ondelete="CASCADE",
+        )
+        batch_op.create_foreign_key(
+            batch_op.f("fk_linkusergroup_user_id_user_oauth"),
+            "user_oauth",
+            ["user_id"],
+            ["id"],
+            ondelete="CASCADE",
+        )
+
+    with op.batch_alter_table("taskgroupactivityv2", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            "fk_taskgroupactivityv2_taskgroupv2_id_taskgroupv2",
+            type_="foreignkey",
+        )
+        batch_op.create_foreign_key(
+            batch_op.f("fk_taskgroupactivityv2_taskgroupv2_id_taskgroupv2"),
+            "taskgroupv2",
+            ["taskgroupv2_id"],
+            ["id"],
+            ondelete="SET NULL",
+        )
+
+    with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            "fk_taskgroupv2_user_group_id_usergroup", type_="foreignkey"
+        )
+        batch_op.create_foreign_key(
+            batch_op.f("fk_taskgroupv2_user_group_id_usergroup"),
+            "usergroup",
+            ["user_group_id"],
+            ["id"],
+            ondelete="SET NULL",
+        )
+
+    with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            "fk_workflowtaskv2_workflow_id_workflowv2", type_="foreignkey"
+        )
+        batch_op.create_foreign_key(
+            batch_op.f("fk_workflowtaskv2_workflow_id_workflowv2"),
+            "workflowv2",
+            ["workflow_id"],
+            ["id"],
+            ondelete="CASCADE",
+        )
+
+    with op.batch_alter_table("workflowv2", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            "fk_workflowv2_project_id_projectv2", type_="foreignkey"
+        )
+        batch_op.create_foreign_key(
+            batch_op.f("fk_workflowv2_project_id_projectv2"),
+            "projectv2",
+            ["project_id"],
+            ["id"],
+            ondelete="CASCADE",
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("workflowv2", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            batch_op.f("fk_workflowv2_project_id_projectv2"),
+            type_="foreignkey",
+        )
+        batch_op.create_foreign_key(
+            "fk_workflowv2_project_id_projectv2",
+            "projectv2",
+            ["project_id"],
+            ["id"],
+        )
+
+    with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            batch_op.f("fk_workflowtaskv2_workflow_id_workflowv2"),
+            type_="foreignkey",
+        )
+        batch_op.create_foreign_key(
+            "fk_workflowtaskv2_workflow_id_workflowv2",
+            "workflowv2",
+            ["workflow_id"],
+            ["id"],
+        )
+
+    with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            batch_op.f("fk_taskgroupv2_user_group_id_usergroup"),
+            type_="foreignkey",
+        )
+        batch_op.create_foreign_key(
+            "fk_taskgroupv2_user_group_id_usergroup",
+            "usergroup",
+            ["user_group_id"],
+            ["id"],
+        )
+
+    with op.batch_alter_table("taskgroupactivityv2", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            batch_op.f("fk_taskgroupactivityv2_taskgroupv2_id_taskgroupv2"),
+            type_="foreignkey",
+        )
+        batch_op.create_foreign_key(
+            "fk_taskgroupactivityv2_taskgroupv2_id_taskgroupv2",
+            "taskgroupv2",
+            ["taskgroupv2_id"],
+            ["id"],
+        )
+
+    with op.batch_alter_table("linkusergroup", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            batch_op.f("fk_linkusergroup_user_id_user_oauth"),
+            type_="foreignkey",
+        )
+        batch_op.drop_constraint(
+            batch_op.f("fk_linkusergroup_group_id_usergroup"),
+            type_="foreignkey",
+        )
+        batch_op.create_foreign_key(
+            "fk_linkusergroup_group_id_usergroup",
+            "usergroup",
+            ["group_id"],
+            ["id"],
+        )
+        batch_op.create_foreign_key(
+            "fk_linkusergroup_user_id_user_oauth",
+            "user_oauth",
+            ["user_id"],
+            ["id"],
+        )
+
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            batch_op.f("fk_jobv2_project_id_projectv2"), type_="foreignkey"
+        )
+        batch_op.drop_constraint(
+            batch_op.f("fk_jobv2_dataset_id_datasetv2"), type_="foreignkey"
+        )
+        batch_op.drop_constraint(
+            batch_op.f("fk_jobv2_workflow_id_workflowv2"), type_="foreignkey"
+        )
+        batch_op.create_foreign_key(
+            "fk_jobv2_workflow_id_workflowv2",
+            "workflowv2",
+            ["workflow_id"],
+            ["id"],
+        )
+        batch_op.create_foreign_key(
+            "fk_jobv2_project_id_projectv2",
+            "projectv2",
+            ["project_id"],
+            ["id"],
+        )
+        batch_op.create_foreign_key(
+            "fk_jobv2_dataset_id_datasetv2",
+            "datasetv2",
+            ["dataset_id"],
+            ["id"],
+        )
+
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            batch_op.f("fk_datasetv2_project_id_projectv2"), type_="foreignkey"
+        )
+        batch_op.create_foreign_key(
+            "fk_datasetv2_project_id_projectv2",
+            "projectv2",
+            ["project_id"],
+            ["id"],
+        )
+
+    # ### end Alembic commands ###
fractal_server/migrations/versions/c90a7c76e996_job_id_in_history_run.py
ADDED
@@ -0,0 +1,41 @@
+"""job id in history run
+
+Revision ID: c90a7c76e996
+Revises: f37aceb45062
+Create Date: 2025-04-16 10:44:30.219309
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "c90a7c76e996"
+down_revision = "f37aceb45062"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("historyrun", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("job_id", sa.Integer(), nullable=False))
+        batch_op.create_foreign_key(
+            batch_op.f("fk_historyrun_job_id_jobv2"),
+            "jobv2",
+            ["job_id"],
+            ["id"],
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("historyrun", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            batch_op.f("fk_historyrun_job_id_jobv2"), type_="foreignkey"
+        )
+        batch_op.drop_column("job_id")
+
+    # ### end Alembic commands ###
fractal_server/migrations/versions/e81103413827_add_job_type_filters.py
ADDED
@@ -0,0 +1,36 @@
+"""Add job.type_filters
+
+Revision ID: e81103413827
+Revises: 47351f8c7ebc
+Create Date: 2025-03-26 11:10:41.748248
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "e81103413827"
+down_revision = "47351f8c7ebc"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "type_filters", sa.JSON(), server_default="{}", nullable=False
+            )
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.drop_column("type_filters")
+
+    # ### end Alembic commands ###
fractal_server/migrations/versions/f37aceb45062_make_historyunit_logfile_required.py
ADDED
@@ -0,0 +1,39 @@
+"""Make HistoryUnit.logfile required
+
+Revision ID: f37aceb45062
+Revises: 9db60297b8b2
+Create Date: 2025-04-14 13:49:40.910342
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "f37aceb45062"
+down_revision = "9db60297b8b2"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("historyunit", schema=None) as batch_op:
+        batch_op.alter_column(
+            "logfile",
+            existing_type=sa.VARCHAR(),
+            nullable=False,
+            server_default="__LOGFILE_PLACEHOLDER__",
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("historyunit", schema=None) as batch_op:
+        batch_op.alter_column(
+            "logfile", existing_type=sa.VARCHAR(), nullable=True
+        )
+
+    # ### end Alembic commands ###