fractal-server 1.4.10__py3-none-any.whl → 2.0.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +4 -7
  3. fractal_server/app/models/linkuserproject.py +9 -0
  4. fractal_server/app/models/security.py +6 -0
  5. fractal_server/app/models/state.py +1 -1
  6. fractal_server/app/models/v1/__init__.py +10 -0
  7. fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
  8. fractal_server/app/models/{job.py → v1/job.py} +5 -5
  9. fractal_server/app/models/{project.py → v1/project.py} +5 -5
  10. fractal_server/app/models/{task.py → v1/task.py} +7 -2
  11. fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
  12. fractal_server/app/models/v2/__init__.py +20 -0
  13. fractal_server/app/models/v2/dataset.py +55 -0
  14. fractal_server/app/models/v2/job.py +51 -0
  15. fractal_server/app/models/v2/project.py +31 -0
  16. fractal_server/app/models/v2/task.py +93 -0
  17. fractal_server/app/models/v2/workflow.py +43 -0
  18. fractal_server/app/models/v2/workflowtask.py +90 -0
  19. fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
  20. fractal_server/app/routes/admin/v2.py +275 -0
  21. fractal_server/app/routes/api/v1/__init__.py +7 -7
  22. fractal_server/app/routes/api/v1/_aux_functions.py +2 -2
  23. fractal_server/app/routes/api/v1/dataset.py +37 -37
  24. fractal_server/app/routes/api/v1/job.py +12 -12
  25. fractal_server/app/routes/api/v1/project.py +23 -21
  26. fractal_server/app/routes/api/v1/task.py +24 -14
  27. fractal_server/app/routes/api/v1/task_collection.py +16 -14
  28. fractal_server/app/routes/api/v1/workflow.py +24 -24
  29. fractal_server/app/routes/api/v1/workflowtask.py +10 -10
  30. fractal_server/app/routes/api/v2/__init__.py +28 -0
  31. fractal_server/app/routes/api/v2/_aux_functions.py +497 -0
  32. fractal_server/app/routes/api/v2/apply.py +220 -0
  33. fractal_server/app/routes/api/v2/dataset.py +310 -0
  34. fractal_server/app/routes/api/v2/images.py +212 -0
  35. fractal_server/app/routes/api/v2/job.py +200 -0
  36. fractal_server/app/routes/api/v2/project.py +205 -0
  37. fractal_server/app/routes/api/v2/task.py +222 -0
  38. fractal_server/app/routes/api/v2/task_collection.py +229 -0
  39. fractal_server/app/routes/api/v2/workflow.py +398 -0
  40. fractal_server/app/routes/api/v2/workflowtask.py +269 -0
  41. fractal_server/app/routes/aux/_job.py +1 -1
  42. fractal_server/app/runner/async_wrap.py +27 -0
  43. fractal_server/app/runner/exceptions.py +129 -0
  44. fractal_server/app/runner/executors/local/__init__.py +3 -0
  45. fractal_server/app/runner/{_local → executors/local}/executor.py +2 -2
  46. fractal_server/app/runner/executors/slurm/__init__.py +3 -0
  47. fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
  48. fractal_server/app/runner/{_slurm → executors/slurm}/_check_jobs_status.py +1 -1
  49. fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +1 -1
  50. fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
  51. fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
  52. fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +9 -9
  53. fractal_server/app/runner/filenames.py +6 -0
  54. fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
  55. fractal_server/app/runner/task_files.py +105 -0
  56. fractal_server/app/runner/{__init__.py → v1/__init__.py} +24 -22
  57. fractal_server/app/runner/{_common.py → v1/_common.py} +13 -120
  58. fractal_server/app/runner/{_local → v1/_local}/__init__.py +6 -6
  59. fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
  60. fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
  61. fractal_server/app/runner/v1/_slurm/__init__.py +310 -0
  62. fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +3 -9
  63. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
  64. fractal_server/app/runner/v1/common.py +117 -0
  65. fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
  66. fractal_server/app/runner/v2/__init__.py +337 -0
  67. fractal_server/app/runner/v2/_local/__init__.py +169 -0
  68. fractal_server/app/runner/v2/_local/_local_config.py +118 -0
  69. fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
  70. fractal_server/app/runner/v2/_slurm/__init__.py +157 -0
  71. fractal_server/app/runner/v2/_slurm/_submit_setup.py +83 -0
  72. fractal_server/app/runner/v2/_slurm/get_slurm_config.py +179 -0
  73. fractal_server/app/runner/v2/components.py +5 -0
  74. fractal_server/app/runner/v2/deduplicate_list.py +24 -0
  75. fractal_server/app/runner/v2/handle_failed_job.py +156 -0
  76. fractal_server/app/runner/v2/merge_outputs.py +41 -0
  77. fractal_server/app/runner/v2/runner.py +264 -0
  78. fractal_server/app/runner/v2/runner_functions.py +339 -0
  79. fractal_server/app/runner/v2/runner_functions_low_level.py +134 -0
  80. fractal_server/app/runner/v2/task_interface.py +43 -0
  81. fractal_server/app/runner/v2/v1_compat.py +21 -0
  82. fractal_server/app/schemas/__init__.py +4 -42
  83. fractal_server/app/schemas/v1/__init__.py +42 -0
  84. fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
  85. fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
  86. fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
  87. fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
  88. fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
  89. fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
  90. fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
  91. fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
  92. fractal_server/app/schemas/v2/__init__.py +34 -0
  93. fractal_server/app/schemas/v2/dataset.py +88 -0
  94. fractal_server/app/schemas/v2/dumps.py +87 -0
  95. fractal_server/app/schemas/v2/job.py +113 -0
  96. fractal_server/app/schemas/v2/manifest.py +109 -0
  97. fractal_server/app/schemas/v2/project.py +36 -0
  98. fractal_server/app/schemas/v2/task.py +121 -0
  99. fractal_server/app/schemas/v2/task_collection.py +105 -0
  100. fractal_server/app/schemas/v2/workflow.py +78 -0
  101. fractal_server/app/schemas/v2/workflowtask.py +118 -0
  102. fractal_server/config.py +5 -4
  103. fractal_server/images/__init__.py +50 -0
  104. fractal_server/images/tools.py +86 -0
  105. fractal_server/main.py +11 -3
  106. fractal_server/migrations/versions/4b35c5cefbe3_tmp_is_v2_compatible.py +39 -0
  107. fractal_server/migrations/versions/56af171b0159_v2.py +217 -0
  108. fractal_server/migrations/versions/876f28db9d4e_tmp_split_task_and_wftask_meta.py +68 -0
  109. fractal_server/migrations/versions/974c802f0dd0_tmp_workflowtaskv2_type_in_db.py +37 -0
  110. fractal_server/migrations/versions/9cd305cd6023_tmp_workflowtaskv2.py +40 -0
  111. fractal_server/migrations/versions/a6231ed6273c_tmp_args_schemas_in_taskv2.py +42 -0
  112. fractal_server/migrations/versions/b9e9eed9d442_tmp_taskv2_type.py +37 -0
  113. fractal_server/migrations/versions/e3e639454d4b_tmp_make_task_meta_non_optional.py +50 -0
  114. fractal_server/tasks/__init__.py +0 -5
  115. fractal_server/tasks/endpoint_operations.py +13 -19
  116. fractal_server/tasks/utils.py +35 -0
  117. fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
  118. fractal_server/tasks/{background_operations.py → v1/background_operations.py} +18 -50
  119. fractal_server/tasks/v1/get_collection_data.py +14 -0
  120. fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
  121. fractal_server/tasks/v2/background_operations.py +382 -0
  122. fractal_server/tasks/v2/get_collection_data.py +14 -0
  123. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/METADATA +1 -1
  124. fractal_server-2.0.0a0.dist-info/RECORD +166 -0
  125. fractal_server/app/runner/_slurm/.gitignore +0 -2
  126. fractal_server/app/runner/_slurm/__init__.py +0 -150
  127. fractal_server/app/runner/common.py +0 -311
  128. fractal_server-1.4.10.dist-info/RECORD +0 -98
  129. /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
  130. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/LICENSE +0 -0
  131. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/WHEEL +0 -0
  132. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,86 @@
1
+ from copy import copy
2
+ from typing import Any
3
+ from typing import Optional
4
+ from typing import Union
5
+
6
+ from fractal_server.images import Filters
7
+ from fractal_server.images import SingleImage
8
+
9
+
10
def find_image_by_path(
    *,
    images: list[dict[str, Any]],
    path: str,
) -> Optional[dict[str, Union[int, dict[str, Any]]]]:
    """
    Return a copy of the image with a given path and its positional index.

    Args:
        images: List of images (each a `dict` with at least a `"path"` key).
        path: Path that the returned image must have.

    Returns:
        A `dict(image=..., index=...)` for the first image from `images`
        whose path equals `path`, where `image` is a shallow copy of the
        matching entry; `None` when no image matches.
    """
    # Single pass: stop at the first matching path.
    for position, candidate in enumerate(images):
        if candidate["path"] == path:
            return dict(image=copy(candidate), index=position)
    return None
31
+
32
+
33
def match_filter(image: dict[str, Any], filters: Filters):
    """
    Check whether an image (as a `dict`) matches the given filters.

    Args:
        image: Image dict, with `"types"` and `"attributes"` sub-dicts.
        filters: Object exposing `types` and `attributes` dicts.

    Returns:
        `False` if any type or attribute filter is violated, `True`
        otherwise. Types missing from the image default to `False`;
        attribute filters whose value is `None` are skipped.
    """
    types_ok = all(
        image["types"].get(type_key, False) == required
        for type_key, required in filters.types.items()
    )
    if not types_ok:
        return False
    return all(
        image["attributes"].get(attribute_key) == required
        for attribute_key, required in filters.attributes.items()
        if required is not None
    )
43
+
44
+
45
def _filter_image_list(
    images: list[dict[str, Any]],
    filters: Filters,
) -> list[dict[str, Any]]:
    """
    Keep only the images (as `dict`s) that satisfy the given filters.

    Args:
        images: List of images.
        filters: Object exposing `types` and `attributes` dicts.

    Returns:
        Shallow copies of the matching images; when both filter dicts
        are empty, the input list itself (not a copy) is returned.
    """
    # Empty filters select everything: short-circuit without copying.
    if filters.attributes == {} and filters.types == {}:
        return images

    return [
        copy(candidate)
        for candidate in images
        if match_filter(candidate, filters=filters)
    ]
59
+
60
+
61
def match_filter_SingleImage(image: SingleImage, filters: Filters):
    """
    Check whether a `SingleImage` matches the given filters.

    Args:
        image: Object exposing `types` and `attributes` dicts.
        filters: Object exposing `types` and `attributes` dicts.

    Returns:
        `False` if any type or attribute filter is violated, `True`
        otherwise. Types missing from the image default to `False`;
        attribute filters whose value is `None` are skipped.
    """
    types_ok = all(
        image.types.get(type_key, False) == required
        for type_key, required in filters.types.items()
    )
    if not types_ok:
        return False
    return all(
        image.attributes.get(attribute_key) == required
        for attribute_key, required in filters.attributes.items()
        if required is not None
    )
71
+
72
+
73
def _filter_image_list_SingleImage(
    images: list[SingleImage],
    filters: Filters,
) -> list[SingleImage]:
    """
    Keep only the `SingleImage`s that satisfy the given filters.

    Args:
        images: List of images.
        filters: Object exposing `types` and `attributes` dicts.

    Returns:
        Shallow copies of the matching images; when both filter dicts
        are empty, the input list itself (not a copy) is returned.
    """
    # Empty filters select everything: short-circuit without copying.
    if filters.attributes == {} and filters.types == {}:
        return images

    return [
        copy(candidate)
        for candidate in images
        if match_filter_SingleImage(candidate, filters=filters)
    ]
fractal_server/main.py CHANGED
@@ -32,13 +32,21 @@ def collect_routers(app: FastAPI) -> None:
32
32
  """
33
33
  from .app.routes.api import router_api
34
34
  from .app.routes.api.v1 import router_api_v1
35
- from .app.routes.admin import router_admin
35
+ from .app.routes.api.v2 import router_api_v2
36
+ from .app.routes.admin.v1 import router_admin_v1
37
+ from .app.routes.admin.v2 import router_admin_v2
36
38
  from .app.routes.auth import router_auth
37
39
 
38
40
  app.include_router(router_api, prefix="/api")
39
41
  app.include_router(router_api_v1, prefix="/api/v1")
40
- app.include_router(router_admin, prefix="/admin", tags=["Admin area"])
41
- app.include_router(router_auth, prefix="/auth", tags=["auth"])
42
+ app.include_router(router_api_v2, prefix="/api/v2")
43
+ app.include_router(
44
+ router_admin_v1, prefix="/admin/v1", tags=["V1 Admin area"]
45
+ )
46
+ app.include_router(
47
+ router_admin_v2, prefix="/admin/v2", tags=["V2 Admin area"]
48
+ )
49
+ app.include_router(router_auth, prefix="/auth", tags=["Authentication"])
42
50
 
43
51
 
44
52
  def check_settings() -> None:
@@ -0,0 +1,39 @@
1
+ """TMP - is_v2_compatible
2
+
3
+ Revision ID: 4b35c5cefbe3
4
+ Revises: 876f28db9d4e
5
+ Create Date: 2024-03-28 15:26:33.436724
6
+
7
+ """
8
+ import sqlalchemy as sa
9
+ from alembic import op
10
+
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = "4b35c5cefbe3"
14
+ down_revision = "876f28db9d4e"
15
+ branch_labels = None
16
+ depends_on = None
17
+
18
+
19
+ def upgrade() -> None:
20
+ # ### commands auto generated by Alembic - please adjust! ###
21
+ with op.batch_alter_table("task", schema=None) as batch_op:
22
+ batch_op.add_column(
23
+ sa.Column(
24
+ "is_v2_compatible",
25
+ sa.Boolean(),
26
+ server_default=sa.text("false"),
27
+ nullable=False,
28
+ )
29
+ )
30
+
31
+ # ### end Alembic commands ###
32
+
33
+
34
+ def downgrade() -> None:
35
+ # ### commands auto generated by Alembic - please adjust! ###
36
+ with op.batch_alter_table("task", schema=None) as batch_op:
37
+ batch_op.drop_column("is_v2_compatible")
38
+
39
+ # ### end Alembic commands ###
@@ -0,0 +1,217 @@
1
+ """v2
2
+
3
+ Revision ID: 56af171b0159
4
+ Revises: 9fd26a2b0de4
5
+ Create Date: 2024-03-22 11:09:02.458011
6
+
7
+ """
8
+ import sqlalchemy as sa
9
+ import sqlmodel
10
+ from alembic import op
11
+
12
+
13
+ # revision identifiers, used by Alembic.
14
+ revision = "56af171b0159"
15
+ down_revision = "9fd26a2b0de4"
16
+ branch_labels = None
17
+ depends_on = None
18
+
19
+
20
+ def upgrade() -> None:
21
+ # ### commands auto generated by Alembic - please adjust! ###
22
+ op.create_table(
23
+ "projectv2",
24
+ sa.Column("id", sa.Integer(), nullable=False),
25
+ sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
26
+ sa.Column("read_only", sa.Boolean(), nullable=False),
27
+ sa.Column(
28
+ "timestamp_created", sa.DateTime(timezone=True), nullable=False
29
+ ),
30
+ sa.PrimaryKeyConstraint("id"),
31
+ )
32
+ op.create_table(
33
+ "taskv2",
34
+ sa.Column("id", sa.Integer(), nullable=False),
35
+ sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
36
+ sa.Column(
37
+ "command_non_parallel",
38
+ sqlmodel.sql.sqltypes.AutoString(),
39
+ nullable=True,
40
+ ),
41
+ sa.Column(
42
+ "command_parallel",
43
+ sqlmodel.sql.sqltypes.AutoString(),
44
+ nullable=True,
45
+ ),
46
+ sa.Column(
47
+ "source", sqlmodel.sql.sqltypes.AutoString(), nullable=False
48
+ ),
49
+ sa.Column("meta", sa.JSON(), nullable=True),
50
+ sa.Column("owner", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
51
+ sa.Column(
52
+ "version", sqlmodel.sql.sqltypes.AutoString(), nullable=True
53
+ ),
54
+ sa.Column("args_schema", sa.JSON(), nullable=True),
55
+ sa.Column(
56
+ "args_schema_version",
57
+ sqlmodel.sql.sqltypes.AutoString(),
58
+ nullable=True,
59
+ ),
60
+ sa.Column(
61
+ "docs_info", sqlmodel.sql.sqltypes.AutoString(), nullable=True
62
+ ),
63
+ sa.Column(
64
+ "docs_link", sqlmodel.sql.sqltypes.AutoString(), nullable=True
65
+ ),
66
+ sa.Column("input_types", sa.JSON(), nullable=True),
67
+ sa.Column("output_types", sa.JSON(), nullable=True),
68
+ sa.PrimaryKeyConstraint("id"),
69
+ sa.UniqueConstraint("source"),
70
+ )
71
+ op.create_table(
72
+ "datasetv2",
73
+ sa.Column("id", sa.Integer(), nullable=False),
74
+ sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
75
+ sa.Column("project_id", sa.Integer(), nullable=False),
76
+ sa.Column("history", sa.JSON(), server_default="[]", nullable=False),
77
+ sa.Column("read_only", sa.Boolean(), nullable=False),
78
+ sa.Column(
79
+ "timestamp_created", sa.DateTime(timezone=True), nullable=False
80
+ ),
81
+ sa.Column(
82
+ "zarr_dir", sqlmodel.sql.sqltypes.AutoString(), nullable=False
83
+ ),
84
+ sa.Column("images", sa.JSON(), server_default="[]", nullable=False),
85
+ sa.Column(
86
+ "filters",
87
+ sa.JSON(),
88
+ server_default='{"attributes": {}, "types": {}}',
89
+ nullable=False,
90
+ ),
91
+ sa.ForeignKeyConstraint(
92
+ ["project_id"],
93
+ ["projectv2.id"],
94
+ ),
95
+ sa.PrimaryKeyConstraint("id"),
96
+ )
97
+ op.create_table(
98
+ "linkuserprojectv2",
99
+ sa.Column("project_id", sa.Integer(), nullable=False),
100
+ sa.Column("user_id", sa.Integer(), nullable=False),
101
+ sa.ForeignKeyConstraint(
102
+ ["project_id"],
103
+ ["projectv2.id"],
104
+ ),
105
+ sa.ForeignKeyConstraint(
106
+ ["user_id"],
107
+ ["user_oauth.id"],
108
+ ),
109
+ sa.PrimaryKeyConstraint("project_id", "user_id"),
110
+ )
111
+ op.create_table(
112
+ "workflowv2",
113
+ sa.Column("id", sa.Integer(), nullable=False),
114
+ sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
115
+ sa.Column("project_id", sa.Integer(), nullable=False),
116
+ sa.Column(
117
+ "timestamp_created", sa.DateTime(timezone=True), nullable=False
118
+ ),
119
+ sa.ForeignKeyConstraint(
120
+ ["project_id"],
121
+ ["projectv2.id"],
122
+ ),
123
+ sa.PrimaryKeyConstraint("id"),
124
+ )
125
+ op.create_table(
126
+ "jobv2",
127
+ sa.Column("id", sa.Integer(), nullable=False),
128
+ sa.Column("project_id", sa.Integer(), nullable=True),
129
+ sa.Column("workflow_id", sa.Integer(), nullable=True),
130
+ sa.Column("dataset_id", sa.Integer(), nullable=True),
131
+ sa.Column(
132
+ "user_email", sqlmodel.sql.sqltypes.AutoString(), nullable=False
133
+ ),
134
+ sa.Column(
135
+ "slurm_account", sqlmodel.sql.sqltypes.AutoString(), nullable=True
136
+ ),
137
+ sa.Column("dataset_dump", sa.JSON(), nullable=False),
138
+ sa.Column("workflow_dump", sa.JSON(), nullable=False),
139
+ sa.Column("project_dump", sa.JSON(), nullable=False),
140
+ sa.Column(
141
+ "worker_init", sqlmodel.sql.sqltypes.AutoString(), nullable=True
142
+ ),
143
+ sa.Column(
144
+ "working_dir", sqlmodel.sql.sqltypes.AutoString(), nullable=True
145
+ ),
146
+ sa.Column(
147
+ "working_dir_user",
148
+ sqlmodel.sql.sqltypes.AutoString(),
149
+ nullable=True,
150
+ ),
151
+ sa.Column("first_task_index", sa.Integer(), nullable=False),
152
+ sa.Column("last_task_index", sa.Integer(), nullable=False),
153
+ sa.Column(
154
+ "start_timestamp", sa.DateTime(timezone=True), nullable=False
155
+ ),
156
+ sa.Column("end_timestamp", sa.DateTime(timezone=True), nullable=True),
157
+ sa.Column(
158
+ "status", sqlmodel.sql.sqltypes.AutoString(), nullable=False
159
+ ),
160
+ sa.Column("log", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
161
+ sa.ForeignKeyConstraint(
162
+ ["dataset_id"],
163
+ ["datasetv2.id"],
164
+ ),
165
+ sa.ForeignKeyConstraint(
166
+ ["project_id"],
167
+ ["projectv2.id"],
168
+ ),
169
+ sa.ForeignKeyConstraint(
170
+ ["workflow_id"],
171
+ ["workflowv2.id"],
172
+ ),
173
+ sa.PrimaryKeyConstraint("id"),
174
+ )
175
+ op.create_table(
176
+ "workflowtaskv2",
177
+ sa.Column("id", sa.Integer(), nullable=False),
178
+ sa.Column("workflow_id", sa.Integer(), nullable=False),
179
+ sa.Column("order", sa.Integer(), nullable=True),
180
+ sa.Column("meta", sa.JSON(), nullable=True),
181
+ sa.Column("args", sa.JSON(), nullable=True),
182
+ sa.Column(
183
+ "input_filters",
184
+ sa.JSON(),
185
+ server_default='{"attributes": {}, "types": {}}',
186
+ nullable=False,
187
+ ),
188
+ sa.Column("is_legacy_task", sa.Boolean(), nullable=False),
189
+ sa.Column("task_id", sa.Integer(), nullable=True),
190
+ sa.Column("task_legacy_id", sa.Integer(), nullable=True),
191
+ sa.ForeignKeyConstraint(
192
+ ["task_id"],
193
+ ["taskv2.id"],
194
+ ),
195
+ sa.ForeignKeyConstraint(
196
+ ["task_legacy_id"],
197
+ ["task.id"],
198
+ ),
199
+ sa.ForeignKeyConstraint(
200
+ ["workflow_id"],
201
+ ["workflowv2.id"],
202
+ ),
203
+ sa.PrimaryKeyConstraint("id"),
204
+ )
205
+ # ### end Alembic commands ###
206
+
207
+
208
+ def downgrade() -> None:
209
+ # ### commands auto generated by Alembic - please adjust! ###
210
+ op.drop_table("workflowtaskv2")
211
+ op.drop_table("jobv2")
212
+ op.drop_table("workflowv2")
213
+ op.drop_table("linkuserprojectv2")
214
+ op.drop_table("datasetv2")
215
+ op.drop_table("taskv2")
216
+ op.drop_table("projectv2")
217
+ # ### end Alembic commands ###
@@ -0,0 +1,68 @@
1
+ """TMP - split Task and WFTask meta
2
+
3
+ Revision ID: 876f28db9d4e
4
+ Revises: b9e9eed9d442
5
+ Create Date: 2024-03-27 14:35:50.076725
6
+
7
+ """
8
+ import sqlalchemy as sa
9
+ from alembic import op
10
+ from sqlalchemy.dialects import postgresql
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = "876f28db9d4e"
14
+ down_revision = "b9e9eed9d442"
15
+ branch_labels = None
16
+ depends_on = None
17
+
18
+
19
+ def upgrade() -> None:
20
+ # ### commands auto generated by Alembic - please adjust! ###
21
+ with op.batch_alter_table("taskv2", schema=None) as batch_op:
22
+ batch_op.add_column(
23
+ sa.Column("meta_non_parallel", sa.JSON(), nullable=True)
24
+ )
25
+ batch_op.add_column(
26
+ sa.Column("meta_parallel", sa.JSON(), nullable=True)
27
+ )
28
+ batch_op.drop_column("meta")
29
+
30
+ with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
31
+ batch_op.add_column(
32
+ sa.Column("meta_parallel", sa.JSON(), nullable=True)
33
+ )
34
+ batch_op.add_column(
35
+ sa.Column("meta_non_parallel", sa.JSON(), nullable=True)
36
+ )
37
+ batch_op.drop_column("meta")
38
+
39
+ # ### end Alembic commands ###
40
+
41
+
42
+ def downgrade() -> None:
43
+ # ### commands auto generated by Alembic - please adjust! ###
44
+ with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
45
+ batch_op.add_column(
46
+ sa.Column(
47
+ "meta",
48
+ postgresql.JSON(astext_type=sa.Text()),
49
+ autoincrement=False,
50
+ nullable=True,
51
+ )
52
+ )
53
+ batch_op.drop_column("meta_non_parallel")
54
+ batch_op.drop_column("meta_parallel")
55
+
56
+ with op.batch_alter_table("taskv2", schema=None) as batch_op:
57
+ batch_op.add_column(
58
+ sa.Column(
59
+ "meta",
60
+ postgresql.JSON(astext_type=sa.Text()),
61
+ autoincrement=False,
62
+ nullable=True,
63
+ )
64
+ )
65
+ batch_op.drop_column("meta_parallel")
66
+ batch_op.drop_column("meta_non_parallel")
67
+
68
+ # ### end Alembic commands ###
@@ -0,0 +1,37 @@
1
+ """TMP - WorkflowTaskV2.type in DB
2
+
3
+ Revision ID: 974c802f0dd0
4
+ Revises: e3e639454d4b
5
+ Create Date: 2024-04-02 11:21:20.212530
6
+
7
+ """
8
+ import sqlalchemy as sa
9
+ import sqlmodel
10
+ from alembic import op
11
+
12
+
13
+ # revision identifiers, used by Alembic.
14
+ revision = "974c802f0dd0"
15
+ down_revision = "e3e639454d4b"
16
+ branch_labels = None
17
+ depends_on = None
18
+
19
+
20
+ def upgrade() -> None:
21
+ # ### commands auto generated by Alembic - please adjust! ###
22
+ with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
23
+ batch_op.add_column(
24
+ sa.Column(
25
+ "task_type", sqlmodel.sql.sqltypes.AutoString(), nullable=False
26
+ )
27
+ )
28
+
29
+ # ### end Alembic commands ###
30
+
31
+
32
+ def downgrade() -> None:
33
+ # ### commands auto generated by Alembic - please adjust! ###
34
+ with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
35
+ batch_op.drop_column("task_type")
36
+
37
+ # ### end Alembic commands ###
@@ -0,0 +1,40 @@
1
+ """TMP - WorkflowTaskV2
2
+
3
+ Revision ID: 9cd305cd6023
4
+ Revises: a6231ed6273c
5
+ Create Date: 2024-03-26 09:15:00.188036
6
+
7
+ """
8
+ import sqlalchemy as sa
9
+ from alembic import op
10
+ from sqlalchemy.dialects import sqlite
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = "9cd305cd6023"
14
+ down_revision = "a6231ed6273c"
15
+ branch_labels = None
16
+ depends_on = None
17
+
18
+
19
+ def upgrade() -> None:
20
+ # ### commands auto generated by Alembic - please adjust! ###
21
+ with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
22
+ batch_op.add_column(
23
+ sa.Column("args_parallel", sa.JSON(), nullable=True)
24
+ )
25
+ batch_op.add_column(
26
+ sa.Column("args_non_parallel", sa.JSON(), nullable=True)
27
+ )
28
+ batch_op.drop_column("args")
29
+
30
+ # ### end Alembic commands ###
31
+
32
+
33
+ def downgrade() -> None:
34
+ # ### commands auto generated by Alembic - please adjust! ###
35
+ with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
36
+ batch_op.add_column(sa.Column("args", sqlite.JSON(), nullable=True))
37
+ batch_op.drop_column("args_non_parallel")
38
+ batch_op.drop_column("args_parallel")
39
+
40
+ # ### end Alembic commands ###
@@ -0,0 +1,42 @@
1
+ """TMP - Args Schemas in TaskV2
2
+
3
+ Revision ID: a6231ed6273c
4
+ Revises: 56af171b0159
5
+ Create Date: 2024-03-26 08:49:51.870087
6
+
7
+ """
8
+ import sqlalchemy as sa
9
+ from alembic import op
10
+ from sqlalchemy.dialects import sqlite
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = "a6231ed6273c"
14
+ down_revision = "56af171b0159"
15
+ branch_labels = None
16
+ depends_on = None
17
+
18
+
19
+ def upgrade() -> None:
20
+ # ### commands auto generated by Alembic - please adjust! ###
21
+ with op.batch_alter_table("taskv2", schema=None) as batch_op:
22
+ batch_op.add_column(
23
+ sa.Column("args_schema_non_parallel", sa.JSON(), nullable=True)
24
+ )
25
+ batch_op.add_column(
26
+ sa.Column("args_schema_parallel", sa.JSON(), nullable=True)
27
+ )
28
+ batch_op.drop_column("args_schema")
29
+
30
+ # ### end Alembic commands ###
31
+
32
+
33
+ def downgrade() -> None:
34
+ # ### commands auto generated by Alembic - please adjust! ###
35
+ with op.batch_alter_table("taskv2", schema=None) as batch_op:
36
+ batch_op.add_column(
37
+ sa.Column("args_schema", sqlite.JSON(), nullable=True)
38
+ )
39
+ batch_op.drop_column("args_schema_parallel")
40
+ batch_op.drop_column("args_schema_non_parallel")
41
+
42
+ # ### end Alembic commands ###
@@ -0,0 +1,37 @@
1
+ """TMP - TaskV2.type
2
+
3
+ Revision ID: b9e9eed9d442
4
+ Revises: 9cd305cd6023
5
+ Create Date: 2024-03-27 13:10:34.125503
6
+
7
+ """
8
+ import sqlalchemy as sa
9
+ import sqlmodel
10
+ from alembic import op
11
+
12
+
13
+ # revision identifiers, used by Alembic.
14
+ revision = "b9e9eed9d442"
15
+ down_revision = "9cd305cd6023"
16
+ branch_labels = None
17
+ depends_on = None
18
+
19
+
20
+ def upgrade() -> None:
21
+ # ### commands auto generated by Alembic - please adjust! ###
22
+ with op.batch_alter_table("taskv2", schema=None) as batch_op:
23
+ batch_op.add_column(
24
+ sa.Column(
25
+ "type", sqlmodel.sql.sqltypes.AutoString(), nullable=False
26
+ )
27
+ )
28
+
29
+ # ### end Alembic commands ###
30
+
31
+
32
+ def downgrade() -> None:
33
+ # ### commands auto generated by Alembic - please adjust! ###
34
+ with op.batch_alter_table("taskv2", schema=None) as batch_op:
35
+ batch_op.drop_column("type")
36
+
37
+ # ### end Alembic commands ###
@@ -0,0 +1,50 @@
1
+ """TMP - make task.meta non optional
2
+
3
+ Revision ID: e3e639454d4b
4
+ Revises: 4b35c5cefbe3
5
+ Create Date: 2024-03-29 17:10:57.643561
6
+
7
+ """
8
+ import sqlalchemy as sa
9
+ from alembic import op
10
+ from sqlalchemy.dialects import postgresql
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = "e3e639454d4b"
14
+ down_revision = "4b35c5cefbe3"
15
+ branch_labels = None
16
+ depends_on = None
17
+
18
+
19
+ def upgrade() -> None:
20
+ # ### commands auto generated by Alembic - please adjust! ###
21
+ with op.batch_alter_table("taskv2", schema=None) as batch_op:
22
+ batch_op.alter_column(
23
+ "meta_non_parallel",
24
+ existing_type=postgresql.JSON(astext_type=sa.Text()),
25
+ nullable=False,
26
+ )
27
+ batch_op.alter_column(
28
+ "meta_parallel",
29
+ existing_type=postgresql.JSON(astext_type=sa.Text()),
30
+ nullable=False,
31
+ )
32
+
33
+ # ### end Alembic commands ###
34
+
35
+
36
+ def downgrade() -> None:
37
+ # ### commands auto generated by Alembic - please adjust! ###
38
+ with op.batch_alter_table("taskv2", schema=None) as batch_op:
39
+ batch_op.alter_column(
40
+ "meta_parallel",
41
+ existing_type=postgresql.JSON(astext_type=sa.Text()),
42
+ nullable=True,
43
+ )
44
+ batch_op.alter_column(
45
+ "meta_non_parallel",
46
+ existing_type=postgresql.JSON(astext_type=sa.Text()),
47
+ nullable=True,
48
+ )
49
+
50
+ # ### end Alembic commands ###
@@ -1,8 +1,3 @@
1
1
  """
2
2
  `tasks` module
3
-
4
- Submodules:
5
- * [Endpoint functions](background_operations)
6
- * [Background-task functions](endpoint_operations)
7
- * [Auxiliary functions](utils)
8
3
  """