zenml-nightly 0.70.0.dev20241122__py3-none-any.whl → 0.70.0.dev20241201__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- zenml/VERSION +1 -1
- zenml/artifact_stores/base_artifact_store.py +2 -2
- zenml/artifacts/artifact_config.py +7 -1
- zenml/artifacts/utils.py +56 -31
- zenml/cli/__init__.py +18 -0
- zenml/cli/base.py +4 -4
- zenml/cli/login.py +26 -0
- zenml/cli/pipeline.py +80 -0
- zenml/cli/server.py +1 -1
- zenml/cli/service_connectors.py +3 -3
- zenml/cli/stack.py +0 -3
- zenml/cli/stack_components.py +0 -1
- zenml/cli/utils.py +0 -5
- zenml/client.py +8 -18
- zenml/config/compiler.py +12 -3
- zenml/config/pipeline_configurations.py +20 -0
- zenml/config/pipeline_run_configuration.py +1 -0
- zenml/config/step_configurations.py +21 -0
- zenml/constants.py +1 -0
- zenml/enums.py +1 -0
- zenml/image_builders/local_image_builder.py +13 -3
- zenml/integrations/__init__.py +1 -0
- zenml/integrations/aws/orchestrators/sagemaker_orchestrator.py +14 -6
- zenml/integrations/constants.py +1 -0
- zenml/integrations/feast/__init__.py +1 -1
- zenml/integrations/feast/feature_stores/feast_feature_store.py +13 -9
- zenml/integrations/kubernetes/orchestrators/kube_utils.py +54 -9
- zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py +65 -3
- zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator_entrypoint.py +14 -2
- zenml/integrations/kubernetes/orchestrators/manifest_utils.py +6 -5
- zenml/integrations/kubernetes/service_connectors/kubernetes_service_connector.py +2 -1
- zenml/integrations/kubernetes/step_operators/kubernetes_step_operator.py +3 -1
- zenml/integrations/modal/__init__.py +46 -0
- zenml/integrations/modal/flavors/__init__.py +26 -0
- zenml/integrations/modal/flavors/modal_step_operator_flavor.py +125 -0
- zenml/integrations/modal/step_operators/__init__.py +22 -0
- zenml/integrations/modal/step_operators/modal_step_operator.py +242 -0
- zenml/io/filesystem.py +2 -2
- zenml/io/local_filesystem.py +3 -3
- zenml/materializers/built_in_materializer.py +18 -1
- zenml/materializers/structured_string_materializer.py +8 -3
- zenml/model/model.py +11 -85
- zenml/model/utils.py +18 -16
- zenml/models/__init__.py +12 -1
- zenml/models/v2/core/artifact_version.py +6 -3
- zenml/models/v2/core/component.py +0 -22
- zenml/models/v2/core/model_version.py +6 -3
- zenml/models/v2/core/pipeline_run.py +19 -3
- zenml/models/v2/core/run_metadata.py +30 -9
- zenml/models/v2/core/service_connector.py +4 -0
- zenml/models/v2/core/step_run.py +6 -4
- zenml/models/v2/misc/run_metadata.py +38 -0
- zenml/models/v2/misc/server_models.py +23 -0
- zenml/orchestrators/input_utils.py +19 -6
- zenml/orchestrators/publish_utils.py +12 -5
- zenml/orchestrators/step_launcher.py +7 -3
- zenml/orchestrators/step_run_utils.py +26 -9
- zenml/orchestrators/step_runner.py +40 -3
- zenml/orchestrators/utils.py +24 -23
- zenml/pipelines/pipeline_decorator.py +4 -0
- zenml/pipelines/pipeline_definition.py +26 -8
- zenml/pipelines/run_utils.py +9 -5
- zenml/steps/base_step.py +11 -1
- zenml/steps/entrypoint_function_utils.py +7 -3
- zenml/steps/step_decorator.py +4 -0
- zenml/steps/utils.py +23 -7
- zenml/types.py +4 -0
- zenml/utils/metadata_utils.py +186 -153
- zenml/utils/string_utils.py +41 -16
- zenml/utils/visualization_utils.py +4 -1
- zenml/zen_server/cloud_utils.py +3 -1
- zenml/zen_server/deploy/helm/templates/_environment.tpl +117 -0
- zenml/zen_server/deploy/helm/templates/server-db-job.yaml +3 -14
- zenml/zen_server/deploy/helm/templates/server-deployment.yaml +16 -4
- zenml/zen_server/deploy/helm/templates/server-secret.yaml +2 -17
- zenml/zen_server/rbac/endpoint_utils.py +6 -4
- zenml/zen_server/rbac/models.py +3 -2
- zenml/zen_server/rbac/utils.py +4 -7
- zenml/zen_server/routers/server_endpoints.py +47 -0
- zenml/zen_server/routers/users_endpoints.py +35 -37
- zenml/zen_server/routers/workspaces_endpoints.py +44 -55
- zenml/zen_server/template_execution/utils.py +1 -0
- zenml/zen_server/zen_server_api.py +45 -6
- zenml/zen_stores/migrations/utils.py +40 -24
- zenml/zen_stores/migrations/versions/b73bc71f1106_remove_component_spec_path.py +36 -0
- zenml/zen_stores/migrations/versions/cc269488e5a9_separate_run_metadata.py +135 -0
- zenml/zen_stores/migrations/versions/ec6307720f92_simplify_model_version_links.py +7 -6
- zenml/zen_stores/rest_zen_store.py +38 -1
- zenml/zen_stores/schemas/__init__.py +5 -1
- zenml/zen_stores/schemas/artifact_schemas.py +12 -11
- zenml/zen_stores/schemas/component_schemas.py +0 -3
- zenml/zen_stores/schemas/model_schemas.py +13 -11
- zenml/zen_stores/schemas/pipeline_run_schemas.py +44 -16
- zenml/zen_stores/schemas/run_metadata_schemas.py +66 -31
- zenml/zen_stores/schemas/step_run_schemas.py +32 -12
- zenml/zen_stores/schemas/utils.py +47 -3
- zenml/zen_stores/sql_zen_store.py +130 -34
- {zenml_nightly-0.70.0.dev20241122.dist-info → zenml_nightly-0.70.0.dev20241201.dist-info}/METADATA +1 -1
- {zenml_nightly-0.70.0.dev20241122.dist-info → zenml_nightly-0.70.0.dev20241201.dist-info}/RECORD +102 -95
- zenml/utils/cloud_utils.py +0 -40
- {zenml_nightly-0.70.0.dev20241122.dist-info → zenml_nightly-0.70.0.dev20241201.dist-info}/LICENSE +0 -0
- {zenml_nightly-0.70.0.dev20241122.dist-info → zenml_nightly-0.70.0.dev20241201.dist-info}/WHEEL +0 -0
- {zenml_nightly-0.70.0.dev20241122.dist-info → zenml_nightly-0.70.0.dev20241201.dist-info}/entry_points.txt +0 -0
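
Notable hunks from the most heavily changed files follow.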
zenml/zen_server/routers/workspaces_endpoints.py

@@ -13,7 +13,7 @@
 # permissions and limitations under the License.
 """Endpoint definitions for workspaces."""
 
-from typing import Dict, List, Optional, Tuple, Union
+from typing import Any, Dict, List, Optional, Tuple, Union
 from uuid import UUID
 
 from fastapi import APIRouter, Depends, Security
@@ -98,13 +98,13 @@ from zenml.zen_server.feature_gate.endpoint_utils import (
 )
 from zenml.zen_server.rbac.endpoint_utils import (
     verify_permissions_and_create_entity,
-    verify_permissions_and_delete_entity,
-    verify_permissions_and_get_entity,
     verify_permissions_and_list_entities,
-    verify_permissions_and_update_entity,
 )
 from zenml.zen_server.rbac.models import Action, ResourceType
 from zenml.zen_server.rbac.utils import (
+    batch_verify_permissions_for_models,
+    dehydrate_page,
+    dehydrate_response_model,
     get_allowed_resource_ids,
     verify_permission,
     verify_permission_for_model,
@@ -146,12 +146,10 @@ def list_workspaces(
     Returns:
         A list of workspaces.
     """
-    return verify_permissions_and_list_entities(
-        filter_model=workspace_filter_model,
-        resource_type=ResourceType.WORKSPACE,
-        list_method=zen_store().list_workspaces,
-        hydrate=hydrate,
+    workspaces = zen_store().list_workspaces(
+        workspace_filter_model, hydrate=hydrate
     )
+    return dehydrate_page(workspaces)
 
 
 @router.post(
@@ -160,7 +158,7 @@ def list_workspaces(
 )
 @handle_exceptions
 def create_workspace(
-    workspace: WorkspaceRequest,
+    workspace_request: WorkspaceRequest,
     _: AuthContext = Security(authorize),
 ) -> WorkspaceResponse:
     """Creates a workspace based on the requestBody.
@@ -168,16 +166,13 @@ def create_workspace(
     # noqa: DAR401
 
     Args:
-        workspace: Workspace to create.
+        workspace_request: Workspace to create.
 
     Returns:
         The created workspace.
     """
-    return verify_permissions_and_create_entity(
-        request_model=workspace,
-        resource_type=ResourceType.WORKSPACE,
-        create_method=zen_store().create_workspace,
-    )
+    workspace = zen_store().create_workspace(workspace_request)
+    return dehydrate_response_model(workspace)
 
 
 @router.get(
@@ -203,11 +198,10 @@ def get_workspace(
     Returns:
         The requested workspace.
     """
-    return verify_permissions_and_get_entity(
-        id=workspace_name_or_id,
-        get_method=zen_store().get_workspace,
-        hydrate=hydrate,
+    workspace = zen_store().get_workspace(
+        workspace_name_or_id, hydrate=hydrate
     )
+    return dehydrate_response_model(workspace)
 
 
 @router.put(
@@ -231,12 +225,11 @@ def update_workspace(
     Returns:
         The updated workspace.
     """
-    return verify_permissions_and_update_entity(
-        id=workspace_name_or_id,
-        update_model=workspace_update,
-        get_method=zen_store().get_workspace,
-        update_method=zen_store().update_workspace,
+    workspace = zen_store().get_workspace(workspace_name_or_id, hydrate=False)
+    updated_workspace = zen_store().update_workspace(
+        workspace_id=workspace.id, workspace_update=workspace_update
     )
+    return dehydrate_response_model(updated_workspace)
 
 
 @router.delete(
@@ -253,11 +246,7 @@ def delete_workspace(
     Args:
         workspace_name_or_id: Name or ID of the workspace.
     """
-    verify_permissions_and_delete_entity(
-        id=workspace_name_or_id,
-        get_method=zen_store().get_workspace,
-        delete_method=zen_store().delete_workspace,
-    )
+    zen_store().delete_workspace(workspace_name_or_id)
 
 
 @router.get(
@@ -951,20 +940,21 @@ def get_or_create_pipeline_run(
             "is not supported."
         )
 
-    verify_permission(
-        resource_type=ResourceType.PIPELINE_RUN, action=Action.CREATE
-    )
+    def _pre_creation_hook() -> None:
+        verify_permission(
+            resource_type=ResourceType.PIPELINE_RUN, action=Action.CREATE
+        )
+        check_entitlement(resource_type=ResourceType.PIPELINE_RUN)
 
     run, created = zen_store().get_or_create_run(
-        pipeline_run=pipeline_run,
-        pre_creation_hook=lambda: check_entitlement(
-            resource_type=ResourceType.PIPELINE_RUN
-        ),
+        pipeline_run=pipeline_run, pre_creation_hook=_pre_creation_hook
     )
     if created:
         report_usage(
             resource_type=ResourceType.PIPELINE_RUN, resource_id=run.id
         )
+    else:
+        verify_permission_for_model(run, action=Action.READ)
 
     return run, created
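
This hunk defers the CREATE permission and entitlement checks into a hook that only fires when the store actually creates a new run, and falls back to a READ check when an existing run is returned. A minimal sketch of the pattern, with hypothetical names standing in for the ZenML store internals:

```python
from typing import Callable, Dict, Tuple

_RUNS: Dict[str, dict] = {}  # stand-in for the pipeline run table


def get_or_create_run(
    name: str,
    run: dict,
    pre_creation_hook: Callable[[], None],
) -> Tuple[dict, bool]:
    """Return (run, created); the hook only fires on the create path."""
    if name in _RUNS:
        # Existing entity: the caller checks READ access instead.
        return _RUNS[name], False
    pre_creation_hook()  # e.g. permission + entitlement checks; may raise
    _RUNS[name] = run
    return run, True
```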
@@ -1009,24 +999,23 @@ def create_run_metadata(
             "is not supported."
        )
 
-    if run_metadata.resource_type == MetadataResourceTypes.PIPELINE_RUN:
-        run = zen_store().get_run(run_metadata.resource_id)
-        verify_permission_for_model(run, action=Action.UPDATE)
-    elif run_metadata.resource_type == MetadataResourceTypes.STEP_RUN:
-        step = zen_store().get_run_step(run_metadata.resource_id)
-        verify_permission_for_model(step, action=Action.UPDATE)
-    elif run_metadata.resource_type == MetadataResourceTypes.ARTIFACT_VERSION:
-        artifact_version = zen_store().get_artifact_version(
-            run_metadata.resource_id
-        )
-        verify_permission_for_model(artifact_version, action=Action.UPDATE)
-    elif run_metadata.resource_type == MetadataResourceTypes.MODEL_VERSION:
-        model_version = zen_store().get_model_version(run_metadata.resource_id)
-        verify_permission_for_model(model_version, action=Action.UPDATE)
-    else:
-        raise RuntimeError(
-            f"Unknown resource type: {run_metadata.resource_type}"
-        )
+    verify_models: List[Any] = []
+    for resource in run_metadata.resources:
+        if resource.type == MetadataResourceTypes.PIPELINE_RUN:
+            verify_models.append(zen_store().get_run(resource.id))
+        elif resource.type == MetadataResourceTypes.STEP_RUN:
+            verify_models.append(zen_store().get_run_step(resource.id))
+        elif resource.type == MetadataResourceTypes.ARTIFACT_VERSION:
+            verify_models.append(zen_store().get_artifact_version(resource.id))
+        elif resource.type == MetadataResourceTypes.MODEL_VERSION:
+            verify_models.append(zen_store().get_model_version(resource.id))
+        else:
+            raise RuntimeError(f"Unknown resource type: {resource.type}")
+
+    batch_verify_permissions_for_models(
+        models=verify_models,
+        action=Action.UPDATE,
+    )
 
     verify_permission(
         resource_type=ResourceType.RUN_METADATA, action=Action.CREATE
zenml/zen_server/zen_server_api.py

@@ -180,7 +180,15 @@ class RequestBodyLimit(BaseHTTPMiddleware):
         if content_length := request.headers.get("content-length"):
             if int(content_length) > self.max_bytes:
                 return Response(status_code=413)  # Request Entity Too Large
-        return await call_next(request)
+
+        try:
+            return await call_next(request)
+        except Exception:
+            logger.exception("An error occurred while processing the request")
+            return JSONResponse(
+                status_code=500,
+                content={"detail": "An unexpected error occurred."},
+            )
 
 
 class RestrictFileUploadsMiddleware(BaseHTTPMiddleware):
@@ -220,7 +228,15 @@ class RestrictFileUploadsMiddleware(BaseHTTPMiddleware):
                     "detail": "File uploads are not allowed on this endpoint."
                 },
             )
-        return await call_next(request)
+
+        try:
+            return await call_next(request)
+        except Exception:
+            logger.exception("An error occurred while processing the request")
+            return JSONResponse(
+                status_code=500,
+                content={"detail": "An unexpected error occurred."},
+            )
 
 
 ALLOWED_FOR_FILE_UPLOAD: Set[str] = set()
@@ -252,13 +268,21 @@ async def set_secure_headers(request: Request, call_next: Any) -> Any:
     Returns:
         The response with secure headers set.
     """
+    try:
+        response = await call_next(request)
+    except Exception:
+        logger.exception("An error occurred while processing the request")
+        response = JSONResponse(
+            status_code=500,
+            content={"detail": "An unexpected error occurred."},
+        )
+
     # If the request is for the openAPI docs, don't set secure headers
     if request.url.path.startswith("/docs") or request.url.path.startswith(
         "/redoc"
     ):
-        return
+        return response
 
-    response = await call_next(request)
     secure_headers().framework.fastapi(response)
     return response
 
@@ -298,7 +322,15 @@ async def track_last_user_activity(request: Request, call_next: Any) -> Any:
         zen_store()._update_last_user_activity_timestamp(
             last_user_activity=last_user_activity
         )
-    return await call_next(request)
+
+    try:
+        return await call_next(request)
+    except Exception:
+        logger.exception("An error occurred while processing the request")
+        return JSONResponse(
+            status_code=500,
+            content={"detail": "An unexpected error occurred."},
+        )
 
 
 @app.middleware("http")
@@ -330,7 +362,14 @@ async def infer_source_context(request: Request, call_next: Any) -> Any:
         )
     source_context.set(SourceContextTypes.API)
 
-    return await call_next(request)
+    try:
+        return await call_next(request)
+    except Exception:
+        logger.exception("An error occurred while processing the request")
+        return JSONResponse(
+            status_code=500,
+            content={"detail": "An unexpected error occurred."},
+        )
 
 
 @app.on_event("startup")
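
All of these middleware hunks apply the same fix: `call_next` is wrapped so an unhandled exception is logged and converted into a JSON 500 response instead of escaping the middleware stack. A self-contained sketch of the pattern (not the exact ZenML classes), assuming FastAPI/Starlette:

```python
import logging

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from starlette.middleware.base import BaseHTTPMiddleware

logger = logging.getLogger(__name__)


class CatchUnexpectedErrorsMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        try:
            return await call_next(request)
        except Exception:
            # Log the traceback, then degrade to a generic JSON 500.
            logger.exception("An error occurred while processing the request")
            return JSONResponse(
                status_code=500,
                content={"detail": "An unexpected error occurred."},
            )


app = FastAPI()
app.add_middleware(CatchUnexpectedErrorsMiddleware)
```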
zenml/zen_stores/migrations/utils.py

@@ -273,30 +273,25 @@ class MigrationUtils(BaseModel):
             + "\n);"
         )
 
+        # Detect self-referential foreign keys from the table schema
+        has_self_referential_foreign_keys = False
+        for fk in table.foreign_keys:
+            # Check if the foreign key points to the same table
+            if fk.column.table == table:
+                has_self_referential_foreign_keys = True
+                break
+
         # Store the table schema
         store_db_info(
-            dict(table=table.name, create_stmt=create_table_stmt)
+            dict(
+                table=table.name,
+                create_stmt=create_table_stmt,
+                self_references=has_self_referential_foreign_keys,
+            )
         )
 
         # 2. extract the table data in batches
-
-        # If the table has a `created` column, we use it to sort
-        # the rows in the table starting with the oldest rows.
-        # This is to ensure that the rows are inserted in the
-        # correct order, since some tables have inner foreign key
-        # constraints.
-        if "created" in table.columns:
-            order_by = [table.columns["created"]]
-        else:
-            order_by = []
-        if "id" in table.columns:
-            # If the table has an `id` column, we also use it to sort
-            # the rows in the table, even if we already use "created"
-            # to sort the rows. We need a unique field to sort the rows,
-            # to break the tie between rows with the same "created"
-            # date, otherwise the same entry might end up multiple times
-            # in subsequent pages.
-            order_by.append(table.columns["id"])
+        order_by = [col for col in table.primary_key]
 
         # Fetch the number of rows in the table
         row_count = conn.scalar(
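
The new `self_references` flag is computed by inspecting the reflected SQLAlchemy table. A toy example of the same check, assuming SQLAlchemy 2.x:

```python
import sqlalchemy as sa

metadata = sa.MetaData()
node = sa.Table(
    "node",
    metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    # Self-referential FK: rows point at other rows in the same table.
    sa.Column("parent_id", sa.Integer, sa.ForeignKey("node.id")),
)

# `fk.column` is the referenced column; comparing its table with the
# table under inspection flags self-references.
has_self_ref = any(fk.column.table is node for fk in node.foreign_keys)
assert has_self_ref
```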
@@ -305,7 +300,7 @@ class MigrationUtils(BaseModel):
 
         # Fetch the data from the table in batches
         if row_count is not None:
-            batch_size = 50
+            batch_size = 100
             for i in range(0, row_count, batch_size):
                 rows = conn.execute(
                     table.select()
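
Together with the new `order_by = [col for col in table.primary_key]` above, batched extraction relies on a deterministic sort so LIMIT/OFFSET pages neither repeat nor drop rows. A runnable sketch with an in-memory SQLite table:

```python
import sqlalchemy as sa

engine = sa.create_engine("sqlite://")
metadata = sa.MetaData()
item = sa.Table("item", metadata, sa.Column("id", sa.Integer, primary_key=True))
metadata.create_all(engine)

batch_size = 100
with engine.begin() as conn:
    conn.execute(item.insert(), [{"id": i} for i in range(250)])
    row_count = conn.scalar(sa.select(sa.func.count()).select_from(item)) or 0
    # Sort by the primary key so pagination is stable.
    order_by = [col for col in item.primary_key]
    for i in range(0, row_count, batch_size):
        rows = conn.execute(
            item.select().order_by(*order_by).limit(batch_size).offset(i)
        ).fetchall()
        print(len(rows))  # 100, 100, 50
```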
@@ -349,6 +344,7 @@ class MigrationUtils(BaseModel):
 
         with self.engine.begin() as connection:
             # read the DB information one JSON object at a time
+            self_references: Dict[str, bool] = {}
             for table_dump in load_db_info():
                 table_name = table_dump["table"]
                 if "create_stmt" in table_dump:
@@ -356,10 +352,22 @@ class MigrationUtils(BaseModel):
                     connection.execute(text(table_dump["create_stmt"]))
                     # Reload the database metadata after creating the table
                     metadata.reflect(bind=self.engine)
+                    self_references[table_name] = table_dump.get(
+                        "self_references", False
+                    )
 
                 if "data" in table_dump:
                     # insert the data into the database
                     table = metadata.tables[table_name]
+                    if self_references.get(table_name, False):
+                        # If the table has self-referential foreign keys, we
+                        # need to disable the foreign key checks before inserting
+                        # the rows and re-enable them afterwards. This is because
+                        # the rows need to be inserted in the correct order to
+                        # satisfy the foreign key constraints and we don't sort
+                        # the rows by creation time in the backup.
+                        connection.execute(text("SET FOREIGN_KEY_CHECKS = 0"))
+
                     for row in table_dump["data"]:
                         # Convert column values to the correct type
                         for column in table.columns:
@@ -372,10 +380,18 @@ class MigrationUtils(BaseModel):
                                 row[column.name], "utf-8"
                             )
 
-                    # Insert the rows into the table
-                    connection.execute(
-                        table.insert().values(table_dump["data"])
-                    )
+                    # Insert the rows into the table in batches
+                    batch_size = 100
+                    for i in range(0, len(table_dump["data"]), batch_size):
+                        connection.execute(
+                            table.insert().values(
+                                table_dump["data"][i : i + batch_size]
+                            )
+                        )
+
+                    if table_dump.get("self_references", False):
+                        # Re-enable the foreign key checks after inserting the rows
+                        connection.execute(text("SET FOREIGN_KEY_CHECKS = 1"))
 
     def backup_database_to_file(self, dump_file: str) -> None:
         """Backup the database to a file.
zenml/zen_stores/migrations/versions/b73bc71f1106_remove_component_spec_path.py (new file)

@@ -0,0 +1,36 @@
+"""Remove component spec path [b73bc71f1106].
+
+Revision ID: b73bc71f1106
+Revises: ec6307720f92
+Create Date: 2024-11-29 09:36:33.089945
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "b73bc71f1106"
+down_revision = "ec6307720f92"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    """Upgrade database schema and/or data, creating a new revision."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("stack_component", schema=None) as batch_op:
+        batch_op.drop_column("component_spec_path")
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade database schema and/or data back to the previous revision."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("stack_component", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column("component_spec_path", sa.VARCHAR(), nullable=True)
+        )
+
+    # ### end Alembic commands ###
zenml/zen_stores/migrations/versions/cc269488e5a9_separate_run_metadata.py (new file)

@@ -0,0 +1,135 @@
+"""Separate run metadata into resource link table with new UUIDs.
+
+Revision ID: cc269488e5a9
+Revises: b73bc71f1106
+Create Date: 2024-11-12 09:46:46.587478
+"""
+
+import uuid
+
+import sqlalchemy as sa
+import sqlmodel
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "cc269488e5a9"
+down_revision = "b73bc71f1106"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    """Creates the 'run_metadata_resource' table and migrates data."""
+    op.create_table(
+        "run_metadata_resource",
+        sa.Column(
+            "id",
+            sqlmodel.sql.sqltypes.GUID(),
+            nullable=False,
+            primary_key=True,
+        ),
+        sa.Column("resource_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
+        sa.Column("resource_type", sa.String(length=255), nullable=False),
+        sa.Column(
+            "run_metadata_id",
+            sqlmodel.sql.sqltypes.GUID(),
+            sa.ForeignKey("run_metadata.id", ondelete="CASCADE"),
+            nullable=False,
+        ),
+    )
+
+    connection = op.get_bind()
+
+    run_metadata_data = connection.execute(
+        sa.text("""
+            SELECT id, resource_id, resource_type
+            FROM run_metadata
+        """)
+    ).fetchall()
+
+    # Prepare data with new UUIDs for bulk insert
+    resource_data = [
+        {
+            "id": str(uuid.uuid4()),  # Generate a new UUID for each row
+            "resource_id": row.resource_id,
+            "resource_type": row.resource_type,
+            "run_metadata_id": row.id,
+        }
+        for row in run_metadata_data
+    ]
+
+    # Perform bulk insert into `run_metadata_resource`
+    if resource_data:  # Only perform insert if there's data to migrate
+        op.bulk_insert(
+            sa.table(
+                "run_metadata_resource",
+                sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
+                sa.Column(
+                    "resource_id", sqlmodel.sql.sqltypes.GUID(), nullable=False
+                ),
+                sa.Column(
+                    "resource_type", sa.String(length=255), nullable=False
+                ),
+                sa.Column(
+                    "run_metadata_id",
+                    sqlmodel.sql.sqltypes.GUID(),
+                    nullable=False,
+                ),  # Changed to BIGINT
+            ),
+            resource_data,
+        )
+
+    op.drop_column("run_metadata", "resource_id")
+    op.drop_column("run_metadata", "resource_type")
+
+    op.add_column(
+        "run_metadata",
+        sa.Column(
+            "publisher_step_id", sqlmodel.sql.sqltypes.GUID(), nullable=True
+        ),
+    )
+
+
+def downgrade() -> None:
+    """Reverts the 'run_metadata_resource' table and migrates data back."""
+    # Recreate the `resource_id` and `resource_type` columns in `run_metadata`
+    op.add_column(
+        "run_metadata",
+        sa.Column("resource_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
+    )
+    op.add_column(
+        "run_metadata",
+        sa.Column("resource_type", sa.String(length=255), nullable=True),
+    )
+
+    # Migrate data back from `run_metadata_resource` to `run_metadata`
+    connection = op.get_bind()
+
+    # Fetch data from `run_metadata_resource`
+    run_metadata_resource_data = connection.execute(
+        sa.text("""
+            SELECT resource_id, resource_type, run_metadata_id
+            FROM run_metadata_resource
+        """)
+    ).fetchall()
+
+    # Update `run_metadata` with the data from `run_metadata_resource`
+    for row in run_metadata_resource_data:
+        connection.execute(
+            sa.text("""
+                UPDATE run_metadata
+                SET resource_id = :resource_id, resource_type = :resource_type
+                WHERE id = :run_metadata_id
+            """),
+            {
+                "resource_id": row.resource_id,
+                "resource_type": row.resource_type,
+                "run_metadata_id": row.run_metadata_id,
+            },
+        )
+
+    # Drop the `run_metadata_resource` table
+    op.drop_table("run_metadata_resource")
+
+    # Drop the cached column
+    op.drop_column("run_metadata", "publisher_step_id")
zenml/zen_stores/migrations/versions/ec6307720f92_simplify_model_version_links.py

@@ -59,12 +59,13 @@ def _migrate_artifact_type() -> None:
             {"id_": artifact_version_id, "type": "ServiceArtifact"}
         )
 
-    connection.execute(
-        sa.update(artifact_version_table).where(
-            artifact_version_table.c.id == sa.bindparam("id_")
-        ),
-        updates,
-    )
+    if updates:
+        connection.execute(
+            sa.update(artifact_version_table).where(
+                artifact_version_table.c.id == sa.bindparam("id_")
+            ),
+            updates,
+        )
 
 
 def upgrade() -> None:
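
The new `if updates:` guard matters because SQLAlchemy's executemany form (one statement, a list of parameter dictionaries) is rejected when the parameter list is empty. A self-contained sketch of that form, using explicit `bindparam()` names chosen so they cannot collide with column names:

```python
import sqlalchemy as sa

engine = sa.create_engine("sqlite://")
metadata = sa.MetaData()
artifact_version = sa.Table(
    "artifact_version",
    metadata,
    sa.Column("id", sa.String(36), primary_key=True),
    sa.Column("type", sa.String(50)),
)
metadata.create_all(engine)

updates = [{"id_": "abc", "type_": "ServiceArtifact"}]

with engine.begin() as connection:
    connection.execute(artifact_version.insert(), [{"id": "abc", "type": "old"}])
    if updates:  # skip the executemany entirely when there is nothing to do
        connection.execute(
            sa.update(artifact_version)
            .where(artifact_version.c.id == sa.bindparam("id_"))
            .values(type=sa.bindparam("type_")),
            updates,  # one UPDATE executed per parameter dictionary
        )
```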
zenml/zen_stores/rest_zen_store.py

@@ -4172,7 +4172,44 @@ class RestZenStore(BaseZenStore):
        )
 
         self._session = requests.Session()
-        retries = Retry(backoff_factor=0.1, connect=5)
+        # Retries are triggered for idempotent HTTP methods (GET, HEAD, PUT,
+        # OPTIONS and DELETE) on specific HTTP status codes:
+        #
+        #     500: Internal Server Error.
+        #     502: Bad Gateway.
+        #     503: Service Unavailable.
+        #     504: Gateway Timeout.
+        #
+        # This also handles connection level errors, if a connection attempt
+        # fails due to transient issues like:
+        #
+        #     DNS resolution errors.
+        #     Connection timeouts.
+        #     Network disruptions.
+        #
+        # Additional errors retried:
+        #
+        #     Read Timeouts: If the server does not send a response within
+        #     the timeout period.
+        #     Connection Refused: If the server refuses the connection.
+        #
+        retries = Retry(
+            connect=5,
+            read=8,
+            redirect=3,
+            status=10,
+            allowed_methods=["HEAD", "GET", "PUT", "DELETE", "OPTIONS"],
+            status_forcelist=[
+                408,  # Request Timeout
+                429,  # Too Many Requests
+                500,  # Internal Server Error
+                502,  # Bad Gateway
+                503,  # Service Unavailable
+                504,  # Gateway Timeout
+            ],
+            other=3,
+            backoff_factor=0.5,
+        )
         self._session.mount("https://", HTTPAdapter(max_retries=retries))
         self._session.mount("http://", HTTPAdapter(max_retries=retries))
         self._session.verify = self.config.verify_ssl
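
The new `Retry` configuration sets an independent budget per failure class and retries only idempotent methods. A standalone sketch of the same setup on a plain `requests` session, assuming urllib3 >= 1.26 (where `allowed_methods` replaced `method_whitelist`):

```python
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

retries = Retry(
    connect=5,    # connection-level failures (DNS, refused, timeouts)
    read=8,       # read timeouts
    redirect=3,   # too many redirects
    status=10,    # responses listed in status_forcelist
    other=3,      # other transient errors
    allowed_methods=["HEAD", "GET", "PUT", "DELETE", "OPTIONS"],
    status_forcelist=[408, 429, 500, 502, 503, 504],
    backoff_factor=0.5,  # exponential backoff between attempts
)

session = requests.Session()
session.mount("https://", HTTPAdapter(max_retries=retries))
session.mount("http://", HTTPAdapter(max_retries=retries))
response = session.get("https://example.com")  # retried per the policy above
```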
zenml/zen_stores/schemas/__init__.py

@@ -39,7 +39,10 @@ from zenml.zen_stores.schemas.pipeline_deployment_schemas import (
 from zenml.zen_stores.schemas.pipeline_run_schemas import PipelineRunSchema
 from zenml.zen_stores.schemas.pipeline_schemas import PipelineSchema
 from zenml.zen_stores.schemas.workspace_schemas import WorkspaceSchema
-from zenml.zen_stores.schemas.run_metadata_schemas import RunMetadataSchema
+from zenml.zen_stores.schemas.run_metadata_schemas import (
+    RunMetadataResourceSchema,
+    RunMetadataSchema,
+)
 from zenml.zen_stores.schemas.schedule_schema import ScheduleSchema
 from zenml.zen_stores.schemas.secret_schemas import SecretSchema
 from zenml.zen_stores.schemas.service_schemas import ServiceSchema
@@ -90,6 +93,7 @@ __all__ = [
     "PipelineDeploymentSchema",
     "PipelineRunSchema",
     "PipelineSchema",
+    "RunMetadataResourceSchema",
     "RunMetadataSchema",
     "ScheduleSchema",
     "SecretSchema",