zenml-nightly 0.80.2.dev20250414__py3-none-any.whl → 0.80.2.dev20250416__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- zenml/VERSION +1 -1
- zenml/artifacts/utils.py +7 -2
- zenml/cli/utils.py +13 -11
- zenml/config/compiler.py +1 -0
- zenml/config/global_config.py +1 -1
- zenml/config/pipeline_configurations.py +1 -0
- zenml/config/pipeline_run_configuration.py +1 -0
- zenml/config/server_config.py +7 -0
- zenml/constants.py +8 -0
- zenml/integrations/gcp/orchestrators/vertex_orchestrator.py +47 -5
- zenml/integrations/gcp/vertex_custom_job_parameters.py +15 -1
- zenml/integrations/kubernetes/flavors/kubernetes_step_operator_flavor.py +12 -0
- zenml/integrations/kubernetes/orchestrators/kube_utils.py +92 -0
- zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py +12 -3
- zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator_entrypoint.py +11 -65
- zenml/integrations/kubernetes/step_operators/kubernetes_step_operator.py +11 -3
- zenml/logging/step_logging.py +41 -21
- zenml/login/credentials_store.py +31 -0
- zenml/materializers/path_materializer.py +17 -2
- zenml/models/v2/base/base.py +8 -4
- zenml/models/v2/base/filter.py +1 -1
- zenml/models/v2/core/pipeline_run.py +19 -0
- zenml/orchestrators/step_launcher.py +2 -3
- zenml/orchestrators/step_runner.py +2 -2
- zenml/orchestrators/utils.py +2 -5
- zenml/pipelines/pipeline_context.py +1 -0
- zenml/pipelines/pipeline_decorator.py +4 -0
- zenml/pipelines/pipeline_definition.py +83 -22
- zenml/pipelines/run_utils.py +4 -0
- zenml/steps/utils.py +1 -1
- zenml/utils/io_utils.py +23 -0
- zenml/zen_server/auth.py +96 -64
- zenml/zen_server/cloud_utils.py +7 -1
- zenml/zen_server/download_utils.py +123 -0
- zenml/zen_server/jwt.py +0 -14
- zenml/zen_server/rbac/rbac_interface.py +10 -3
- zenml/zen_server/rbac/utils.py +13 -3
- zenml/zen_server/rbac/zenml_cloud_rbac.py +14 -8
- zenml/zen_server/routers/artifact_version_endpoints.py +86 -3
- zenml/zen_server/routers/auth_endpoints.py +5 -36
- zenml/zen_server/routers/pipeline_deployments_endpoints.py +63 -26
- zenml/zen_server/routers/runs_endpoints.py +57 -0
- zenml/zen_server/routers/users_endpoints.py +13 -8
- zenml/zen_server/template_execution/utils.py +3 -3
- zenml/zen_stores/migrations/versions/ff538a321a92_migrate_onboarding_state.py +123 -0
- zenml/zen_stores/rest_zen_store.py +16 -13
- zenml/zen_stores/schemas/pipeline_run_schemas.py +1 -0
- zenml/zen_stores/schemas/server_settings_schemas.py +4 -1
- zenml/zen_stores/sql_zen_store.py +18 -0
- {zenml_nightly-0.80.2.dev20250414.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/METADATA +2 -1
- {zenml_nightly-0.80.2.dev20250414.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/RECORD +54 -52
- {zenml_nightly-0.80.2.dev20250414.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/LICENSE +0 -0
- {zenml_nightly-0.80.2.dev20250414.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/WHEEL +0 -0
- {zenml_nightly-0.80.2.dev20250414.dist-info → zenml_nightly-0.80.2.dev20250416.dist-info}/entry_points.txt +0 -0
--- a/zenml/zen_server/routers/artifact_version_endpoints.py
+++ b/zenml/zen_server/routers/artifact_version_endpoints.py
@@ -13,13 +13,26 @@
 # permissions and limitations under the License.
 """Endpoint definitions for artifact versions."""
 
+import os
 from typing import List, Union
 from uuid import UUID
 
 from fastapi import APIRouter, Depends, Security
+from fastapi.responses import FileResponse
+from starlette.background import BackgroundTask
 
-from zenml.artifacts.utils import
-
+from zenml.artifacts.utils import (
+    load_artifact_visualization,
+)
+from zenml.constants import (
+    API,
+    ARTIFACT_VERSIONS,
+    BATCH,
+    DATA,
+    DOWNLOAD_TOKEN,
+    VERSION_1,
+    VISUALIZE,
+)
 from zenml.models import (
     ArtifactVersionFilter,
     ArtifactVersionRequest,
@@ -28,7 +41,16 @@ from zenml.models import (
     LoadedVisualization,
     Page,
 )
-from zenml.zen_server.auth import
+from zenml.zen_server.auth import (
+    AuthContext,
+    authorize,
+    generate_artifact_download_token,
+    verify_artifact_download_token,
+)
+from zenml.zen_server.download_utils import (
+    create_artifact_archive,
+    verify_artifact_is_downloadable,
+)
 from zenml.zen_server.exceptions import error_response
 from zenml.zen_server.rbac.endpoint_utils import (
     verify_permissions_and_batch_create_entity,
@@ -275,3 +297,64 @@ def get_artifact_visualization(
     return load_artifact_visualization(
         artifact=artifact, index=index, zen_store=store, encode_image=True
     )
+
+
+@artifact_version_router.get(
+    "/{artifact_version_id}" + DOWNLOAD_TOKEN,
+    responses={401: error_response, 404: error_response, 422: error_response},
+)
+@handle_exceptions
+def get_artifact_download_token(
+    artifact_version_id: UUID,
+    _: AuthContext = Security(authorize),
+) -> str:
+    """Get a download token for the artifact data.
+
+    Args:
+        artifact_version_id: ID of the artifact version for which to get the data.
+
+    Returns:
+        The download token for the artifact data.
+    """
+    artifact = verify_permissions_and_get_entity(
+        id=artifact_version_id, get_method=zen_store().get_artifact_version
+    )
+    verify_artifact_is_downloadable(artifact)
+
+    # The artifact download is handled in a separate tab by the browser. In this
+    # tab, we do not have the ability to set any headers and therefore cannot
+    # include the CSRF token in the request. To handle this, we instead generate
+    # a JWT token in this endpoint (which includes CSRF and RBAC checks) and
+    # then use that token to download the artifact data in a separate endpoint
+    # which only verifies this short-lived token.
+    return generate_artifact_download_token(artifact_version_id)
+
+
+@artifact_version_router.get(
+    "/{artifact_version_id}" + DATA,
+    responses={401: error_response, 404: error_response, 422: error_response},
+)
+@handle_exceptions
+def download_artifact_data(
+    artifact_version_id: UUID, token: str
+) -> FileResponse:
+    """Download the artifact data.
+
+    Args:
+        artifact_version_id: ID of the artifact version for which to get the data.
+        token: The token to authenticate the artifact download.
+
+    Returns:
+        The artifact data.
+    """
+    verify_artifact_download_token(token, artifact_version_id)
+
+    artifact = zen_store().get_artifact_version(artifact_version_id)
+    archive_path = create_artifact_archive(artifact)
+
+    return FileResponse(
+        archive_path,
+        media_type="application/gzip",
+        filename=f"{artifact.name}-{artifact.version}.tar.gz",
+        background=BackgroundTask(os.remove, archive_path),
+    )
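The new download flow is deliberately two-step: a normal authenticated request first obtains a short-lived JWT, and the actual transfer, which the browser runs in a separate tab where no CSRF headers can be set, authenticates with that token alone. A minimal client sketch of the flow, assuming a local server and assuming the `DOWNLOAD_TOKEN` and `DATA` route suffixes resolve to `/download_token` and `/data`; the URLs and placeholders are illustrative, not a documented client API.

```python
# Sketch: two-step artifact download against the new endpoints.
# SERVER, the route suffixes, and all placeholder values are assumptions.
import requests

SERVER = "http://localhost:8080"
VERSION_ID = "<ARTIFACT_VERSION_UUID>"
HEADERS = {"Authorization": "Bearer <BEARER_TOKEN>"}

# Step 1: an authenticated request returns a short-lived JWT download token.
token = requests.get(
    f"{SERVER}/api/v1/artifact_versions/{VERSION_ID}/download_token",
    headers=HEADERS,
).json()

# Step 2: the download itself authenticates with only that token, so it can
# run in a browser tab where no request headers can be set.
with requests.get(
    f"{SERVER}/api/v1/artifact_versions/{VERSION_ID}/data",
    params={"token": token},
    stream=True,
) as response:
    response.raise_for_status()
    with open("artifact.tar.gz", "wb") as f:
        for chunk in response.iter_content(chunk_size=8192):
            f.write(chunk)
```

The server builds the archive on disk and registers `os.remove` as a background task on the `FileResponse`, so the temporary file is cleaned up after the response is sent.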
--- a/zenml/zen_server/routers/auth_endpoints.py
+++ b/zenml/zen_server/routers/auth_endpoints.py
@@ -477,7 +477,6 @@ def api_token(
     expires_in: Optional[int] = None,
     schedule_id: Optional[UUID] = None,
     pipeline_run_id: Optional[UUID] = None,
-    step_run_id: Optional[UUID] = None,
     auth_context: AuthContext = Security(authorize),
 ) -> str:
     """Generate an API token for the current user.
@@ -506,7 +505,6 @@ def api_token(
         schedule_id: The ID of the schedule to scope the workload API token to.
         pipeline_run_id: The ID of the pipeline run to scope the workload API
             token to.
-        step_run_id: The ID of the step run to scope the workload API token to.
         auth_context: The authentication context.
 
     Returns:
@@ -522,10 +520,10 @@ def api_token(
         raise AuthorizationException("Not authenticated.")
 
     if token_type == APITokenType.GENERIC:
-        if schedule_id or pipeline_run_id
+        if schedule_id or pipeline_run_id:
             raise ValueError(
-                "Generic API tokens cannot be scoped to a schedule
-                "run
+                "Generic API tokens cannot be scoped to a schedule or pipeline "
+                "run."
             )
 
     config = server_config()
@@ -549,12 +547,10 @@ def api_token(
 
     schedule_id = schedule_id or token.schedule_id
     pipeline_run_id = pipeline_run_id or token.pipeline_run_id
-    step_run_id = step_run_id or token.step_run_id
 
-    if not pipeline_run_id and not schedule_id
+    if not pipeline_run_id and not schedule_id:
         raise ValueError(
-            "Workload API tokens must be scoped to a schedule
-            "or step run."
+            "Workload API tokens must be scoped to a schedule or pipeline run."
        )
 
     if schedule_id and token.schedule_id and schedule_id != token.schedule_id:
@@ -575,13 +571,6 @@ def api_token(
             f"pipeline run {token.pipeline_run_id}."
         )
 
-    if step_run_id and token.step_run_id and step_run_id != token.step_run_id:
-        raise AuthorizationException(
-            f"Unable to scope API token to step run {step_run_id}. The "
-            f"token used to authorize this request is already scoped to "
-            f"step run {token.step_run_id}."
-        )
-
     project_id: Optional[UUID] = None
 
     if schedule_id:
@@ -623,25 +612,6 @@ def api_token(
             "for security reasons."
         )
 
-    if step_run_id:
-        # The step run must exist and the step must not be concluded
-        try:
-            step_run = zen_store().get_run_step(step_run_id, hydrate=False)
-        except KeyError:
-            raise ValueError(
-                f"Step run {step_run_id} does not exist and API tokens cannot "
-                "be generated for non-existent step runs for security reasons."
-            )
-
-        project_id = step_run.project.id
-
-        if step_run.status.is_finished:
-            raise ValueError(
-                f"The execution of step run {step_run_id} has already "
-                "concluded and API tokens can no longer be generated for it "
-                "for security reasons."
-            )
-
     assert project_id is not None
     verify_permission(
         resource_type=ResourceType.PIPELINE_RUN,
@@ -656,7 +626,6 @@ def api_token(
         device=auth_context.device,
         schedule_id=schedule_id,
         pipeline_run_id=pipeline_run_id,
-        step_run_id=step_run_id,
         # Don't include the access token as a cookie in the response
         response=None,
         # Never expire the token
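With step-run scoping removed, a workload API token must now be scoped to either a schedule or a pipeline run. A hedged sketch of requesting one over HTTP, assuming the endpoint above is mounted at `/api/v1/api_token` and accepts its arguments as query parameters; both are inferences from the function signature, not confirmed routes.

```python
# Sketch: requesting a workload API token scoped to a pipeline run.
# The path and query parameter names mirror the signature above but are
# assumptions, not a documented client API.
import requests

SERVER = "http://localhost:8080"
HEADERS = {"Authorization": "Bearer <BEARER_TOKEN>"}

response = requests.get(
    f"{SERVER}/api/v1/api_token",
    headers=HEADERS,
    params={
        "token_type": "workload",
        "pipeline_run_id": "<PIPELINE_RUN_UUID>",
        # step_run_id is no longer accepted after this change
    },
)
response.raise_for_status()
workload_token = response.json()
```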
--- a/zenml/zen_server/routers/pipeline_deployments_endpoints.py
+++ b/zenml/zen_server/routers/pipeline_deployments_endpoints.py
@@ -19,11 +19,13 @@ from uuid import UUID
 from fastapi import APIRouter, Depends, Security
 
 from zenml.constants import API, PIPELINE_DEPLOYMENTS, VERSION_1
+from zenml.logging.step_logging import fetch_logs
 from zenml.models import (
     Page,
     PipelineDeploymentFilter,
     PipelineDeploymentRequest,
     PipelineDeploymentResponse,
+    PipelineRunFilter,
 )
 from zenml.zen_server.auth import AuthContext, authorize
 from zenml.zen_server.exceptions import error_response
@@ -179,33 +181,68 @@ def delete_deployment(
     )
 
 
 @router.get(
     "/{deployment_id}/logs",
     responses={
         401: error_response,
         404: error_response,
         422: error_response,
     },
 )
 @handle_exceptions
 def deployment_logs(
     deployment_id: UUID,
+    offset: int = 0,
+    length: int = 1024 * 1024 * 16,  # Default to 16MiB of data
     _: AuthContext = Security(authorize),
 ) -> str:
     """Get deployment logs.
 
     Args:
         deployment_id: ID of the deployment.
+        offset: The offset from which to start reading.
+        length: The amount of bytes that should be read.
 
     Returns:
         The deployment logs.
+
+    Raises:
+        KeyError: If no logs are available for the deployment.
     """
+    store = zen_store()
+
     deployment = verify_permissions_and_get_entity(
         id=deployment_id,
-        get_method=zen_store().get_deployment,
+        get_method=store.get_deployment,
         hydrate=True,
     )
 
-    return workload_manager().get_logs(workload_id=deployment.id)
+    if deployment.template_id and server_config().workload_manager_enabled:
+        return workload_manager().get_logs(workload_id=deployment.id)
+
+    # Get the last pipeline run for this deployment
+    pipeline_runs = store.list_runs(
+        runs_filter_model=PipelineRunFilter(
+            project=deployment.project.id,
+            sort_by="asc:created",
+            size=1,
+            deployment_id=deployment.id,
+        )
+    )
+
+    if len(pipeline_runs.items) == 0:
+        return ""
+
+    run = pipeline_runs.items[0]
+
+    logs = run.logs
+    if logs is None:
+        raise KeyError("No logs available for this deployment")
+
+    return fetch_logs(
+        zen_store=store,
+        artifact_store_id=logs.artifact_store_id,
+        logs_uri=logs.uri,
+        offset=offset,
+        length=length,
+    )
--- a/zenml/zen_server/routers/runs_endpoints.py
+++ b/zenml/zen_server/routers/runs_endpoints.py
@@ -29,6 +29,7 @@ from zenml.constants import (
 )
 from zenml.enums import ExecutionStatus, StackComponentType
 from zenml.logger import get_logger
+from zenml.logging.step_logging import fetch_logs
 from zenml.models import (
     Page,
     PipelineRunFilter,
@@ -55,6 +56,8 @@ from zenml.zen_server.routers.projects_endpoints import workspace_router
 from zenml.zen_server.utils import (
     handle_exceptions,
     make_dependable,
+    server_config,
+    workload_manager,
     zen_store,
 )
 
@@ -375,3 +378,57 @@ def refresh_run_status(
         f"The stack, the run '{run.id}' was executed on, is deleted."
     )
     run.refresh_run_status()
+
+
+@router.get(
+    "/{run_id}/logs",
+    responses={
+        401: error_response,
+        404: error_response,
+        422: error_response,
+    },
+)
+@handle_exceptions
+def run_logs(
+    run_id: UUID,
+    offset: int = 0,
+    length: int = 1024 * 1024 * 16,  # Default to 16MiB of data
+    _: AuthContext = Security(authorize),
+) -> str:
+    """Get pipeline run logs.
+
+    Args:
+        run_id: ID of the pipeline run.
+        offset: The offset from which to start reading.
+        length: The amount of bytes that should be read.
+
+    Returns:
+        The pipeline run logs.
+
+    Raises:
+        KeyError: If no logs are available for the pipeline run.
+    """
+    store = zen_store()
+
+    run = verify_permissions_and_get_entity(
+        id=run_id,
+        get_method=store.get_run,
+        hydrate=True,
+    )
+
+    if run.deployment_id:
+        deployment = store.get_deployment(run.deployment_id)
+        if deployment.template_id and server_config().workload_manager_enabled:
+            return workload_manager().get_logs(workload_id=deployment.id)
+
+    logs = run.logs
+    if logs is None:
+        raise KeyError("No logs available for this pipeline run")
+
+    return fetch_logs(
+        zen_store=store,
+        artifact_store_id=logs.artifact_store_id,
+        logs_uri=logs.uri,
+        offset=offset,
+        length=length,
+    )
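Because both log endpoints accept `offset` and `length`, a client can page through large logs instead of pulling the full 16 MiB default window in one request. A sketch of such a loop against the run endpoint, assuming a `/api/v1/runs` prefix and treating the returned string's length as the byte offset increment, which is exact only for ASCII logs.

```python
# Sketch: paging through pipeline run logs in 1 MiB windows.
# SERVER, the route prefix, and the run ID are assumptions.
import requests

SERVER = "http://localhost:8080"
HEADERS = {"Authorization": "Bearer <BEARER_TOKEN>"}
CHUNK = 1024 * 1024  # 1 MiB per request

offset = 0
while True:
    chunk = requests.get(
        f"{SERVER}/api/v1/runs/<RUN_UUID>/logs",
        headers=HEADERS,
        params={"offset": offset, "length": CHUNK},
    ).json()
    if not chunk:
        break  # an empty window means we reached the end of the logs
    print(chunk, end="")
    # len() counts characters, not bytes; good enough for ASCII log text.
    offset += len(chunk)
```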
--- a/zenml/zen_server/routers/users_endpoints.py
+++ b/zenml/zen_server/routers/users_endpoints.py
@@ -698,7 +698,7 @@ if server_config().auth_scheme != AuthScheme.EXTERNAL:
 if server_config().rbac_enabled:
 
     @router.post(
-        "/
+        "/resource_membership",
         responses={
             401: error_response,
             404: error_response,
@@ -707,16 +707,16 @@ if server_config().rbac_enabled:
     )
     @handle_exceptions
     def update_user_resource_membership(
-        user_name_or_id: Union[str, UUID],
         resource_type: str,
         resource_id: UUID,
         actions: List[str],
+        user_id: Optional[str] = None,
+        team_id: Optional[str] = None,
         auth_context: AuthContext = Security(authorize),
     ) -> None:
         """Updates resource memberships of a user.
 
         Args:
-            user_name_or_id: Name or ID of the user.
             resource_type: Type of the resource for which to update the
                 membership.
             resource_id: ID of the resource for which to update the membership.
@@ -724,16 +724,19 @@ if server_config().rbac_enabled:
                 the resource. If the user currently has permissions to perform
                 actions which are not passed in this list, the permissions will
                 be removed.
+            user_id: ID of the user for which to update the membership.
+            team_id: ID of the team for which to update the membership.
             auth_context: Authentication context.
 
         Raises:
             ValueError: If a user tries to update their own membership.
            KeyError: If no resource with the given type and ID exists.
         """
-
-
-
-
+        if (
+            user_id
+            and auth_context.user.external_user_id
+            and user_id == str(auth_context.user.external_user_id)
+        ):
             raise ValueError(
                 "Not allowed to call endpoint with the authenticated user."
             )
@@ -765,7 +768,9 @@ if server_config().rbac_enabled:
             verify_permission_for_model(model=model, action=Action(action))
 
         update_resource_membership(
-
+            sharing_user=auth_context.user,
             resource=resource,
             actions=[Action(action) for action in actions],
+            user_id=user_id,
+            team_id=team_id,
         )
--- a/zenml/zen_server/template_execution/utils.py
+++ b/zenml/zen_server/template_execution/utils.py
@@ -373,7 +373,7 @@ def deployment_request_from_template(
     )
 
     step_config_dict_base = pipeline_configuration.model_dump(
-        exclude={"name", "parameters", "tags"}
+        exclude={"name", "parameters", "tags", "enable_pipeline_logs"}
     )
     steps = {}
     for invocation_id, step in deployment.step_configurations.items():
@@ -411,14 +411,14 @@ def deployment_request_from_template(
         if unknown_parameters:
             raise ValueError(
                 "Run configuration contains the following unknown "
-                f"parameters for step {
+                f"parameters for step {invocation_id}: {unknown_parameters}."
             )
 
         missing_parameters = required_parameters - configured_parameters
         if missing_parameters:
             raise ValueError(
                 "Run configuration is missing the following required "
-                f"parameters for step {
+                f"parameters for step {invocation_id}: {missing_parameters}."
             )
 
         step_config = StepConfiguration.model_validate(step_config_dict)
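The added `enable_pipeline_logs` exclusion keeps a pipeline-level logging switch from being copied into every step's configuration when a template is instantiated. A minimal pydantic v2 sketch of the `model_dump(exclude=...)` mechanics; `PipelineConfig` here is an illustrative stand-in, not the ZenML model.

```python
# Sketch: excluding pipeline-level fields from the per-step config base.
from pydantic import BaseModel


class PipelineConfig(BaseModel):  # illustrative stand-in
    name: str
    parameters: dict = {}
    tags: list = []
    enable_pipeline_logs: bool = True
    enable_cache: bool = False


config = PipelineConfig(name="training")
step_config_base = config.model_dump(
    exclude={"name", "parameters", "tags", "enable_pipeline_logs"}
)
print(step_config_base)  # {'enable_cache': False}
```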
--- /dev/null
+++ b/zenml/zen_stores/migrations/versions/ff538a321a92_migrate_onboarding_state.py
@@ -0,0 +1,123 @@
+"""Migrate onboarding state [ff538a321a92].
+
+Revision ID: ff538a321a92
+Revises: 0.80.2
+Create Date: 2025-04-11 09:30:03.324310
+
+"""
+
+import json
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "ff538a321a92"
+down_revision = "0.80.2"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    """Upgrade database schema and/or data, creating a new revision."""
+    with op.batch_alter_table("server_settings", schema=None) as batch_op:
+        batch_op.alter_column(
+            "onboarding_state",
+            existing_type=sa.VARCHAR(),
+            type_=sa.TEXT(),
+            existing_nullable=True,
+        )
+
+    connection = op.get_bind()
+
+    meta = sa.MetaData()
+    meta.reflect(only=("server_settings",), bind=connection)
+
+    server_settings_table = sa.Table("server_settings", meta)
+
+    existing_onboarding_state = connection.execute(
+        sa.select(server_settings_table.c.onboarding_state)
+    ).scalar_one_or_none()
+
+    if not existing_onboarding_state:
+        return
+
+    state = json.loads(existing_onboarding_state)
+
+    meta = sa.MetaData()
+    meta.reflect(
+        only=(
+            "pipeline_run",
+            "stack_component",
+            "stack",
+            "stack_composition",
+            "pipeline_deployment",
+        ),
+        bind=connection,
+    )
+
+    pipeline_run_table = sa.Table("pipeline_run", meta)
+    stack_component_table = sa.Table("stack_component", meta)
+    stack_table = sa.Table("stack", meta)
+    stack_composition_table = sa.Table("stack_composition", meta)
+    pipeline_deployment_table = sa.Table("pipeline_deployment", meta)
+
+    stack_with_remote_artifact_store_count = connection.execute(
+        sa.select(sa.func.count(stack_table.c.id))
+        .where(stack_composition_table.c.stack_id == stack_table.c.id)
+        .where(
+            stack_composition_table.c.component_id
+            == stack_component_table.c.id
+        )
+        .where(stack_component_table.c.flavor != "local")
+        .where(stack_component_table.c.type == "artifact_store")
+    ).scalar()
+    if (
+        stack_with_remote_artifact_store_count
+        and stack_with_remote_artifact_store_count > 0
+    ):
+        state.append("stack_with_remote_artifact_store_created")
+
+    pipeline_run_with_remote_artifact_store_count = connection.execute(
+        sa.select(sa.func.count(pipeline_run_table.c.id))
+        .where(
+            pipeline_run_table.c.deployment_id
+            == pipeline_deployment_table.c.id
+        )
+        .where(pipeline_deployment_table.c.stack_id == stack_table.c.id)
+        .where(stack_composition_table.c.stack_id == stack_table.c.id)
+        .where(
+            stack_composition_table.c.component_id
+            == stack_component_table.c.id
+        )
+        .where(stack_component_table.c.flavor != "local")
+        .where(stack_component_table.c.type == "artifact_store")
+    ).scalar()
+    if (
+        pipeline_run_with_remote_artifact_store_count
+        and pipeline_run_with_remote_artifact_store_count > 0
+    ):
+        state.append("pipeline_run_with_remote_artifact_store")
+        state.append("production_setup_completed")
+
+    # Remove duplicate keys
+    state = list(set(state))
+
+    connection.execute(
+        sa.update(server_settings_table).values(
+            onboarding_state=json.dumps(state)
+        )
+    )
+
+
+def downgrade() -> None:
+    """Downgrade database schema and/or data back to the previous revision."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("server_settings", schema=None) as batch_op:
+        batch_op.alter_column(
+            "onboarding_state",
+            existing_type=sa.TEXT(),
+            type_=sa.VARCHAR(),
+            existing_nullable=True,
+        )
+    # ### end Alembic commands ###
--- a/zenml/zen_stores/rest_zen_store.py
+++ b/zenml/zen_stores/rest_zen_store.py
@@ -4074,15 +4074,6 @@ class RestZenStore(BaseZenStore):
         # Check if username and password are configured
         username, password = credentials_store.get_password(self.url)
 
-        api_key_hint = (
-            "\nHint: If you're getting this error in an automated, "
-            "non-interactive workload like a pipeline run or a CI/CD job, "
-            "you should use a service account API key to authenticate to "
-            "the server instead of temporary CLI login credentials. For "
-            "more information, see "
-            "https://docs.zenml.io/how-to/project-setup-and-management/connecting-to-zenml/connect-with-a-service-account"
-        )
-
         if api_key is not None:
             # An API key is configured. Use it as a password to
             # authenticate.
@@ -4119,14 +4110,12 @@ class RestZenStore(BaseZenStore):
                     "You need to be logged in to ZenML Pro in order to "
                     f"access the ZenML Pro server '{self.url}'. Please run "
                     "'zenml login' to log in or choose a different server."
-                    + api_key_hint
                 )
 
             elif pro_token.expired:
                 raise CredentialsNotValid(
                     "Your ZenML Pro login session has expired. "
                     "Please log in again using 'zenml login'."
-                    + api_key_hint
                 )
 
             data = {
@@ -4140,13 +4129,12 @@ class RestZenStore(BaseZenStore):
                 raise CredentialsNotValid(
                     "No valid credentials found. Please run 'zenml login "
                     f"--url {self.url}' to connect to the current server."
-                    + api_key_hint
                 )
             elif token.expired:
                 raise CredentialsNotValid(
                     "Your authentication to the current server has expired. "
                     "Please log in again using 'zenml login --url "
-                    f"{self.url}'."
+                    f"{self.url}'."
                 )
 
         response = self._handle_response(
@@ -4405,6 +4393,7 @@ class RestZenStore(BaseZenStore):
             # explicitly indicates that the credentials are not valid and
             # they can be thrown away or when the request is not
             # authenticated at all.
+            credentials_store = get_credentials_store()
 
             if self._api_token is None:
                 # The last request was not authenticated with an API
@@ -4416,6 +4405,20 @@ class RestZenStore(BaseZenStore):
                     "Re-authenticating and retrying..."
                 )
                 self.authenticate()
+            elif not credentials_store.can_login(self.url):
+                # The request failed either because we're not
+                # authenticated or our current credentials are not valid
+                # anymore.
+                logger.error(
+                    "The current token is no longer valid, and "
+                    "it is not possible to generate a new token using the "
+                    "configured credentials. Please run "
+                    f"`zenml login --url {self.url}` to re-authenticate to "
+                    "the server or authenticate using an API key. See "
+                    "https://docs.zenml.io/how-to/project-setup-and-management/connecting-to-zenml/connect-with-a-service-account "
+                    "for more information."
+                )
+                raise e
             elif not re_authenticated:
                 # The last request was authenticated with an API token
                 # that was rejected by the server. We attempt a
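The new `elif` branch stops the client from looping on re-authentication when the credentials store has no way to mint a fresh token for the server: it logs actionable guidance and re-raises. A stand-alone sketch of that guard pattern; `CredentialsStore` below is a stand-in, not the `zenml.login.credentials_store` API.

```python
# Sketch: fail fast on 401 when no long-lived credentials are stored,
# instead of retrying re-authentication that is doomed to fail.
class CredentialsStore:  # stand-in for illustration only
    def __init__(self, api_keys: dict):
        self._api_keys = api_keys

    def can_login(self, url: str) -> bool:
        # A new token can only be generated if long-lived credentials
        # (e.g. an API key) are stored for this server URL.
        return url in self._api_keys


def retry_after_401(store: CredentialsStore, url: str, error: Exception) -> None:
    if not store.can_login(url):
        # No way to recover without user action: surface the error.
        raise error
    print(f"Re-authenticating against {url} and retrying...")


store = CredentialsStore(api_keys={})
try:
    retry_after_401(store, "https://zenml.example.com", RuntimeError("401"))
except RuntimeError as err:
    print(f"Giving up: {err}")
```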
--- a/zenml/zen_stores/schemas/pipeline_run_schemas.py
+++ b/zenml/zen_stores/schemas/pipeline_run_schemas.py
@@ -412,6 +412,7 @@ class PipelineRunSchema(NamedSchema, RunMetadataInterface, table=True):
         resources = PipelineRunResponseResources(
             model_version=model_version,
             tags=[tag.to_model() for tag in self.tags],
+            logs=self.logs.to_model() if self.logs else None,
         )
 
         return PipelineRunResponse(