zenml-nightly 0.82.0.dev20250512__py3-none-any.whl → 0.82.0.dev20250514__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- zenml/VERSION +1 -1
- zenml/cli/login.py +1 -1
- zenml/config/build_configuration.py +9 -0
- zenml/config/docker_settings.py +97 -16
- zenml/constants.py +1 -0
- zenml/integrations/kubernetes/flavors/kubernetes_orchestrator_flavor.py +12 -1
- zenml/integrations/kubernetes/orchestrators/kube_utils.py +24 -1
- zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator.py +3 -0
- zenml/integrations/kubernetes/orchestrators/kubernetes_orchestrator_entrypoint.py +21 -5
- zenml/integrations/kubernetes/orchestrators/manifest_utils.py +13 -0
- zenml/orchestrators/step_launcher.py +4 -0
- zenml/orchestrators/step_run_utils.py +30 -1
- zenml/pipelines/build_utils.py +8 -0
- zenml/utils/pipeline_docker_image_builder.py +133 -7
- zenml/utils/tag_utils.py +3 -3
- zenml/zen_server/feature_gate/endpoint_utils.py +9 -12
- zenml/zen_server/feature_gate/feature_gate_interface.py +4 -6
- zenml/zen_server/feature_gate/zenml_cloud_feature_gate.py +8 -9
- zenml/zen_server/rbac/endpoint_utils.py +1 -1
- zenml/zen_server/rbac/rbac_sql_zen_store.py +2 -2
- zenml/zen_server/routers/run_templates_endpoints.py +10 -1
- zenml/zen_server/template_execution/utils.py +12 -1
- zenml/zen_stores/migrations/alembic.py +37 -13
- zenml/zen_stores/rest_zen_store.py +6 -2
- zenml/zen_stores/sql_zen_store.py +3 -6
- {zenml_nightly-0.82.0.dev20250512.dist-info → zenml_nightly-0.82.0.dev20250514.dist-info}/METADATA +2 -2
- {zenml_nightly-0.82.0.dev20250512.dist-info → zenml_nightly-0.82.0.dev20250514.dist-info}/RECORD +30 -30
- {zenml_nightly-0.82.0.dev20250512.dist-info → zenml_nightly-0.82.0.dev20250514.dist-info}/LICENSE +0 -0
- {zenml_nightly-0.82.0.dev20250512.dist-info → zenml_nightly-0.82.0.dev20250514.dist-info}/WHEEL +0 -0
- {zenml_nightly-0.82.0.dev20250512.dist-info → zenml_nightly-0.82.0.dev20250514.dist-info}/entry_points.txt +0 -0
zenml/utils/pipeline_docker_image_builder.py
CHANGED
@@ -68,6 +68,11 @@ PIP_DEFAULT_ARGS = {
 }
 UV_DEFAULT_ARGS = {"no-cache-dir": None}
 
+DEFAULT_PYPROJECT_EXPORT_COMMANDS = [
+    "uv export --format=requirements-txt --directory={directory} --no-hashes --no-emit-project",
+    "poetry export --format=requirements.txt --directory={directory} --without-hashes",
+]
+
 
 class PipelineDockerImageBuilder:
     """Builds Docker images to run a ZenML pipeline."""
@@ -156,10 +161,11 @@ class PipelineDockerImageBuilder:
         requires_zenml_build = any(
             [
                 docker_settings.requirements,
+                docker_settings.pyproject_path,
                 docker_settings.required_integrations,
-                docker_settings.required_hub_plugins,
                 docker_settings.replicate_local_python_environment,
                 docker_settings.install_stack_requirements,
+                docker_settings.local_project_install_command,
                 docker_settings.apt_packages,
                 docker_settings.environment,
                 include_files,
@@ -434,8 +440,56 @@ class PipelineDockerImageBuilder:
         - Packages installed in the local Python environment
         - Requirements defined by stack integrations
         - Requirements defined by user integrations
+        - Requirements exported from a pyproject.toml
         - User-defined requirements
         """
+        implicit_requirements = False
+        pyproject_path = docker_settings.pyproject_path
+        requirements = docker_settings.requirements
+
+        if not any(
+            [
+                docker_settings.replicate_local_python_environment,
+                requirements,
+                pyproject_path,
+            ]
+        ):
+            root = source_utils.get_source_root()
+            requirements_path = os.path.join(root, "requirements.txt")
+            pyproject_file_path = os.path.join(root, "pyproject.toml")
+
+            if os.path.exists(requirements_path):
+                implicit_requirements = True
+                requirements = requirements_path
+            elif os.path.exists(pyproject_file_path):
+                implicit_requirements = True
+                pyproject_path = pyproject_file_path
+
+        if (
+            implicit_requirements
+            and docker_settings.disable_automatic_requirements_detection
+        ):
+            # TODO: This is only temporary to log a warning notifying users
+            # that we will soon switch the default behavior to
+            # `disable_automatic_requirements_detection=False`. Remove and
+            # adjust the logic once we've made this change.
+            if log:
+                logger.warning(
+                    "Detected `requirements.txt` or `pyproject.toml` files in "
+                    "the source root. In future versions of ZenML, these will "
+                    "be automatically picked up and installed in Docker images "
+                    "by default. To disable this behavior and keep the "
+                    "current behavior, set "
+                    "`DockerSettings.disable_automatic_requirements_detection` "
+                    "to `True`. If you want to enable this behavior right away, "
+                    "you can do so by setting "
+                    "`DockerSettings.disable_automatic_requirements_detection` "
+                    "to `False`."
+                )
+            implicit_requirements = False
+            requirements = None
+            pyproject_path = None
+
         requirements_files: List[Tuple[str, str, List[str]]] = []
 
         # Generate requirements file for the local environment if configured
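The hunk above adds implicit detection of a `requirements.txt` or `pyproject.toml` in the source root. Going by the warning text in this diff, the behavior is controlled through the new `disable_automatic_requirements_detection` field on `DockerSettings`. A minimal sketch of opting in or out (the field name is taken from the diff; wiring it via the `settings` dict is the usual ZenML pattern, not something this diff shows):

from zenml import pipeline
from zenml.config import DockerSettings

# Sketch only: opt in to (or out of) automatic pickup of requirements.txt /
# pyproject.toml from the source root.
docker_settings = DockerSettings(
    disable_automatic_requirements_detection=False,  # field name taken from the diff above
)

@pipeline(settings={"docker": docker_settings})
def my_pipeline() -> None:
    ...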
@@ -447,6 +501,10 @@ class PipelineDockerImageBuilder:
                 command = (
                     docker_settings.replicate_local_python_environment.command
                 )
+            elif isinstance(
+                docker_settings.replicate_local_python_environment, bool
+            ):
+                command = PythonEnvironmentExportMethod.PIP_FREEZE.command
             else:
                 command = " ".join(
                     docker_settings.replicate_local_python_environment
@@ -520,9 +578,69 @@ class PipelineDockerImageBuilder:
                     ", ".join(f"`{r}`" for r in integration_requirements_list),
                 )
 
+        if pyproject_path:
+            path = os.path.abspath(pyproject_path)
+
+            if not os.path.exists(path):
+                raise FileNotFoundError(
+                    f"Pyproject file {path} does not exist."
+                )
+
+            def _run_command(command: str) -> str:
+                command = command.format(directory=os.path.dirname(path))
+
+                result = subprocess.run(
+                    command,
+                    capture_output=True,
+                    check=True,
+                    shell=True,  # nosec
+                )
+                return result.stdout.decode()
+
+            if docker_settings.pyproject_export_command:
+                command = " ".join(docker_settings.pyproject_export_command)
+
+                try:
+                    pyproject_requirements = _run_command(command)
+                except subprocess.CalledProcessError as e:
+                    raise RuntimeError(
+                        "Failed to export pyproject dependencies with "
+                        f"command `{command}`: {e.stderr.decode()}"
+                    )
+            else:
+                command_errors = {}
+
+                for command in DEFAULT_PYPROJECT_EXPORT_COMMANDS:
+                    try:
+                        pyproject_requirements = _run_command(command)
+                    except subprocess.CalledProcessError as e:
+                        command_errors[command] = e.stderr.decode()
+                    else:
+                        break
+                else:
+                    raise RuntimeError(
+                        "Failed to export pyproject dependencies with the "
+                        f"following commands: {command_errors}. Please specify "
+                        "a working command to export your pyproject.toml "
+                        "dependencies to a requirements.txt formatted file "
+                        "using `DockerSettings.pyproject_export_command`."
+                    )
+
+            requirements_files.append(
+                (".zenml_pyproject_requirements", pyproject_requirements, [])
+            )
+            if log:
+                logger.info(
+                    "- %s python packages from file `%s`",
+                    "Implicitly including"
+                    if implicit_requirements
+                    else "Including",
+                    path,
+                )
+
         # Generate/Read requirements file for user-defined requirements
-        if isinstance(
-            path = os.path.abspath(
+        if isinstance(requirements, str):
+            path = os.path.abspath(requirements)
             try:
                 user_requirements = io_utils.read_file_contents_as_string(path)
             except FileNotFoundError as e:
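The new pyproject handling above exports dependencies to requirements format, either with a user-supplied `pyproject_export_command` (joined with spaces and run through the shell, with `{directory}` substituted by the builder) or by falling back to the `uv`/`poetry` defaults. A hedged sketch of how these settings appear to be wired; the field names come from this diff, while the list-of-arguments shape is only inferred from the `" ".join(...)` call above:

from zenml.config import DockerSettings

docker_settings = DockerSettings(
    # Path to the pyproject.toml whose dependencies should be installed.
    pyproject_path="pyproject.toml",
    # Optional custom export command; the builder joins the parts with spaces
    # and fills in {directory} with the pyproject.toml's directory.
    pyproject_export_command=[
        "uv",
        "export",
        "--format=requirements-txt",
        "--directory={directory}",
        "--no-hashes",
        "--no-emit-project",
    ],
)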
@@ -531,15 +649,18 @@ class PipelineDockerImageBuilder:
                 ) from e
             if log:
                 logger.info(
-                    "-
+                    "- %s user-defined requirements from file `%s`",
+                    "Implicitly including"
+                    if implicit_requirements
+                    else "Including",
                     path,
                 )
-        elif isinstance(
-            user_requirements = "\n".join(
+        elif isinstance(requirements, List):
+            user_requirements = "\n".join(requirements)
             if log:
                 logger.info(
                     "- Including user-defined requirements: %s",
-                    ", ".join(f"`{r}`" for r in
+                    ", ".join(f"`{r}`" for r in requirements),
                 )
         else:
             user_requirements = None
@@ -638,6 +759,11 @@ class PipelineDockerImageBuilder:
         lines.append("COPY . .")
         lines.append("RUN chmod -R a+rw .")
 
+        if docker_settings.local_project_install_command:
+            lines.append(
+                f"RUN {docker_settings.local_project_install_command}"
+            )
+
         if docker_settings.user:
             # Change file ownership to specified user
             lines.append(f"RUN chown -R {docker_settings.user} .")
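Taken together, the changes to this file add two more knobs that end up in the generated Dockerfile: `local_project_install_command` is emitted as a `RUN` instruction after the project files are copied, and `replicate_local_python_environment` now also accepts a plain boolean that falls back to the pip-freeze export method. A hedged usage sketch, assuming both are regular `DockerSettings` fields as the diff implies:

from zenml.config import DockerSettings

docker_settings = DockerSettings(
    # Emitted verbatim as `RUN pip install -e .` after `COPY . .` (see hunk above).
    local_project_install_command="pip install -e .",
    # A bare True now selects the pip-freeze export method.
    replicate_local_python_environment=True,
)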
zenml/utils/tag_utils.py
CHANGED
@@ -348,10 +348,10 @@ def add_tags(
         if isinstance(tag, Tag):
             tag_model = client.get_tag(tag.name)
 
-            if tag.exclusive != tag_model.exclusive:
+            if bool(tag.exclusive) != tag_model.exclusive:
                 raise ValueError(
-                    f"The tag `{tag.name}` is
-                    f"{'exclusive' if tag_model.exclusive else 'non-exclusive'} "
+                    f"The tag `{tag.name}` is "
+                    f"{'an exclusive' if tag_model.exclusive else 'a non-exclusive'} "
                     "tag. Please update it before attaching it to a resource."
                 )
             if tag.cascade is not None:
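The `bool(tag.exclusive)` change matters when `exclusive` was never set: previously a `Tag` created with `exclusive=None` compared unequal to a non-exclusive stored tag and raised, while the new comparison treats `None` as `False`. A small illustration of the comparison itself (plain Python, not the ZenML API):

stored_exclusive = False    # what the stored tag reports
requested_exclusive = None  # Tag(...) created without an explicit `exclusive` value

# Old check: None != False -> True, so a spurious ValueError was raised.
print(requested_exclusive != stored_exclusive)        # True
# New check: bool(None) != False -> False, so the tag can be attached.
print(bool(requested_exclusive) != stored_exclusive)  # False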
zenml/zen_server/feature_gate/endpoint_utils.py
CHANGED
@@ -15,47 +15,44 @@
 
 from uuid import UUID
 
-from zenml.zen_server.rbac.models import ResourceType
 from zenml.zen_server.utils import feature_gate, server_config
 
 
-def check_entitlement(
+def check_entitlement(feature: str) -> None:
     """Queries the feature gate to see if the operation falls within the Pro workspaces entitlements.
 
     Raises an exception if the user is not entitled to create an instance of the
     resource. Otherwise, simply returns.
 
     Args:
-
+        feature: The feature to check for.
     """
     if not server_config().feature_gate_enabled:
         return
-    return feature_gate().check_entitlement(
+    return feature_gate().check_entitlement(feature=feature)
 
 
-def report_usage(
+def report_usage(feature: str, resource_id: UUID) -> None:
     """Reports the creation/usage of a feature/resource.
 
     Args:
-
+        feature: The feature to report a usage for.
         resource_id: ID of the resource that was created.
     """
     if not server_config().feature_gate_enabled:
         return
-    feature_gate().report_event(
-        resource=resource_type, resource_id=resource_id
-    )
+    feature_gate().report_event(feature=feature, resource_id=resource_id)
 
 
-def report_decrement(
+def report_decrement(feature: str, resource_id: UUID) -> None:
     """Reports the deletion/deactivation of a feature/resource.
 
     Args:
-
+        feature: The feature to report a decrement in count for.
        resource_id: ID of the resource that was deleted.
     """
     if not server_config().feature_gate_enabled:
         return
     feature_gate().report_event(
-
+        feature=feature, resource_id=resource_id, is_decrement=True
     )
zenml/zen_server/feature_gate/feature_gate_interface.py
CHANGED
@@ -16,18 +16,16 @@
 from abc import ABC, abstractmethod
 from uuid import UUID
 
-from zenml.zen_server.rbac.models import ResourceType
-
 
 class FeatureGateInterface(ABC):
     """Feature gate interface definition."""
 
     @abstractmethod
-    def check_entitlement(self,
+    def check_entitlement(self, feature: str) -> None:
         """Checks if a user is entitled to create a resource.
 
         Args:
-
+            feature: The feature the user wants to use.
 
         Raises:
             UpgradeRequiredError in case a subscription limit is reached
@@ -36,14 +34,14 @@ class FeatureGateInterface(ABC):
     @abstractmethod
     def report_event(
         self,
-
+        feature: str,
         resource_id: UUID,
         is_decrement: bool = False,
     ) -> None:
         """Reports the usage of a feature to the aggregator backend.
 
         Args:
-
+            feature: The feature the user used.
             resource_id: ID of the resource that was created/deleted.
             is_decrement: In case this event reports an actual decrement of usage
         """
zenml/zen_server/feature_gate/zenml_cloud_feature_gate.py
CHANGED
@@ -25,7 +25,6 @@ from zenml.zen_server.cloud_utils import cloud_connection
 from zenml.zen_server.feature_gate.feature_gate_interface import (
     FeatureGateInterface,
 )
-from zenml.zen_server.rbac.models import ResourceType
 
 logger = get_logger(__name__)
 
@@ -47,7 +46,7 @@ class RawUsageEvent(BaseModel):
     organization_id: str = Field(
         description="The organization that this usage can be attributed to.",
    )
-    feature:
+    feature: str = Field(
         description="The feature whose usage is being reported.",
     )
     total: int = Field(
@@ -66,22 +65,22 @@ class ZenMLCloudFeatureGateInterface(FeatureGateInterface):
         """Initialize the object."""
         self._connection = cloud_connection()
 
-    def check_entitlement(self,
+    def check_entitlement(self, feature: str) -> None:
         """Checks if a user is entitled to create a resource.
 
         Args:
-
+            feature: The feature the user wants to use.
 
         Raises:
             SubscriptionUpgradeRequiredError: in case a subscription limit is reached
         """
         try:
             response = self._connection.get(
-                endpoint=ENTITLEMENT_ENDPOINT + "/" +
+                endpoint=ENTITLEMENT_ENDPOINT + "/" + feature, params=None
             )
         except SubscriptionUpgradeRequiredError:
             raise SubscriptionUpgradeRequiredError(
-                f"Your subscription reached its `{
+                f"Your subscription reached its `{feature}` limit. Please "
                 f"upgrade your subscription or reach out to us."
             )
 
@@ -94,20 +93,20 @@ class ZenMLCloudFeatureGateInterface(FeatureGateInterface):
 
     def report_event(
         self,
-
+        feature: str,
         resource_id: UUID,
         is_decrement: bool = False,
     ) -> None:
         """Reports the usage of a feature to the aggregator backend.
 
         Args:
-
+            feature: The feature the user used.
             resource_id: ID of the resource that was created/deleted.
             is_decrement: In case this event reports an actual decrement of usage
         """
         data = RawUsageEvent(
             organization_id=ORGANIZATION_ID,
-            feature=
+            feature=feature,
             total=1 if not is_decrement else -1,
             metadata={
                 "workspace_id": str(server_config.get_external_server_id()),
zenml/zen_server/rbac/endpoint_utils.py
CHANGED
@@ -182,7 +182,7 @@ def verify_permissions_and_get_or_create_entity(
     def _pre_creation_hook() -> None:
         verify_permission_for_model(model=request_model, action=Action.CREATE)
         if resource_type and needs_usage_increment:
-            check_entitlement(
+            check_entitlement(feature=resource_type)
 
     model, created = get_or_create_method(request_model, _pre_creation_hook)
 
zenml/zen_server/rbac/rbac_sql_zen_store.py
CHANGED
@@ -67,7 +67,7 @@ class RBACSqlZenStore(SqlZenStore):
                     action=Action.CREATE,
                    project_id=model_request.project,
                 )
-                check_entitlement(
+                check_entitlement(feature=ResourceType.MODEL)
             except Exception as e:
                 allow_model_creation = False
                 error = e
@@ -92,7 +92,7 @@ class RBACSqlZenStore(SqlZenStore):
 
         if created:
             report_usage(
-
+                feature=ResourceType.MODEL, resource_id=model_response.id
             )
         else:
             verify_permission_for_model(model_response, action=Action.READ)
zenml/zen_server/routers/run_templates_endpoints.py
CHANGED
@@ -25,7 +25,12 @@ from fastapi import (
 from zenml.analytics.enums import AnalyticsEvent
 from zenml.analytics.utils import track_handler
 from zenml.config.pipeline_run_configuration import PipelineRunConfiguration
-from zenml.constants import
+from zenml.constants import (
+    API,
+    RUN_TEMPLATE_TRIGGERS_FEATURE_NAME,
+    RUN_TEMPLATES,
+    VERSION_1,
+)
 from zenml.models import (
     Page,
     PipelineRunResponse,
@@ -36,6 +41,9 @@ from zenml.models import (
 )
 from zenml.zen_server.auth import AuthContext, authorize
 from zenml.zen_server.exceptions import error_response
+from zenml.zen_server.feature_gate.endpoint_utils import (
+    check_entitlement,
+)
 from zenml.zen_server.rbac.endpoint_utils import (
     verify_permissions_and_create_entity,
     verify_permissions_and_delete_entity,
@@ -269,6 +277,7 @@ if server_config().workload_manager_enabled:
             action=Action.CREATE,
             project_id=template.project.id,
         )
+        check_entitlement(feature=RUN_TEMPLATE_TRIGGERS_FEATURE_NAME)
 
         return run_template(
             template=template,
zenml/zen_server/template_execution/utils.py
CHANGED
@@ -24,6 +24,7 @@ from zenml.constants import (
     ENV_ZENML_RUNNER_IMAGE_DISABLE_UV,
     ENV_ZENML_RUNNER_PARENT_IMAGE,
     ENV_ZENML_RUNNER_POD_TIMEOUT,
+    RUN_TEMPLATE_TRIGGERS_FEATURE_NAME,
     handle_bool_env_var,
     handle_int_env_var,
 )
@@ -49,7 +50,11 @@ from zenml.pipelines.run_utils import (
 )
 from zenml.stack.flavor import Flavor
 from zenml.utils import pydantic_utils, requirements_utils, settings_utils
+from zenml.utils.time_utils import utc_now
 from zenml.zen_server.auth import AuthContext, generate_access_token
+from zenml.zen_server.feature_gate.endpoint_utils import (
+    report_usage,
+)
 from zenml.zen_server.template_execution.runner_entrypoint_configuration import (
     RunnerEntrypointConfiguration,
 )
@@ -190,6 +195,11 @@ def run_template(
     placeholder_run = create_placeholder_run(deployment=new_deployment)
     assert placeholder_run
 
+    report_usage(
+        feature=RUN_TEMPLATE_TRIGGERS_FEATURE_NAME,
+        resource_id=placeholder_run.id,
+    )
+
     # We create an API token scoped to the pipeline run that never expires
     api_token = generate_access_token(
         user_id=auth_context.user.id,
@@ -300,7 +310,8 @@ def run_template(
             zen_store().update_run(
                 run_id=placeholder_run.id,
                 run_update=PipelineRunUpdate(
-                    status=ExecutionStatus.FAILED
+                    status=ExecutionStatus.FAILED,
+                    end_time=utc_now(),
                 ),
             )
             raise
zenml/zen_stores/migrations/alembic.py
CHANGED
@@ -39,7 +39,7 @@ exclude_tables = ["sqlite_sequence"]
 
 
 def include_object(
-    object: Any, name: str, type_: str, *args: Any, **kwargs: Any
+    object: Any, name: Optional[str], type_: str, *args: Any, **kwargs: Any
 ) -> bool:
     """Function used to exclude tables from the migration scripts.
 
@@ -135,6 +135,7 @@ class Alembic:
         fn_context_args["fn"] = fn
 
         with self.engine.connect() as connection:
+            # Configure the context with our metadata
             self.environment_context.configure(
                 connection=connection,
                 target_metadata=self.metadata,
@@ -180,9 +181,15 @@ class Alembic:
         def do_get_current_rev(rev: _RevIdType, context: Any) -> List[Any]:
             nonlocal current_revisions
 
-
-
-            ):
+            # Handle rev parameter in a way that's compatible with different alembic versions
+            rev_input: Any
+            if isinstance(rev, str):
+                rev_input = rev
+            else:
+                rev_input = tuple(str(r) for r in rev)
+
+            # Get current revision(s)
+            for r in self.script_directory.get_all_current(rev_input):
                 if r is None:
                     continue
                 current_revisions.append(r.revision)
@@ -200,7 +207,13 @@ class Alembic:
         """
 
         def do_stamp(rev: _RevIdType, context: Any) -> List[Any]:
-
+            # Handle rev parameter in a way that's compatible with different alembic versions
+            if isinstance(rev, str):
+                return self.script_directory._stamp_revs(revision, rev)
+            else:
+                # Convert to tuple for compatibility
+                rev_tuple = tuple(str(r) for r in rev)
+                return self.script_directory._stamp_revs(revision, rev_tuple)
 
         self.run_migrations(do_stamp)
 
@@ -212,10 +225,16 @@ class Alembic:
         """
 
         def do_upgrade(rev: _RevIdType, context: Any) -> List[Any]:
-
-
-
-
+            # Handle rev parameter in a way that's compatible with different alembic versions
+            if isinstance(rev, str):
+                return self.script_directory._upgrade_revs(revision, rev)
+            else:
+                if rev:
+                    # Use first element or revs for compatibility
+                    return self.script_directory._upgrade_revs(
+                        revision, str(rev[0])
+                    )
+                return []
 
         self.run_migrations(do_upgrade)
 
@@ -227,9 +246,14 @@ class Alembic:
         """
 
         def do_downgrade(rev: _RevIdType, context: Any) -> List[Any]:
-
-
-
-
+            # Handle rev parameter in a way that's compatible with different alembic versions
+            if isinstance(rev, str):
+                return self.script_directory._downgrade_revs(revision, rev)
+            else:
+                if rev:
+                    return self.script_directory._downgrade_revs(
+                        revision, str(rev[0])
+                    )
+                return self.script_directory._downgrade_revs(revision, None)
 
         self.run_migrations(do_downgrade)
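The three `do_*` callbacks above all guard against `rev` arriving either as a single revision string (older Alembic) or as a sequence of revision ids (newer Alembic, where `_RevIdType` is roughly a union of `str` and a sequence of `str`). A standalone sketch of that normalization pattern; the helper name is illustrative and not part of ZenML:

from typing import Sequence, Tuple, Union

def normalize_rev(rev: Union[str, Sequence[str]]) -> Union[str, Tuple[str, ...]]:
    """Mirror the isinstance check used in the hunks above."""
    if isinstance(rev, str):
        return rev
    return tuple(str(r) for r in rev)

print(normalize_rev("abc123"))              # 'abc123'
print(normalize_rev(["abc123", "def456"]))  # ('abc123', 'def456')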
zenml/zen_stores/rest_zen_store.py
CHANGED
@@ -4413,11 +4413,15 @@ class RestZenStore(BaseZenStore):
                     "The current token is no longer valid, and "
                     "it is not possible to generate a new token using the "
                     "configured credentials. Please run "
-                    f"`zenml login
-                    "the server or authenticate using
+                    f"`zenml login {self.url}` to "
+                    "re-authenticate to the server or authenticate using "
+                    "an API key. See "
                     "https://docs.zenml.io/how-to/project-setup-and-management/connecting-to-zenml/connect-with-a-service-account "
                     "for more information."
                 )
+                # Clear the current token from the credentials store to
+                # force a new authentication flow next time.
+                get_credentials_store().clear_token(self.url)
                 raise e
             elif not re_authenticated:
                 # The last request was authenticated with an API token
zenml/zen_stores/sql_zen_store.py
CHANGED
@@ -11354,13 +11354,10 @@ class SqlZenStore(BaseZenStore):
                 except EntityExistsError:
                     if isinstance(tag, tag_utils.Tag):
                         tag_schema = self._get_tag_schema(tag.name, session)
-                        if (
-                            tag.exclusive is not None
-                            and tag.exclusive != tag_schema.exclusive
-                        ):
+                        if bool(tag.exclusive) != tag_schema.exclusive:
                             raise ValueError(
-                                f"Tag `{tag_schema.name}` has been defined as
-                                f"{'exclusive' if tag_schema.exclusive else 'non-exclusive'} "
+                                f"Tag `{tag_schema.name}` has been defined as "
+                                f"{'an exclusive' if tag_schema.exclusive else 'a non-exclusive'} "
                                 "tag. Please update it before attaching it to resources."
                             )
                         else:
{zenml_nightly-0.82.0.dev20250512.dist-info → zenml_nightly-0.82.0.dev20250514.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: zenml-nightly
-Version: 0.82.0.
+Version: 0.82.0.dev20250514
 Summary: ZenML: Write production-ready ML code.
 License: Apache-2.0
 Keywords: machine learning,production,pipeline,mlops,devops
@@ -41,7 +41,7 @@ Provides-Extra: terraform
 Provides-Extra: vertex
 Requires-Dist: Jinja2 ; extra == "server"
 Requires-Dist: adlfs (>=2021.10.0) ; extra == "adlfs"
-Requires-Dist: alembic (>=1.8.1
+Requires-Dist: alembic (>=1.8.1,<=1.15.2)
 Requires-Dist: aws-profile-manager (>=0.5.0) ; extra == "connectors-aws"
 Requires-Dist: azure-ai-ml (==1.23.1) ; extra == "azureml"
 Requires-Dist: azure-identity (>=1.4.0) ; extra == "secrets-azure" or extra == "connectors-azure"