zenml-nightly 0.83.1.dev20250625__py3-none-any.whl → 0.83.1.dev20250627__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
- zenml/VERSION +1 -1
- zenml/cli/base.py +3 -2
- zenml/cli/service_connectors.py +5 -12
- zenml/cli/stack.py +1 -5
- zenml/cli/utils.py +8 -52
- zenml/client.py +40 -42
- zenml/integrations/aws/container_registries/aws_container_registry.py +3 -1
- zenml/integrations/aws/flavors/sagemaker_orchestrator_flavor.py +1 -1
- zenml/integrations/databricks/orchestrators/databricks_orchestrator_entrypoint_config.py +8 -3
- zenml/integrations/integration.py +23 -58
- zenml/models/__init__.py +2 -0
- zenml/models/v2/core/pipeline_run.py +1 -0
- zenml/models/v2/core/service_connector.py +178 -108
- zenml/service_connectors/service_connector.py +11 -61
- zenml/service_connectors/service_connector_utils.py +4 -2
- zenml/stack/stack_component.py +1 -1
- zenml/utils/package_utils.py +111 -1
- zenml/zen_server/routers/service_connectors_endpoints.py +7 -22
- zenml/zen_stores/migrations/versions/5bb25e95849c_add_internal_secrets.py +62 -0
- zenml/zen_stores/rest_zen_store.py +57 -4
- zenml/zen_stores/schemas/pipeline_run_schemas.py +10 -10
- zenml/zen_stores/schemas/secret_schemas.py +5 -0
- zenml/zen_stores/schemas/service_connector_schemas.py +16 -14
- zenml/zen_stores/schemas/step_run_schemas.py +44 -14
- zenml/zen_stores/secrets_stores/service_connector_secrets_store.py +4 -1
- zenml/zen_stores/sql_zen_store.py +238 -122
- zenml/zen_stores/zen_store_interface.py +9 -1
- {zenml_nightly-0.83.1.dev20250625.dist-info → zenml_nightly-0.83.1.dev20250627.dist-info}/METADATA +1 -1
- {zenml_nightly-0.83.1.dev20250625.dist-info → zenml_nightly-0.83.1.dev20250627.dist-info}/RECORD +32 -32
- zenml/utils/integration_utils.py +0 -34
- {zenml_nightly-0.83.1.dev20250625.dist-info → zenml_nightly-0.83.1.dev20250627.dist-info}/LICENSE +0 -0
- {zenml_nightly-0.83.1.dev20250625.dist-info → zenml_nightly-0.83.1.dev20250627.dist-info}/WHEEL +0 -0
- {zenml_nightly-0.83.1.dev20250625.dist-info → zenml_nightly-0.83.1.dev20250627.dist-info}/entry_points.txt +0 -0
zenml/utils/package_utils.py
CHANGED
@@ -13,10 +13,26 @@
 # permissions and limitations under the License.
 """Utility functions for the package."""
 
-
+import sys
+from typing import Dict, List, Optional, Union, cast
 
 import requests
 from packaging import version
+from packaging.markers import default_environment
+from packaging.requirements import Requirement
+
+if sys.version_info < (3, 10):
+    from importlib_metadata import (
+        PackageNotFoundError,
+        distribution,
+        distributions,
+    )
+else:
+    from importlib.metadata import (
+        PackageNotFoundError,
+        distribution,
+        distributions,
+    )
 
 
 def is_latest_zenml_version() -> bool:
@@ -87,3 +103,97 @@ def clean_requirements(requirements: List[str]) -> List[str]:
     ):
         cleaned[package] = req
     return sorted(cleaned.values())
+
+
+def requirement_installed(requirement: Union[str, Requirement]) -> bool:
+    """Check if a requirement is installed.
+
+    Args:
+        requirement: A requirement string.
+
+    Returns:
+        True if the requirement is installed, False otherwise.
+    """
+    if isinstance(requirement, str):
+        requirement = Requirement(requirement)
+
+    try:
+        dist = distribution(requirement.name)
+    except PackageNotFoundError:
+        return False
+
+    return requirement.specifier.contains(dist.version)
+
+
+def get_dependencies(
+    requirement: Requirement, recursive: bool = False
+) -> List[Requirement]:
+    """Get the dependencies of a requirement.
+
+    Args:
+        requirement: A requirement string.
+        recursive: Whether to include recursive dependencies.
+
+    Returns:
+        A list of requirements.
+    """
+    dist = distribution(requirement.name)
+    marker_environment = cast(Dict[str, str], default_environment())
+
+    dependencies = []
+
+    for req in dist.requires or []:
+        parsed_req = Requirement(req)
+
+        if parsed_req.marker:
+            should_include = False
+
+            marker_environment["extra"] = ""
+            if parsed_req.marker.evaluate(environment=marker_environment):
+                should_include = True
+
+            if not should_include:
+                # Not required without extras, so check if it's required with
+                # any of the requested extras
+                for extra in requirement.extras:
+                    marker_environment["extra"] = extra
+                    if parsed_req.marker.evaluate(
+                        environment=marker_environment
+                    ):
+                        should_include = True
+                        break
+
+            if should_include:
+                dependencies.append(parsed_req)
+        else:
+            # No marker means always include
+            dependencies.append(parsed_req)
+
+    if recursive:
+        for dependency in dependencies:
+            dependencies.extend(get_dependencies(dependency, recursive=True))
+
+    return dependencies
+
+
+def get_package_information(
+    package_names: Optional[List[str]] = None,
+) -> Dict[str, str]:
+    """Get package information.
+
+    Args:
+        package_names: Filter for specific package names. If no package names
+            are provided, all installed packages are returned.
+
+    Returns:
+        A dictionary of the name:version for the package names passed in or
+        all packages and their respective versions.
+    """
+    if package_names:
+        return {
+            dist.name: dist.version
+            for dist in distributions()
+            if dist.name in package_names
+        }
+
+    return {dist.name: dist.version for dist in distributions()}
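The helpers added above can be exercised directly. A minimal sketch, assuming this nightly build is installed; the requirement strings and package names below are illustrative inputs only:

from packaging.requirements import Requirement

from zenml.utils.package_utils import (
    get_dependencies,
    get_package_information,
    requirement_installed,
)

# True if an installed distribution satisfies the specifier.
print(requirement_installed("packaging>=21.0"))

# Name/version pairs, optionally filtered to specific packages.
print(get_package_information(["zenml-nightly"]))

# Direct (or recursive) dependencies of an installed distribution, honoring
# environment markers and any extras requested on the requirement.
for dep in get_dependencies(Requirement("requests"), recursive=False):
    print(dep)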
zenml/zen_server/routers/service_connectors_endpoints.py
CHANGED
@@ -152,6 +152,7 @@ def list_service_connectors(
         resource_type=ResourceType.SERVICE_CONNECTOR,
         list_method=zen_store().list_service_connectors,
         hydrate=hydrate,
+        expand_secrets=expand_secrets,
     )
 
     if expand_secrets:
@@ -163,9 +164,6 @@ def list_service_connectors(
         )
 
         for connector in connectors.items:
-            if not connector.secret_id:
-                continue
-
             if allowed_ids is None or is_owned_by_authenticated_user(
                 connector
             ):
@@ -174,14 +172,8 @@ def list_service_connectors(
                 pass
            elif connector.id not in allowed_ids:
                 # The user is not allowed to read secret values for this
-                # connector. We
-
-                continue
-
-            secret = zen_store().get_secret(secret_id=connector.secret_id)
-
-            # Update the connector configuration with the secret.
-            connector.configuration.update(secret.secret_values)
+                # connector. We remove the secrets from the connector.
+                connector.remove_secrets()
 
     return connectors
 
@@ -253,21 +245,14 @@ def get_service_connector(
         The requested service connector.
     """
     connector = zen_store().get_service_connector(
-        connector_id, hydrate=hydrate
+        connector_id, hydrate=hydrate, expand_secrets=expand_secrets
     )
     verify_permission_for_model(connector, action=Action.READ)
 
-    if (
-
-        and connector.secret_id
-        and has_permissions_for_model(
-            connector, action=Action.READ_SECRET_VALUE
-        )
+    if expand_secrets and not has_permissions_for_model(
+        connector, action=Action.READ_SECRET_VALUE
     ):
-
-
-        # Update the connector configuration with the secret.
-        connector.configuration.update(secret.secret_values)
+        connector.remove_secrets()
 
     return dehydrate_response_model(connector)
 
zenml/zen_stores/migrations/versions/5bb25e95849c_add_internal_secrets.py
ADDED
@@ -0,0 +1,62 @@
+"""add internal secrets [5bb25e95849c].
+
+Revision ID: 5bb25e95849c
+Revises: 0.83.1
+Create Date: 2025-06-23 20:49:44.184630
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "5bb25e95849c"
+down_revision = "0.83.1"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    """Upgrade database schema and/or data, creating a new revision."""
+    # Step 1: Add internal column as nullable
+    with op.batch_alter_table("secret", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("internal", sa.Boolean(), nullable=True))
+
+    # Step 2: Update existing records based on service connector references
+    # If a secret is referenced by a service_connector.secret_id, make it internal=True
+    # Otherwise, set it to internal=False
+    connection = op.get_bind()
+
+    # Update secrets that are referenced by service connectors to be internal
+    connection.execute(
+        sa.text("""
+            UPDATE secret
+            SET internal = TRUE
+            WHERE id IN (
+                SELECT DISTINCT secret_id
+                FROM service_connector
+                WHERE secret_id IS NOT NULL
+            );
+        """)
+    )
+
+    # Update all other secrets to be not internal
+    connection.execute(
+        sa.text("""
+            UPDATE secret
+            SET internal = FALSE
+            WHERE internal IS NULL;
+        """)
+    )
+
+    # Step 3: Make internal column non-nullable
+    with op.batch_alter_table("secret", schema=None) as batch_op:
+        batch_op.alter_column(
+            "internal", existing_type=sa.Boolean(), nullable=False
+        )
+
+
+def downgrade() -> None:
+    """Downgrade database schema and/or data back to the previous revision."""
+    with op.batch_alter_table("secret", schema=None) as batch_op:
+        batch_op.drop_column("internal")
zenml/zen_stores/rest_zen_store.py
CHANGED
@@ -2413,10 +2413,21 @@ class RestZenStore(BaseZenStore):
             response_model=ServiceConnectorResponse,
         )
         self._populate_connector_type(connector_model)
+        # Call this to properly split the secrets from the configuration
+        try:
+            connector_model.validate_configuration()
+        except ValueError as e:
+            logger.error(
+                f"Error validating connector configuration for "
+                f"{connector_model.name}: {e}"
+            )
         return connector_model
 
     def get_service_connector(
-        self,
+        self,
+        service_connector_id: UUID,
+        hydrate: bool = True,
+        expand_secrets: bool = False,
     ) -> ServiceConnectorResponse:
         """Gets a specific service connector.
 
@@ -2424,6 +2435,8 @@ class RestZenStore(BaseZenStore):
             service_connector_id: The ID of the service connector to get.
             hydrate: Flag deciding whether to hydrate the output model(s)
                 by including metadata fields in the response.
+            expand_secrets: Flag deciding whether to include the secrets
+                associated with the service connector.
 
         Returns:
             The requested service connector, if it was found.
@@ -2432,15 +2445,25 @@ class RestZenStore(BaseZenStore):
             resource_id=service_connector_id,
             route=SERVICE_CONNECTORS,
             response_model=ServiceConnectorResponse,
-            params={"
+            params={"hydrate": hydrate, "expand_secrets": expand_secrets},
         )
         self._populate_connector_type(connector_model)
+        if expand_secrets:
+            try:
+                # Call this to properly split the secrets from the configuration
+                connector_model.validate_configuration()
+            except ValueError as e:
+                logger.error(
+                    f"Error validating connector configuration for "
+                    f"{connector_model.name}: {e}"
+                )
         return connector_model
 
     def list_service_connectors(
         self,
         filter_model: ServiceConnectorFilter,
         hydrate: bool = False,
+        expand_secrets: bool = False,
     ) -> Page[ServiceConnectorResponse]:
         """List all service connectors.
 
@@ -2449,6 +2472,8 @@ class RestZenStore(BaseZenStore):
                 params.
             hydrate: Flag deciding whether to hydrate the output model(s)
                 by including metadata fields in the response.
+            expand_secrets: Flag deciding whether to include the secrets
+                associated with the service connector.
 
         Returns:
             A page of all service connectors.
@@ -2457,9 +2482,19 @@ class RestZenStore(BaseZenStore):
             route=SERVICE_CONNECTORS,
             response_model=ServiceConnectorResponse,
             filter_model=filter_model,
-            params={"
+            params={"hydrate": hydrate, "expand_secrets": expand_secrets},
         )
         self._populate_connector_type(*connector_models.items)
+        if expand_secrets:
+            # Call this to properly split the secrets from the configuration
+            for connector_model in connector_models.items:
+                try:
+                    connector_model.validate_configuration()
+                except ValueError as e:
+                    logger.error(
+                        f"Error validating connector configuration for "
+                        f"{connector_model.name}: {e}"
+                    )
         return connector_models
 
     def update_service_connector(
@@ -2499,6 +2534,14 @@ class RestZenStore(BaseZenStore):
             route=SERVICE_CONNECTORS,
         )
         self._populate_connector_type(connector_model)
+        # Call this to properly split the secrets from the configuration
+        try:
+            connector_model.validate_configuration()
+        except ValueError as e:
+            logger.error(
+                f"Error validating connector configuration for "
+                f"{connector_model.name}: {e}"
+            )
         return connector_model
 
     def delete_service_connector(self, service_connector_id: UUID) -> None:
@@ -2665,6 +2708,14 @@ class RestZenStore(BaseZenStore):
 
         connector = ServiceConnectorResponse.model_validate(response_body)
         self._populate_connector_type(connector)
+        # Call this to properly split the secrets from the configuration
+        try:
+            connector.validate_configuration()
+        except ValueError as e:
+            logger.error(
+                f"Error validating connector configuration for connector client "
+                f"{connector.name}: {e}"
+            )
         return connector
 
     def list_service_connector_resources(
@@ -2711,7 +2762,9 @@ class RestZenStore(BaseZenStore):
 
             # Retrieve the resource list locally
            assert resources.id is not None
-            connector = self.get_service_connector(
+            connector = self.get_service_connector(
+                resources.id, expand_secrets=True
+            )
             connector_instance = (
                 service_connector_registry.instantiate_connector(
                     model=connector
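With these store changes, secret expansion becomes an explicit opt-in for callers. A minimal usage sketch, assuming a client connected to a ZenML server so that the active store is a RestZenStore; the connector UUID below is a placeholder, not a real value:

from uuid import UUID

from zenml.client import Client

connector_id = UUID("00000000-0000-0000-0000-000000000000")  # placeholder
store = Client().zen_store

# Default behaviour: the configuration comes back without secret values.
connector = store.get_service_connector(connector_id)

# Explicit opt-in: secrets are only expanded for callers that hold the
# READ_SECRET_VALUE permission; otherwise the server strips them again.
connector = store.get_service_connector(connector_id, expand_secrets=True)
print(connector.configuration)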
zenml/zen_stores/schemas/pipeline_run_schemas.py
CHANGED
@@ -20,7 +20,7 @@ from uuid import UUID
 
 from pydantic import ConfigDict
 from sqlalchemy import UniqueConstraint
-from sqlalchemy.orm import
+from sqlalchemy.orm import selectinload
 from sqlalchemy.sql.base import ExecutableOption
 from sqlmodel import TEXT, Column, Field, Relationship
 
@@ -259,19 +259,19 @@ class PipelineRunSchema(NamedSchema, RunMetadataInterface, table=True):
         from zenml.zen_stores.schemas import ModelVersionSchema
 
         options = [
-
+            selectinload(jl_arg(PipelineRunSchema.deployment)).joinedload(
                 jl_arg(PipelineDeploymentSchema.pipeline)
             ),
-
+            selectinload(jl_arg(PipelineRunSchema.deployment)).joinedload(
                 jl_arg(PipelineDeploymentSchema.stack)
             ),
-
+            selectinload(jl_arg(PipelineRunSchema.deployment)).joinedload(
                 jl_arg(PipelineDeploymentSchema.build)
             ),
-
+            selectinload(jl_arg(PipelineRunSchema.deployment)).joinedload(
                 jl_arg(PipelineDeploymentSchema.schedule)
             ),
-
+            selectinload(jl_arg(PipelineRunSchema.deployment)).joinedload(
                 jl_arg(PipelineDeploymentSchema.code_reference)
             ),
         ]
@@ -286,14 +286,14 @@ class PipelineRunSchema(NamedSchema, RunMetadataInterface, table=True):
         if include_resources:
             options.extend(
                 [
-
+                    selectinload(
                         jl_arg(PipelineRunSchema.model_version)
                     ).joinedload(
                         jl_arg(ModelVersionSchema.model), innerjoin=True
                     ),
-
-
-
+                    selectinload(jl_arg(PipelineRunSchema.logs)),
+                    selectinload(jl_arg(PipelineRunSchema.user)),
+                    selectinload(jl_arg(PipelineRunSchema.tags)),
                 ]
             )
 
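The new eager-loading options chain selectinload() with joinedload(). A self-contained toy illustration of that SQLAlchemy pattern (stand-in models, not the ZenML schemas): the related rows are fetched in a second SELECT ... IN query, and the chained joinedload() joins the next relationship into that second query:

from sqlalchemy import ForeignKey, create_engine, select
from sqlalchemy.orm import (
    DeclarativeBase,
    Mapped,
    Session,
    joinedload,
    mapped_column,
    relationship,
    selectinload,
)


class Base(DeclarativeBase):
    pass


class Pipeline(Base):
    __tablename__ = "pipeline"
    id: Mapped[int] = mapped_column(primary_key=True)


class Deployment(Base):
    __tablename__ = "deployment"
    id: Mapped[int] = mapped_column(primary_key=True)
    pipeline_id: Mapped[int] = mapped_column(ForeignKey("pipeline.id"))
    pipeline: Mapped[Pipeline] = relationship()


class Run(Base):
    __tablename__ = "run"
    id: Mapped[int] = mapped_column(primary_key=True)
    deployment_id: Mapped[int] = mapped_column(ForeignKey("deployment.id"))
    deployment: Mapped[Deployment] = relationship()


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    # One SELECT for runs, one SELECT ... IN for deployments, with the
    # deployments' pipelines joined into that second query.
    runs = session.scalars(
        select(Run).options(
            selectinload(Run.deployment).joinedload(Deployment.pipeline)
        )
    ).all()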
zenml/zen_stores/schemas/secret_schemas.py
CHANGED
@@ -67,6 +67,8 @@ class SecretSchema(NamedSchema, table=True):
 
     private: bool
 
+    internal: bool = Field(default=False)
+
     values: Optional[bytes] = Field(sa_column=Column(TEXT, nullable=True))
 
     user_id: UUID = build_foreign_key_field(
@@ -191,11 +193,13 @@ class SecretSchema(NamedSchema, table=True):
     def from_request(
         cls,
         secret: SecretRequest,
+        internal: bool = False,
     ) -> "SecretSchema":
         """Create a `SecretSchema` from a `SecretRequest`.
 
         Args:
             secret: The `SecretRequest` from which to create the schema.
+            internal: Whether the secret is internal.
 
         Returns:
             The created `SecretSchema`.
@@ -209,6 +213,7 @@ class SecretSchema(NamedSchema, table=True):
             # SQL secret store will call `store_secret_values` to store the
             # values separately if SQL is used as the secrets store.
             values=None,
+            internal=internal,
         )
 
     def update(
zenml/zen_stores/schemas/service_connector_schemas.py
CHANGED
@@ -25,6 +25,7 @@ from sqlalchemy.sql.base import ExecutableOption
 from sqlmodel import Field, Relationship
 
 from zenml.models import (
+    ServiceConnectorConfiguration,
     ServiceConnectorRequest,
     ServiceConnectorResponse,
     ServiceConnectorResponseBody,
@@ -168,6 +169,7 @@ class ServiceConnectorSchema(NamedSchema, table=True):
             The created `ServiceConnectorSchema`.
         """
         assert connector_request.user is not None, "User must be set."
+        configuration = connector_request.configuration.non_secrets
         return cls(
             user_id=connector_request.user,
             name=connector_request.name,
@@ -180,9 +182,9 @@ class ServiceConnectorSchema(NamedSchema, table=True):
             resource_id=connector_request.resource_id,
             supports_instances=connector_request.supports_instances,
             configuration=base64.b64encode(
-                json.dumps(
+                json.dumps(configuration).encode("utf-8")
             )
-            if
+            if configuration
             else None,
             secret_id=secret_id,
             expires_at=connector_request.expires_at,
@@ -226,15 +228,16 @@ class ServiceConnectorSchema(NamedSchema, table=True):
                 self.expiration_seconds = None
                 continue
             if field == "configuration":
-
-
-
-
+                if connector_update.configuration is not None:
+                    configuration = connector_update.configuration.non_secrets
+                if configuration is not None:
+                    self.configuration = (
+                        base64.b64encode(
+                            json.dumps(configuration).encode("utf-8")
+                        )
+                        if configuration
+                        else None
                 )
-                )
-                if connector_update.configuration
-                else None
-                )
             elif field == "resource_types":
                 self.resource_types = base64.b64encode(
                     json.dumps(connector_update.resource_types).encode("utf-8")
@@ -286,12 +289,11 @@ class ServiceConnectorSchema(NamedSchema, table=True):
         metadata = None
         if include_metadata:
             metadata = ServiceConnectorResponseMetadata(
-                configuration=
-                base64.b64decode(self.configuration).decode()
+                configuration=ServiceConnectorConfiguration(
+                    **json.loads(base64.b64decode(self.configuration).decode())
                 )
                 if self.configuration
-                else
-                secret_id=self.secret_id,
+                else ServiceConnectorConfiguration(),
                 expiration_seconds=self.expiration_seconds,
                 labels=self.labels_dict,
             )
zenml/zen_stores/schemas/step_run_schemas.py
CHANGED
@@ -21,7 +21,7 @@ from uuid import UUID
 from pydantic import ConfigDict
 from sqlalchemy import TEXT, Column, String, UniqueConstraint
 from sqlalchemy.dialects.mysql import MEDIUMTEXT
-from sqlalchemy.orm import joinedload
+from sqlalchemy.orm import joinedload, selectinload
 from sqlalchemy.sql.base import ExecutableOption
 from sqlmodel import Field, Relationship, SQLModel
 
@@ -50,6 +50,7 @@ from zenml.zen_stores.schemas.base_schemas import NamedSchema
 from zenml.zen_stores.schemas.constants import MODEL_VERSION_TABLENAME
 from zenml.zen_stores.schemas.pipeline_deployment_schemas import (
     PipelineDeploymentSchema,
+    StepConfigurationSchema,
 )
 from zenml.zen_stores.schemas.pipeline_run_schemas import PipelineRunSchema
 from zenml.zen_stores.schemas.project_schemas import ProjectSchema
@@ -187,6 +188,14 @@ class StepRunSchema(NamedSchema, RunMetadataInterface, table=True):
     original_step_run: Optional["StepRunSchema"] = Relationship(
         sa_relationship_kwargs={"remote_side": "StepRunSchema.id"}
     )
+    step_configuration_schema: Optional["StepConfigurationSchema"] = (
+        Relationship(
+            sa_relationship_kwargs=dict(
+                viewonly=True,
+                primaryjoin="and_(foreign(StepConfigurationSchema.name) == StepRunSchema.name, foreign(StepConfigurationSchema.deployment_id) == StepRunSchema.deployment_id)",
+            ),
+        )
+    )
 
     model_config = ConfigDict(protected_namespaces=())  # type: ignore[assignment]
 
@@ -209,17 +218,25 @@ class StepRunSchema(NamedSchema, RunMetadataInterface, table=True):
         Returns:
             A list of query options.
         """
-        from zenml.zen_stores.schemas import
+        from zenml.zen_stores.schemas import (
+            ArtifactVersionSchema,
+            ModelVersionSchema,
+        )
 
         options = [
-
-
+            selectinload(jl_arg(StepRunSchema.deployment)).load_only(
+                jl_arg(PipelineDeploymentSchema.pipeline_configuration)
+            ),
+            selectinload(jl_arg(StepRunSchema.pipeline_run)).load_only(
+                jl_arg(PipelineRunSchema.start_time)
+            ),
+            joinedload(jl_arg(StepRunSchema.step_configuration_schema)),
         ]
 
         if include_metadata:
             options.extend(
                 [
-
+                    selectinload(jl_arg(StepRunSchema.logs)),
                     # joinedload(jl_arg(StepRunSchema.parents)),
                     # joinedload(jl_arg(StepRunSchema.run_metadata)),
                 ]
@@ -228,12 +245,28 @@ class StepRunSchema(NamedSchema, RunMetadataInterface, table=True):
         if include_resources:
             options.extend(
                 [
-
+                    selectinload(
+                        jl_arg(StepRunSchema.model_version)
+                    ).joinedload(
                         jl_arg(ModelVersionSchema.model), innerjoin=True
                     ),
-
-
-
+                    selectinload(jl_arg(StepRunSchema.user)),
+                    selectinload(jl_arg(StepRunSchema.input_artifacts))
+                    .joinedload(
+                        jl_arg(StepRunInputArtifactSchema.artifact_version),
+                        innerjoin=True,
+                    )
+                    .joinedload(
+                        jl_arg(ArtifactVersionSchema.artifact), innerjoin=True
+                    ),
+                    selectinload(jl_arg(StepRunSchema.output_artifacts))
+                    .joinedload(
+                        jl_arg(StepRunOutputArtifactSchema.artifact_version),
+                        innerjoin=True,
+                    )
+                    .joinedload(
+                        jl_arg(ArtifactVersionSchema.artifact), innerjoin=True
+                    ),
                 ]
             )
 
@@ -290,10 +323,7 @@ class StepRunSchema(NamedSchema, RunMetadataInterface, table=True):
         """
         step = None
         if self.deployment is not None:
-
-                include=[self.name]
-            )
-            if step_configurations:
+            if self.step_configuration_schema:
                 pipeline_configuration = (
                     PipelineConfiguration.model_validate_json(
                         self.deployment.pipeline_configuration
@@ -304,7 +334,7 @@ class StepRunSchema(NamedSchema, RunMetadataInterface, table=True):
                     inplace=True,
                 )
                 step = Step.from_dict(
-                    json.loads(
+                    json.loads(self.step_configuration_schema.config),
                     pipeline_configuration=pipeline_configuration,
                 )
         if not step and self.step_configuration:
zenml/zen_stores/secrets_stores/service_connector_secrets_store.py
CHANGED
@@ -29,6 +29,7 @@ from pydantic import Field, model_validator
 from zenml.config.secrets_store_config import SecretsStoreConfiguration
 from zenml.logger import get_logger
 from zenml.models import (
+    ServiceConnectorConfiguration,
     ServiceConnectorRequest,
 )
 from zenml.service_connectors.service_connector import ServiceConnector
@@ -133,7 +134,9 @@ class ServiceConnectorSecretsStore(BaseSecretsStore):
             connector_type=self.SERVICE_CONNECTOR_TYPE,
             resource_types=[self.SERVICE_CONNECTOR_RESOURCE_TYPE],
             auth_method=self.config.auth_method,
-            configuration=
+            configuration=ServiceConnectorConfiguration(
+                **self.config.auth_config
+            ),
         )
         base_connector = service_connector_registry.instantiate_connector(
             model=request