flowyml 1.7.1__py3-none-any.whl → 1.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowyml/assets/base.py +15 -0
- flowyml/assets/dataset.py +570 -17
- flowyml/assets/metrics.py +5 -0
- flowyml/assets/model.py +1052 -15
- flowyml/cli/main.py +709 -0
- flowyml/cli/stack_cli.py +138 -25
- flowyml/core/__init__.py +17 -0
- flowyml/core/executor.py +231 -37
- flowyml/core/image_builder.py +129 -0
- flowyml/core/log_streamer.py +227 -0
- flowyml/core/orchestrator.py +59 -4
- flowyml/core/pipeline.py +65 -13
- flowyml/core/routing.py +558 -0
- flowyml/core/scheduler.py +88 -5
- flowyml/core/step.py +9 -1
- flowyml/core/step_grouping.py +49 -35
- flowyml/core/types.py +407 -0
- flowyml/integrations/keras.py +247 -82
- flowyml/monitoring/alerts.py +10 -0
- flowyml/monitoring/notifications.py +104 -25
- flowyml/monitoring/slack_blocks.py +323 -0
- flowyml/plugins/__init__.py +251 -0
- flowyml/plugins/alerters/__init__.py +1 -0
- flowyml/plugins/alerters/slack.py +168 -0
- flowyml/plugins/base.py +752 -0
- flowyml/plugins/config.py +478 -0
- flowyml/plugins/deployers/__init__.py +22 -0
- flowyml/plugins/deployers/gcp_cloud_run.py +200 -0
- flowyml/plugins/deployers/sagemaker.py +306 -0
- flowyml/plugins/deployers/vertex.py +290 -0
- flowyml/plugins/integration.py +369 -0
- flowyml/plugins/manager.py +510 -0
- flowyml/plugins/model_registries/__init__.py +22 -0
- flowyml/plugins/model_registries/mlflow.py +159 -0
- flowyml/plugins/model_registries/sagemaker.py +489 -0
- flowyml/plugins/model_registries/vertex.py +386 -0
- flowyml/plugins/orchestrators/__init__.py +13 -0
- flowyml/plugins/orchestrators/sagemaker.py +443 -0
- flowyml/plugins/orchestrators/vertex_ai.py +461 -0
- flowyml/plugins/registries/__init__.py +13 -0
- flowyml/plugins/registries/ecr.py +321 -0
- flowyml/plugins/registries/gcr.py +313 -0
- flowyml/plugins/registry.py +454 -0
- flowyml/plugins/stack.py +494 -0
- flowyml/plugins/stack_config.py +537 -0
- flowyml/plugins/stores/__init__.py +13 -0
- flowyml/plugins/stores/gcs.py +460 -0
- flowyml/plugins/stores/s3.py +453 -0
- flowyml/plugins/trackers/__init__.py +11 -0
- flowyml/plugins/trackers/mlflow.py +316 -0
- flowyml/plugins/validators/__init__.py +3 -0
- flowyml/plugins/validators/deepchecks.py +119 -0
- flowyml/registry/__init__.py +2 -1
- flowyml/registry/model_environment.py +109 -0
- flowyml/registry/model_registry.py +241 -96
- flowyml/serving/__init__.py +17 -0
- flowyml/serving/model_server.py +628 -0
- flowyml/stacks/__init__.py +60 -0
- flowyml/stacks/aws.py +93 -0
- flowyml/stacks/base.py +62 -0
- flowyml/stacks/components.py +12 -0
- flowyml/stacks/gcp.py +44 -9
- flowyml/stacks/plugins.py +115 -0
- flowyml/stacks/registry.py +2 -1
- flowyml/storage/sql.py +401 -12
- flowyml/tracking/experiment.py +8 -5
- flowyml/ui/backend/Dockerfile +87 -16
- flowyml/ui/backend/auth.py +12 -2
- flowyml/ui/backend/main.py +149 -5
- flowyml/ui/backend/routers/ai_context.py +226 -0
- flowyml/ui/backend/routers/assets.py +23 -4
- flowyml/ui/backend/routers/auth.py +96 -0
- flowyml/ui/backend/routers/deployments.py +660 -0
- flowyml/ui/backend/routers/model_explorer.py +597 -0
- flowyml/ui/backend/routers/plugins.py +103 -51
- flowyml/ui/backend/routers/projects.py +91 -8
- flowyml/ui/backend/routers/runs.py +132 -1
- flowyml/ui/backend/routers/schedules.py +54 -29
- flowyml/ui/backend/routers/templates.py +319 -0
- flowyml/ui/backend/routers/websocket.py +2 -2
- flowyml/ui/frontend/Dockerfile +55 -6
- flowyml/ui/frontend/dist/assets/index-B5AsPTSz.css +1 -0
- flowyml/ui/frontend/dist/assets/index-dFbZ8wD8.js +753 -0
- flowyml/ui/frontend/dist/index.html +2 -2
- flowyml/ui/frontend/dist/logo.png +0 -0
- flowyml/ui/frontend/nginx.conf +65 -4
- flowyml/ui/frontend/package-lock.json +1415 -74
- flowyml/ui/frontend/package.json +4 -0
- flowyml/ui/frontend/public/logo.png +0 -0
- flowyml/ui/frontend/src/App.jsx +10 -7
- flowyml/ui/frontend/src/app/assets/page.jsx +890 -321
- flowyml/ui/frontend/src/app/auth/Login.jsx +90 -0
- flowyml/ui/frontend/src/app/dashboard/page.jsx +8 -8
- flowyml/ui/frontend/src/app/deployments/page.jsx +786 -0
- flowyml/ui/frontend/src/app/model-explorer/page.jsx +1031 -0
- flowyml/ui/frontend/src/app/pipelines/page.jsx +12 -2
- flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectExperimentsList.jsx +19 -6
- flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectMetricsPanel.jsx +1 -1
- flowyml/ui/frontend/src/app/runs/[runId]/page.jsx +601 -101
- flowyml/ui/frontend/src/app/runs/page.jsx +8 -2
- flowyml/ui/frontend/src/app/settings/page.jsx +267 -253
- flowyml/ui/frontend/src/components/ArtifactViewer.jsx +62 -2
- flowyml/ui/frontend/src/components/AssetDetailsPanel.jsx +424 -29
- flowyml/ui/frontend/src/components/AssetTreeHierarchy.jsx +119 -11
- flowyml/ui/frontend/src/components/DatasetViewer.jsx +753 -0
- flowyml/ui/frontend/src/components/Layout.jsx +6 -0
- flowyml/ui/frontend/src/components/PipelineGraph.jsx +79 -29
- flowyml/ui/frontend/src/components/RunDetailsPanel.jsx +36 -6
- flowyml/ui/frontend/src/components/RunMetaPanel.jsx +113 -0
- flowyml/ui/frontend/src/components/TrainingHistoryChart.jsx +514 -0
- flowyml/ui/frontend/src/components/TrainingMetricsPanel.jsx +175 -0
- flowyml/ui/frontend/src/components/ai/AIAssistantButton.jsx +71 -0
- flowyml/ui/frontend/src/components/ai/AIAssistantPanel.jsx +420 -0
- flowyml/ui/frontend/src/components/header/Header.jsx +22 -0
- flowyml/ui/frontend/src/components/plugins/PluginManager.jsx +4 -4
- flowyml/ui/frontend/src/components/plugins/{ZenMLIntegration.jsx → StackImport.jsx} +38 -12
- flowyml/ui/frontend/src/components/sidebar/Sidebar.jsx +36 -13
- flowyml/ui/frontend/src/contexts/AIAssistantContext.jsx +245 -0
- flowyml/ui/frontend/src/contexts/AuthContext.jsx +108 -0
- flowyml/ui/frontend/src/hooks/useAIContext.js +156 -0
- flowyml/ui/frontend/src/hooks/useWebGPU.js +54 -0
- flowyml/ui/frontend/src/layouts/MainLayout.jsx +6 -0
- flowyml/ui/frontend/src/router/index.jsx +47 -20
- flowyml/ui/frontend/src/services/pluginService.js +3 -1
- flowyml/ui/server_manager.py +5 -5
- flowyml/ui/utils.py +157 -39
- flowyml/utils/config.py +37 -15
- flowyml/utils/model_introspection.py +123 -0
- flowyml/utils/observability.py +30 -0
- flowyml-1.8.0.dist-info/METADATA +174 -0
- {flowyml-1.7.1.dist-info → flowyml-1.8.0.dist-info}/RECORD +134 -73
- {flowyml-1.7.1.dist-info → flowyml-1.8.0.dist-info}/WHEEL +1 -1
- flowyml/ui/frontend/dist/assets/index-BqDQvp63.js +0 -630
- flowyml/ui/frontend/dist/assets/index-By4trVyv.css +0 -1
- flowyml-1.7.1.dist-info/METADATA +0 -477
- {flowyml-1.7.1.dist-info → flowyml-1.8.0.dist-info}/entry_points.txt +0 -0
- {flowyml-1.7.1.dist-info → flowyml-1.8.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,386 @@
|
|
|
1
|
+
"""Vertex AI Model Registry - Native FlowyML Plugin.
|
|
2
|
+
|
|
3
|
+
This plugin provides direct integration with Google Cloud Vertex AI
|
|
4
|
+
Model Registry for model versioning, cataloging, and deployment.
|
|
5
|
+
|
|
6
|
+
Usage:
|
|
7
|
+
from flowyml.plugins import get_plugin
|
|
8
|
+
|
|
9
|
+
registry = get_plugin("vertex_model_registry", project="my-project")
|
|
10
|
+
registry.register_model(
|
|
11
|
+
name="my-model",
|
|
12
|
+
model_uri="gs://bucket/model/",
|
|
13
|
+
version="1.0.0",
|
|
14
|
+
metadata={"framework": "tensorflow", "accuracy": 0.95}
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
# Deploy to endpoint
|
|
18
|
+
endpoint = registry.deploy_model(
|
|
19
|
+
model_name="my-model",
|
|
20
|
+
endpoint_name="my-endpoint",
|
|
21
|
+
machine_type="n1-standard-4"
|
|
22
|
+
)
|
|
23
|
+
"""
|
|
24
|
+
|
|
25
|
+
import logging
from datetime import datetime
from typing import Any, Optional

from flowyml.plugins.base import ModelRegistryPlugin, PluginMetadata, PluginType
|
|
30
|
+
|
|
31
|
+
logger = logging.getLogger(__name__)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class VertexModelRegistry(ModelRegistryPlugin):
    """Native Vertex AI Model Registry plugin for FlowyML.

    This plugin integrates directly with Vertex AI Model Registry
    for registering, versioning, and deploying ML models.

    Args:
        project: GCP project ID.
        location: GCP region (default: us-central1).
        staging_bucket: GCS bucket for model staging.
    """

    metadata = PluginMetadata(
        name="vertex_model_registry",
        version="1.0.0",
        description="Google Cloud Vertex AI Model Registry",
        author="FlowyML Team",
        plugin_type=PluginType.CUSTOM,
    )

    # Prebuilt Vertex AI prediction containers keyed by framework name; used
    # when the caller does not supply ``serving_container_image_uri``.
    # NOTE(review): image tags are pinned — confirm they are still current.
    _SERVING_CONTAINERS: dict[str, str] = {
        "tensorflow": "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-13:latest",
        "pytorch": "us-docker.pkg.dev/vertex-ai/prediction/pytorch-cpu.2-0:latest",
        "sklearn": "us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.1-3:latest",
        "scikit-learn": "us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.1-3:latest",
        "xgboost": "us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-7:latest",
    }
    # Fallback for unknown/unspecified frameworks (matches the original code,
    # which defaulted to the sklearn container).
    _DEFAULT_SERVING_CONTAINER = "us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.1-3:latest"

    def __init__(
        self,
        project: str,
        location: str = "us-central1",
        staging_bucket: Optional[str] = None,
        labels: Optional[dict[str, str]] = None,
        **kwargs,
    ):
        """Initialize the Vertex AI Model Registry plugin.

        Args:
            project: GCP project ID.
            location: GCP region.
            staging_bucket: GCS bucket for model artifacts.
            labels: Default labels to apply to models.
            **kwargs: Additional plugin arguments forwarded to the base class.
        """
        super().__init__(**kwargs)
        self.project = project
        self.location = location
        self.staging_bucket = staging_bucket
        self.labels = labels or {}
        # Lazily bound google.cloud.aiplatform module; set by initialize().
        self._aiplatform = None
        self._initialized = False

    @property
    def plugin_type(self) -> PluginType:
        """Plugin category used by the FlowyML plugin framework."""
        return PluginType.CUSTOM

    def initialize(self) -> None:
        """Initialize the connection to Vertex AI (idempotent).

        Raises:
            ImportError: If google-cloud-aiplatform is not installed.
        """
        if self._initialized:
            return

        # Keep the try body minimal so only the import is guarded, and chain
        # the original error so the real import failure stays visible.
        try:
            from google.cloud import aiplatform
        except ImportError as err:
            raise ImportError(
                "google-cloud-aiplatform is required. Install with: pip install google-cloud-aiplatform",
            ) from err

        aiplatform.init(
            project=self.project,
            location=self.location,
            staging_bucket=self.staging_bucket,
        )
        self._aiplatform = aiplatform
        self._initialized = True
        logger.info(f"Vertex AI Model Registry initialized for project {self.project}")

    def _ensure_initialized(self) -> None:
        """Ensure Vertex AI is initialized before any API call."""
        if not self._initialized:
            self.initialize()

    def register_model(
        self,
        name: str,
        model_uri: str,
        version: Optional[str] = None,
        metadata: Optional[dict] = None,
        serving_container_image_uri: Optional[str] = None,
        description: Optional[str] = None,
        labels: Optional[dict[str, str]] = None,
        **kwargs,
    ) -> str:
        """Register a model in Vertex AI Model Registry.

        Args:
            name: Display name for the model.
            model_uri: GCS URI to model artifacts.
            version: Model version string (stored as a "version" label).
            metadata: Model metadata dictionary; ``metadata["framework"]`` is
                used to auto-select a serving container.
            serving_container_image_uri: Docker image for serving.
            description: Model description.
            labels: Labels to attach to the model (merged over the plugin
                defaults).
            **kwargs: Additional arguments forwarded to ``Model.upload``.

        Returns:
            Model resource name.

        Raises:
            Exception: Re-raises any Vertex AI upload failure after logging.
        """
        self._ensure_initialized()

        try:
            # Per-call labels win over the plugin-level defaults.
            all_labels = {**self.labels, **(labels or {})}
            if version:
                all_labels["version"] = version

            # Auto-select a prebuilt serving container from the declared
            # framework when the caller did not provide one.
            if not serving_container_image_uri:
                framework = (metadata or {}).get("framework", "").lower()
                serving_container_image_uri = self._SERVING_CONTAINERS.get(
                    framework,
                    self._DEFAULT_SERVING_CONTAINER,
                )

            # Forward **kwargs so "additional registration arguments" are
            # honored (previously documented but silently discarded).
            model = self._aiplatform.Model.upload(
                display_name=name,
                artifact_uri=model_uri,
                serving_container_image_uri=serving_container_image_uri,
                description=description or f"Model registered via FlowyML at {datetime.now().isoformat()}",
                labels=all_labels,
                **kwargs,
            )

            logger.info(f"Registered model '{name}' with resource: {model.resource_name}")
            return model.resource_name

        except Exception as e:
            logger.error(f"Failed to register model '{name}': {e}")
            raise

    def get_model(
        self,
        name: str,
        version: Optional[str] = None,
    ) -> Any:
        """Get a model from the registry.

        Args:
            name: Model display name or full resource name
                (``projects/.../models/...``).
            version: Specific version to retrieve (matched against the
                "version" label set at registration time).

        Returns:
            Model object from Vertex AI, or None if not found.
        """
        self._ensure_initialized()

        try:
            # A full resource name can be resolved directly without listing.
            if name.startswith("projects/"):
                return self._aiplatform.Model(model_name=name)

            # Otherwise search by display name, newest first.
            models = self._aiplatform.Model.list(
                filter=f'display_name="{name}"',
                order_by="create_time desc",
            )

            if not models:
                logger.warning(f"Model '{name}' not found")
                return None

            if version:
                # Scan for the requested version label.
                for model in models:
                    if model.labels.get("version") == version:
                        return model
                logger.warning(f"Model '{name}' version '{version}' not found")
                return None

            # No version requested: most recently created wins.
            return models[0]

        except Exception as e:
            logger.error(f"Failed to get model '{name}': {e}")
            raise

    def list_models(
        self,
        filter_expr: Optional[str] = None,
        limit: int = 100,
    ) -> list[dict]:
        """List models in the registry.

        Args:
            filter_expr: Vertex AI filter expression for listing.
            limit: Maximum number of models to return.

        Returns:
            List of model summary dictionaries (name, resource_name,
            created, labels, description).
        """
        self._ensure_initialized()

        try:
            models = self._aiplatform.Model.list(
                filter=filter_expr,
                order_by="create_time desc",
            )

            # NOTE: listing is not server-side limited; we truncate locally.
            return [
                {
                    "name": model.display_name,
                    "resource_name": model.resource_name,
                    "created": str(model.create_time),
                    "labels": model.labels,
                    "description": model.description,
                }
                for model in models[:limit]
            ]

        except Exception as e:
            logger.error(f"Failed to list models: {e}")
            raise

    def transition_model_stage(
        self,
        name: str,
        stage: str,
        version: Optional[str] = None,
    ) -> bool:
        """Transition a model to a stage (tracked via the "stage" label).

        Args:
            name: Model name.
            stage: Target stage (e.g., "staging", "production").
            version: Specific version.

        Returns:
            True if successful; False if the model was not found or the
            update failed (the error is logged, not raised).
        """
        self._ensure_initialized()

        try:
            model = self.get_model(name, version)
            if not model:
                return False

            # Vertex AI has no first-class stages; encode the stage as a label.
            labels = dict(model.labels)
            labels["stage"] = stage
            model.update(labels=labels)

            logger.info(f"Transitioned model '{name}' to stage '{stage}'")
            return True

        except Exception as e:
            logger.error(f"Failed to transition model: {e}")
            return False

    def deploy_model(
        self,
        model_name: str,
        endpoint_name: str,
        machine_type: str = "n1-standard-4",
        min_replica_count: int = 1,
        max_replica_count: int = 1,
        **kwargs,
    ) -> str:
        """Deploy a model to a Vertex AI endpoint.

        Args:
            model_name: Model display name or resource name.
            endpoint_name: Name for the endpoint (reused if it exists).
            machine_type: Machine type for deployment.
            min_replica_count: Minimum replicas.
            max_replica_count: Maximum replicas.
            **kwargs: Additional arguments forwarded to ``Model.deploy``.

        Returns:
            Endpoint resource name.

        Raises:
            ValueError: If the model cannot be found.
        """
        self._ensure_initialized()

        try:
            model = self.get_model(model_name)
            if not model:
                raise ValueError(f"Model '{model_name}' not found")

            # Reuse an existing endpoint with this display name, else create.
            endpoints = self._aiplatform.Endpoint.list(
                filter=f'display_name="{endpoint_name}"',
            )

            if endpoints:
                endpoint = endpoints[0]
                logger.info(f"Using existing endpoint: {endpoint_name}")
            else:
                endpoint = self._aiplatform.Endpoint.create(
                    display_name=endpoint_name,
                )
                logger.info(f"Created endpoint: {endpoint_name}")

            # Blocking call: deploys the model version onto the endpoint.
            model.deploy(
                endpoint=endpoint,
                machine_type=machine_type,
                min_replica_count=min_replica_count,
                max_replica_count=max_replica_count,
                **kwargs,
            )

            logger.info(f"Deployed model '{model_name}' to endpoint '{endpoint_name}'")
            return endpoint.resource_name

        except Exception as e:
            logger.error(f"Failed to deploy model: {e}")
            raise

    def predict(
        self,
        endpoint_name: str,
        instances: list[dict],
    ) -> list[dict]:
        """Make predictions using a deployed model.

        Args:
            endpoint_name: Endpoint display name or full resource name.
            instances: List of input instances.

        Returns:
            List of predictions.

        Raises:
            ValueError: If the endpoint cannot be found by display name.
        """
        self._ensure_initialized()

        try:
            # Resolve a display name to an endpoint object if needed.
            if endpoint_name.startswith("projects/"):
                endpoint = self._aiplatform.Endpoint(endpoint_name=endpoint_name)
            else:
                endpoints = self._aiplatform.Endpoint.list(
                    filter=f'display_name="{endpoint_name}"',
                )
                if not endpoints:
                    raise ValueError(f"Endpoint '{endpoint_name}' not found")
                endpoint = endpoints[0]

            predictions = endpoint.predict(instances=instances)
            return predictions.predictions

        except Exception as e:
            logger.error(f"Failed to make prediction: {e}")
            raise
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
"""FlowyML Orchestrator Plugins."""
|
|
2
|
+
|
|
3
|
+
# Each orchestrator pulls in an optional cloud SDK, so a failed import is
# downgraded to a ``None`` export instead of breaking package import.
try:
    from flowyml.plugins.orchestrators.vertex_ai import VertexAIOrchestrator
except ImportError:
    # Vertex AI orchestrator (or its dependencies) is unavailable.
    VertexAIOrchestrator = None

try:
    from flowyml.plugins.orchestrators.sagemaker import SageMakerOrchestrator
except ImportError:
    # SageMaker orchestrator (or its dependencies) is unavailable.
    SageMakerOrchestrator = None

# NOTE: either name may be ``None`` at runtime; callers must check before use.
__all__ = ["VertexAIOrchestrator", "SageMakerOrchestrator"]
|