flowyml 1.7.2-py3-none-any.whl → 1.8.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowyml/assets/base.py +15 -0
- flowyml/assets/metrics.py +5 -0
- flowyml/cli/main.py +709 -0
- flowyml/cli/stack_cli.py +138 -25
- flowyml/core/__init__.py +17 -0
- flowyml/core/executor.py +161 -26
- flowyml/core/image_builder.py +129 -0
- flowyml/core/log_streamer.py +227 -0
- flowyml/core/orchestrator.py +22 -2
- flowyml/core/pipeline.py +34 -10
- flowyml/core/routing.py +558 -0
- flowyml/core/step.py +9 -1
- flowyml/core/step_grouping.py +49 -35
- flowyml/core/types.py +407 -0
- flowyml/monitoring/alerts.py +10 -0
- flowyml/monitoring/notifications.py +104 -25
- flowyml/monitoring/slack_blocks.py +323 -0
- flowyml/plugins/__init__.py +251 -0
- flowyml/plugins/alerters/__init__.py +1 -0
- flowyml/plugins/alerters/slack.py +168 -0
- flowyml/plugins/base.py +752 -0
- flowyml/plugins/config.py +478 -0
- flowyml/plugins/deployers/__init__.py +22 -0
- flowyml/plugins/deployers/gcp_cloud_run.py +200 -0
- flowyml/plugins/deployers/sagemaker.py +306 -0
- flowyml/plugins/deployers/vertex.py +290 -0
- flowyml/plugins/integration.py +369 -0
- flowyml/plugins/manager.py +510 -0
- flowyml/plugins/model_registries/__init__.py +22 -0
- flowyml/plugins/model_registries/mlflow.py +159 -0
- flowyml/plugins/model_registries/sagemaker.py +489 -0
- flowyml/plugins/model_registries/vertex.py +386 -0
- flowyml/plugins/orchestrators/__init__.py +13 -0
- flowyml/plugins/orchestrators/sagemaker.py +443 -0
- flowyml/plugins/orchestrators/vertex_ai.py +461 -0
- flowyml/plugins/registries/__init__.py +13 -0
- flowyml/plugins/registries/ecr.py +321 -0
- flowyml/plugins/registries/gcr.py +313 -0
- flowyml/plugins/registry.py +454 -0
- flowyml/plugins/stack.py +494 -0
- flowyml/plugins/stack_config.py +537 -0
- flowyml/plugins/stores/__init__.py +13 -0
- flowyml/plugins/stores/gcs.py +460 -0
- flowyml/plugins/stores/s3.py +453 -0
- flowyml/plugins/trackers/__init__.py +11 -0
- flowyml/plugins/trackers/mlflow.py +316 -0
- flowyml/plugins/validators/__init__.py +3 -0
- flowyml/plugins/validators/deepchecks.py +119 -0
- flowyml/registry/__init__.py +2 -1
- flowyml/registry/model_environment.py +109 -0
- flowyml/registry/model_registry.py +241 -96
- flowyml/serving/__init__.py +17 -0
- flowyml/serving/model_server.py +628 -0
- flowyml/stacks/__init__.py +60 -0
- flowyml/stacks/aws.py +93 -0
- flowyml/stacks/base.py +62 -0
- flowyml/stacks/components.py +12 -0
- flowyml/stacks/gcp.py +44 -9
- flowyml/stacks/plugins.py +115 -0
- flowyml/stacks/registry.py +2 -1
- flowyml/storage/sql.py +401 -12
- flowyml/tracking/experiment.py +8 -5
- flowyml/ui/backend/Dockerfile +87 -16
- flowyml/ui/backend/auth.py +12 -2
- flowyml/ui/backend/main.py +149 -5
- flowyml/ui/backend/routers/ai_context.py +226 -0
- flowyml/ui/backend/routers/assets.py +23 -4
- flowyml/ui/backend/routers/auth.py +96 -0
- flowyml/ui/backend/routers/deployments.py +660 -0
- flowyml/ui/backend/routers/model_explorer.py +597 -0
- flowyml/ui/backend/routers/plugins.py +103 -51
- flowyml/ui/backend/routers/projects.py +91 -8
- flowyml/ui/backend/routers/runs.py +20 -1
- flowyml/ui/backend/routers/schedules.py +22 -17
- flowyml/ui/backend/routers/templates.py +319 -0
- flowyml/ui/backend/routers/websocket.py +2 -2
- flowyml/ui/frontend/Dockerfile +55 -6
- flowyml/ui/frontend/dist/assets/index-B5AsPTSz.css +1 -0
- flowyml/ui/frontend/dist/assets/index-dFbZ8wD8.js +753 -0
- flowyml/ui/frontend/dist/index.html +2 -2
- flowyml/ui/frontend/dist/logo.png +0 -0
- flowyml/ui/frontend/nginx.conf +65 -4
- flowyml/ui/frontend/package-lock.json +1404 -74
- flowyml/ui/frontend/package.json +3 -0
- flowyml/ui/frontend/public/logo.png +0 -0
- flowyml/ui/frontend/src/App.jsx +10 -7
- flowyml/ui/frontend/src/app/auth/Login.jsx +90 -0
- flowyml/ui/frontend/src/app/dashboard/page.jsx +8 -8
- flowyml/ui/frontend/src/app/deployments/page.jsx +786 -0
- flowyml/ui/frontend/src/app/model-explorer/page.jsx +1031 -0
- flowyml/ui/frontend/src/app/pipelines/page.jsx +12 -2
- flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectExperimentsList.jsx +19 -6
- flowyml/ui/frontend/src/app/runs/[runId]/page.jsx +36 -24
- flowyml/ui/frontend/src/app/runs/page.jsx +8 -2
- flowyml/ui/frontend/src/app/settings/page.jsx +267 -253
- flowyml/ui/frontend/src/components/AssetDetailsPanel.jsx +29 -7
- flowyml/ui/frontend/src/components/Layout.jsx +6 -0
- flowyml/ui/frontend/src/components/PipelineGraph.jsx +79 -29
- flowyml/ui/frontend/src/components/RunDetailsPanel.jsx +36 -6
- flowyml/ui/frontend/src/components/RunMetaPanel.jsx +113 -0
- flowyml/ui/frontend/src/components/ai/AIAssistantButton.jsx +71 -0
- flowyml/ui/frontend/src/components/ai/AIAssistantPanel.jsx +420 -0
- flowyml/ui/frontend/src/components/header/Header.jsx +22 -0
- flowyml/ui/frontend/src/components/plugins/PluginManager.jsx +4 -4
- flowyml/ui/frontend/src/components/plugins/{ZenMLIntegration.jsx → StackImport.jsx} +38 -12
- flowyml/ui/frontend/src/components/sidebar/Sidebar.jsx +36 -13
- flowyml/ui/frontend/src/contexts/AIAssistantContext.jsx +245 -0
- flowyml/ui/frontend/src/contexts/AuthContext.jsx +108 -0
- flowyml/ui/frontend/src/hooks/useAIContext.js +156 -0
- flowyml/ui/frontend/src/hooks/useWebGPU.js +54 -0
- flowyml/ui/frontend/src/layouts/MainLayout.jsx +6 -0
- flowyml/ui/frontend/src/router/index.jsx +47 -20
- flowyml/ui/frontend/src/services/pluginService.js +3 -1
- flowyml/ui/server_manager.py +5 -5
- flowyml/ui/utils.py +157 -39
- flowyml/utils/config.py +37 -15
- flowyml/utils/model_introspection.py +123 -0
- flowyml/utils/observability.py +30 -0
- flowyml-1.8.0.dist-info/METADATA +174 -0
- {flowyml-1.7.2.dist-info → flowyml-1.8.0.dist-info}/RECORD +123 -65
- {flowyml-1.7.2.dist-info → flowyml-1.8.0.dist-info}/WHEEL +1 -1
- flowyml/ui/frontend/dist/assets/index-B40RsQDq.css +0 -1
- flowyml/ui/frontend/dist/assets/index-CjI0zKCn.js +0 -685
- flowyml-1.7.2.dist-info/METADATA +0 -477
- {flowyml-1.7.2.dist-info → flowyml-1.8.0.dist-info}/entry_points.txt +0 -0
- {flowyml-1.7.2.dist-info → flowyml-1.8.0.dist-info}/licenses/LICENSE +0 -0
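Most of the new surface area in 1.8.0 sits under `flowyml/plugins/` (base classes, a manager and registry, plus concrete orchestrator, deployer, store, registry, tracker, alerter, and validator plugins). As a minimal sketch of how one of these plugins is obtained by name, mirroring the usage shown in the `sagemaker` orchestrator hunk below and assuming `get_plugin` resolves registered plugin names as that docstring indicates:

```python
# Hypothetical illustration based on the docstring in
# flowyml/plugins/orchestrators/sagemaker.py below; the role ARN and
# region are placeholder values.
from flowyml.plugins import get_plugin

orchestrator = get_plugin(
    "sagemaker",
    role_arn="arn:aws:iam::123456789012:role/SageMakerRole",
    region="us-east-1",
)
```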
flowyml/plugins/orchestrators/sagemaker.py
@@ -0,0 +1,443 @@
```python
"""AWS SageMaker Pipelines Orchestrator - Native FlowyML Plugin.

This is a native FlowyML implementation for AWS SageMaker Pipelines,
without requiring any external framework dependencies.

Usage:
    from flowyml.plugins import get_plugin

    orchestrator = get_plugin("sagemaker",
        role_arn="arn:aws:iam::123456789012:role/SageMakerRole",
        region="us-east-1"
    )

    # Run a pipeline
    orchestrator.run_pipeline(my_pipeline, "training-run-001")
"""

import logging
from typing import Any
from datetime import datetime

from flowyml.plugins.base import OrchestratorPlugin, PluginMetadata, PluginType

logger = logging.getLogger(__name__)


class SageMakerOrchestrator(OrchestratorPlugin):
    """Native AWS SageMaker Pipelines orchestrator for FlowyML.

    This orchestrator integrates directly with AWS SageMaker Pipelines
    without any intermediate framework.

    Args:
        role_arn: IAM role ARN for SageMaker execution.
        region: AWS region.
        default_bucket: S3 bucket for pipeline artifacts.
        pipeline_name: Default pipeline name.

    Example:
        orchestrator = SageMakerOrchestrator(
            role_arn="arn:aws:iam::123456789012:role/SageMakerRole",
            region="us-east-1",
            default_bucket="my-sagemaker-bucket"
        )

        orchestrator.run_pipeline(pipeline_definition, "training-v1")
    """

    METADATA = PluginMetadata(
        name="sagemaker",
        description="AWS SageMaker Pipelines Orchestrator",
        plugin_type=PluginType.ORCHESTRATOR,
        version="1.0.0",
        author="FlowyML",
        packages=["sagemaker>=2.100", "boto3>=1.28"],
        documentation_url="https://docs.aws.amazon.com/sagemaker/latest/dg/pipelines.html",
        tags=["orchestrator", "aws", "cloud", "sagemaker"],
    )

    def __init__(
        self,
        role_arn: str,
        region: str = None,
        default_bucket: str = None,
        pipeline_name: str = None,
        **kwargs,
    ):
        """Initialize the SageMaker orchestrator."""
        super().__init__(
            name=kwargs.pop("name", "sagemaker"),
            role_arn=role_arn,
            region=region,
            default_bucket=default_bucket,
            pipeline_name=pipeline_name,
            **kwargs,
        )

        self._role_arn = role_arn
        self._region = region
        self._default_bucket = default_bucket
        self._pipeline_name = pipeline_name
        self._session = None
        self._sm_client = None

    def initialize(self) -> None:
        """Initialize SageMaker session."""
        try:
            import sagemaker
            import boto3

            # Create session
            boto_session = boto3.Session(region_name=self._region)
            self._sm_client = boto_session.client("sagemaker")

            self._session = sagemaker.Session(
                boto_session=boto_session,
                default_bucket=self._default_bucket,
            )

            self._is_initialized = True
            logger.info(f"SageMaker orchestrator initialized in region: {self._region}")

        except ImportError:
            raise ImportError(
                "sagemaker is not installed. Run: flowyml plugin install sagemaker",
            )

    def _ensure_initialized(self) -> None:
        """Ensure the orchestrator is initialized."""
        if not self._is_initialized:
            self.initialize()

    def run_pipeline(
        self,
        pipeline: Any,
        run_id: str,
        parameters: dict = None,
        wait: bool = False,
        **kwargs,
    ) -> Any:
        """Run a pipeline on SageMaker.

        Args:
            pipeline: SageMaker Pipeline object or path to definition.
            run_id: Unique identifier for this run.
            parameters: Pipeline parameters.
            wait: If True, wait for pipeline to complete.
            **kwargs: Additional execution options.

        Returns:
            Pipeline execution ARN.
        """
        self._ensure_initialized()

        try:
            from sagemaker.workflow.pipeline import Pipeline

            # Handle different pipeline types
            if isinstance(pipeline, Pipeline):
                sm_pipeline = pipeline
            elif isinstance(pipeline, dict):
                # Pipeline definition as dict
                sm_pipeline = Pipeline(
                    name=self._pipeline_name or f"flowyml-{run_id}",
                    parameters=parameters or [],
                    steps=pipeline.get("steps", []),
                    sagemaker_session=self._session,
                )
            else:
                raise ValueError(f"Unsupported pipeline type: {type(pipeline)}")

            # Upsert pipeline (create or update)
            sm_pipeline.upsert(role_arn=self._role_arn)

            # Start execution
            execution = sm_pipeline.start(
                execution_display_name=run_id,
                parameters=parameters or {},
            )

            logger.info(f"Started SageMaker pipeline execution: {execution.arn}")

            if wait:
                execution.wait()
                logger.info(f"Pipeline execution completed: {execution.arn}")

            return execution

        except Exception as e:
            logger.error(f"Failed to run pipeline: {e}")
            raise

    def get_run_status(self, run_id: str) -> dict:
        """Get the status of a pipeline run.

        Args:
            run_id: Pipeline execution ARN or display name.

        Returns:
            Dictionary with run status information.
        """
        self._ensure_initialized()

        try:
            # Search for execution by display name or ARN
            response = self._sm_client.list_pipeline_executions(
                PipelineName=self._pipeline_name,
            )

            for execution in response.get("PipelineExecutionSummaries", []):
                if (
                    execution.get("PipelineExecutionArn") == run_id
                    or execution.get("PipelineExecutionDisplayName") == run_id
                ):
                    return {
                        "run_id": execution.get("PipelineExecutionArn"),
                        "status": execution.get("PipelineExecutionStatus"),
                        "start_time": execution.get("StartTime"),
                        "end_time": execution.get("LastModifiedTime"),
                    }

            return {"status": "NOT_FOUND"}

        except Exception as e:
            logger.error(f"Failed to get run status: {e}")
            return {"status": "ERROR", "error": str(e)}

    def cancel_run(self, run_id: str) -> bool:
        """Cancel a running pipeline execution.

        Args:
            run_id: Pipeline execution ARN.

        Returns:
            True if cancellation was successful.
        """
        self._ensure_initialized()

        try:
            self._sm_client.stop_pipeline_execution(
                PipelineExecutionArn=run_id,
                ClientRequestToken=f"cancel-{datetime.now().timestamp()}",
            )

            logger.info(f"Cancelled pipeline execution: {run_id}")
            return True

        except Exception as e:
            logger.error(f"Failed to cancel run: {e}")
            return False

    def list_runs(
        self,
        pipeline_name: str = None,
        max_results: int = 100,
        status: str = None,
    ) -> list[dict]:
        """List pipeline executions.

        Args:
            pipeline_name: Filter by pipeline name.
            max_results: Maximum number of results.
            status: Filter by status.

        Returns:
            List of execution summaries.
        """
        self._ensure_initialized()

        pipeline = pipeline_name or self._pipeline_name

        try:
            kwargs = {
                "PipelineName": pipeline,
                "MaxResults": min(max_results, 100),
            }

            if status:
                kwargs["SortBy"] = "CreationTime"

            response = self._sm_client.list_pipeline_executions(**kwargs)

            runs = []
            for execution in response.get("PipelineExecutionSummaries", []):
                runs.append(
                    {
                        "run_id": execution.get("PipelineExecutionArn"),
                        "display_name": execution.get("PipelineExecutionDisplayName"),
                        "status": execution.get("PipelineExecutionStatus"),
                        "start_time": str(execution.get("StartTime", "")),
                    },
                )

            return runs

        except Exception as e:
            logger.error(f"Failed to list runs: {e}")
            return []

    def get_logs(self, run_id: str, step_name: str = None) -> str:
        """Get logs for a pipeline execution.

        Args:
            run_id: Pipeline execution ARN.
            step_name: Optional specific step name.

        Returns:
            Log content as string.
        """
        self._ensure_initialized()

        try:
            # Get pipeline execution steps
            response = self._sm_client.list_pipeline_execution_steps(
                PipelineExecutionArn=run_id,
            )

            logs = []
            for step in response.get("PipelineExecutionSteps", []):
                if step_name and step.get("StepName") != step_name:
                    continue

                logs.append(f"=== Step: {step.get('StepName')} ===")
                logs.append(f"Status: {step.get('StepStatus')}")

                if step.get("FailureReason"):
                    logs.append(f"Failure: {step.get('FailureReason')}")

                logs.append("")

            return "\n".join(logs)

        except Exception as e:
            logger.error(f"Failed to get logs: {e}")
            return f"Error getting logs: {e}"

    def run_with_routing(
        self,
        pipeline: Any,
        run_id: str,
        stack_name: str = None,
        parameters: dict = None,
        wait: bool = False,
        **kwargs,
    ) -> Any:
        """Run a pipeline with type-based artifact routing.

        This method integrates with FlowyML's type-based routing system,
        ensuring that Model, Dataset, Metrics, and Parameters artifacts
        are automatically routed to the configured infrastructure.

        Args:
            pipeline: The pipeline to run.
            run_id: Unique identifier for this run.
            stack_name: Stack to use for routing (uses active stack if None).
            parameters: Pipeline parameters.
            wait: If True, wait for pipeline to complete.
            **kwargs: Additional arguments.

        Returns:
            Pipeline execution with routing metadata.
        """
        self._ensure_initialized()

        # Get routing configuration
        routing_config = self._get_routing_config(stack_name)

        # Inject routing parameters
        enriched_params = parameters or {}
        enriched_params["__run_id__"] = run_id
        enriched_params["__routing_config__"] = routing_config

        # Run the pipeline
        execution = self.run_pipeline(
            pipeline=pipeline,
            run_id=run_id,
            parameters=enriched_params,
            wait=wait,
            **kwargs,
        )

        logger.info(f"Started type-aware SageMaker pipeline: {run_id}")
        logger.info(f"Routing config: stack={stack_name or 'active'}")

        return execution

    def _get_routing_config(self, stack_name: str = None) -> dict:
        """Get routing configuration for a stack.

        Args:
            stack_name: Stack name (uses active stack if None).

        Returns:
            Dictionary of routing rules.
        """
        try:
            from flowyml.plugins.stack_config import get_stack_manager

            manager = get_stack_manager()
            stack = manager.get_stack(stack_name) if stack_name else manager.get_active_stack()

            if stack and stack.artifact_routing:
                return {
                    "Model": stack.artifact_routing.model.to_dict() if stack.artifact_routing.model else {},
                    "Dataset": stack.artifact_routing.dataset.to_dict() if stack.artifact_routing.dataset else {},
                    "Metrics": stack.artifact_routing.metrics.to_dict() if stack.artifact_routing.metrics else {},
                    "Parameters": stack.artifact_routing.parameters.to_dict()
                    if stack.artifact_routing.parameters
                    else {},
                }
        except ImportError:
            logger.debug("Stack config not available for routing")
        except Exception as e:
            logger.warning(f"Failed to get routing config: {e}")

        return {}

    def configure_model_deployment(
        self,
        model_uri: str,
        endpoint_name: str,
        instance_type: str = "ml.m5.large",
        initial_instance_count: int = 1,
    ) -> str:
        """Deploy a model to a SageMaker endpoint.

        This method can be used after pipeline completion to deploy
        registered models to serving endpoints.

        Args:
            model_uri: S3 URI to the model artifact.
            endpoint_name: Name for the endpoint.
            instance_type: Instance type for the endpoint.
            initial_instance_count: Number of instances.

        Returns:
            Endpoint name.
        """
        self._ensure_initialized()

        try:
            from sagemaker.model import Model

            # Create model
            model = Model(
                model_data=model_uri,
                role=self._role_arn,
                sagemaker_session=self._session,
            )

            # Deploy to endpoint
            model.deploy(
                endpoint_name=endpoint_name,
                instance_type=instance_type,
                initial_instance_count=initial_instance_count,
            )

            logger.info(f"Model deployed to endpoint: {endpoint_name}")
            return endpoint_name

        except Exception as e:
            logger.error(f"Failed to deploy model: {e}")
            raise
```
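For reference, a minimal end-to-end sketch of driving this orchestrator directly (not taken from the package docs): it assumes AWS credentials are configured, the optional `sagemaker`/`boto3` dependencies are installed, and that a real pipeline definition would carry actual SageMaker workflow steps rather than the empty placeholder used here.

```python
from flowyml.plugins.orchestrators.sagemaker import SageMakerOrchestrator

orchestrator = SageMakerOrchestrator(
    role_arn="arn:aws:iam::123456789012:role/SageMakerRole",  # placeholder ARN
    region="us-east-1",
    default_bucket="my-sagemaker-bucket",
    pipeline_name="flowyml-training",
)

# run_pipeline accepts either a sagemaker.workflow.pipeline.Pipeline object
# or a dict with a "steps" list; the empty list here is purely a placeholder.
pipeline_definition = {"steps": []}
execution = orchestrator.run_pipeline(pipeline_definition, run_id="training-run-001")

# Query status by display name or ARN, pull step-level log summaries,
# and cancel the execution if needed.
print(orchestrator.get_run_status("training-run-001"))
print(orchestrator.get_logs(execution.arn))
orchestrator.cancel_run(execution.arn)

# After a successful run, a registered model artifact can be pushed to a
# real-time endpoint (the S3 URI below is a placeholder).
orchestrator.configure_model_deployment(
    model_uri="s3://my-sagemaker-bucket/models/model.tar.gz",
    endpoint_name="flowyml-demo-endpoint",
)
```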