datarobot-genai 0.2.31__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datarobot_genai/__init__.py +19 -0
- datarobot_genai/core/__init__.py +0 -0
- datarobot_genai/core/agents/__init__.py +43 -0
- datarobot_genai/core/agents/base.py +195 -0
- datarobot_genai/core/chat/__init__.py +19 -0
- datarobot_genai/core/chat/auth.py +146 -0
- datarobot_genai/core/chat/client.py +178 -0
- datarobot_genai/core/chat/responses.py +297 -0
- datarobot_genai/core/cli/__init__.py +18 -0
- datarobot_genai/core/cli/agent_environment.py +47 -0
- datarobot_genai/core/cli/agent_kernel.py +211 -0
- datarobot_genai/core/custom_model.py +141 -0
- datarobot_genai/core/mcp/__init__.py +0 -0
- datarobot_genai/core/mcp/common.py +218 -0
- datarobot_genai/core/telemetry_agent.py +126 -0
- datarobot_genai/core/utils/__init__.py +3 -0
- datarobot_genai/core/utils/auth.py +234 -0
- datarobot_genai/core/utils/urls.py +64 -0
- datarobot_genai/crewai/__init__.py +24 -0
- datarobot_genai/crewai/agent.py +42 -0
- datarobot_genai/crewai/base.py +159 -0
- datarobot_genai/crewai/events.py +117 -0
- datarobot_genai/crewai/mcp.py +59 -0
- datarobot_genai/drmcp/__init__.py +78 -0
- datarobot_genai/drmcp/core/__init__.py +13 -0
- datarobot_genai/drmcp/core/auth.py +165 -0
- datarobot_genai/drmcp/core/clients.py +180 -0
- datarobot_genai/drmcp/core/config.py +364 -0
- datarobot_genai/drmcp/core/config_utils.py +174 -0
- datarobot_genai/drmcp/core/constants.py +18 -0
- datarobot_genai/drmcp/core/credentials.py +190 -0
- datarobot_genai/drmcp/core/dr_mcp_server.py +350 -0
- datarobot_genai/drmcp/core/dr_mcp_server_logo.py +136 -0
- datarobot_genai/drmcp/core/dynamic_prompts/__init__.py +13 -0
- datarobot_genai/drmcp/core/dynamic_prompts/controllers.py +130 -0
- datarobot_genai/drmcp/core/dynamic_prompts/dr_lib.py +70 -0
- datarobot_genai/drmcp/core/dynamic_prompts/register.py +205 -0
- datarobot_genai/drmcp/core/dynamic_prompts/utils.py +33 -0
- datarobot_genai/drmcp/core/dynamic_tools/__init__.py +14 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/__init__.py +0 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/adapters/__init__.py +14 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/adapters/base.py +72 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/adapters/default.py +82 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/adapters/drum.py +238 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/config.py +228 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/controllers.py +63 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/metadata.py +162 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/register.py +87 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/schemas/drum_agentic_fallback_schema.json +36 -0
- datarobot_genai/drmcp/core/dynamic_tools/deployment/schemas/drum_prediction_fallback_schema.json +10 -0
- datarobot_genai/drmcp/core/dynamic_tools/register.py +254 -0
- datarobot_genai/drmcp/core/dynamic_tools/schema.py +532 -0
- datarobot_genai/drmcp/core/exceptions.py +25 -0
- datarobot_genai/drmcp/core/logging.py +98 -0
- datarobot_genai/drmcp/core/mcp_instance.py +515 -0
- datarobot_genai/drmcp/core/memory_management/__init__.py +13 -0
- datarobot_genai/drmcp/core/memory_management/manager.py +820 -0
- datarobot_genai/drmcp/core/memory_management/memory_tools.py +201 -0
- datarobot_genai/drmcp/core/routes.py +439 -0
- datarobot_genai/drmcp/core/routes_utils.py +30 -0
- datarobot_genai/drmcp/core/server_life_cycle.py +107 -0
- datarobot_genai/drmcp/core/telemetry.py +424 -0
- datarobot_genai/drmcp/core/tool_config.py +111 -0
- datarobot_genai/drmcp/core/tool_filter.py +117 -0
- datarobot_genai/drmcp/core/utils.py +138 -0
- datarobot_genai/drmcp/server.py +19 -0
- datarobot_genai/drmcp/test_utils/__init__.py +13 -0
- datarobot_genai/drmcp/test_utils/clients/__init__.py +0 -0
- datarobot_genai/drmcp/test_utils/clients/anthropic.py +68 -0
- datarobot_genai/drmcp/test_utils/clients/base.py +300 -0
- datarobot_genai/drmcp/test_utils/clients/dr_gateway.py +58 -0
- datarobot_genai/drmcp/test_utils/clients/openai.py +68 -0
- datarobot_genai/drmcp/test_utils/elicitation_test_tool.py +89 -0
- datarobot_genai/drmcp/test_utils/integration_mcp_server.py +109 -0
- datarobot_genai/drmcp/test_utils/mcp_utils_ete.py +133 -0
- datarobot_genai/drmcp/test_utils/mcp_utils_integration.py +107 -0
- datarobot_genai/drmcp/test_utils/test_interactive.py +205 -0
- datarobot_genai/drmcp/test_utils/tool_base_ete.py +220 -0
- datarobot_genai/drmcp/test_utils/utils.py +91 -0
- datarobot_genai/drmcp/tools/__init__.py +14 -0
- datarobot_genai/drmcp/tools/clients/__init__.py +14 -0
- datarobot_genai/drmcp/tools/clients/atlassian.py +188 -0
- datarobot_genai/drmcp/tools/clients/confluence.py +584 -0
- datarobot_genai/drmcp/tools/clients/gdrive.py +832 -0
- datarobot_genai/drmcp/tools/clients/jira.py +334 -0
- datarobot_genai/drmcp/tools/clients/microsoft_graph.py +479 -0
- datarobot_genai/drmcp/tools/clients/s3.py +28 -0
- datarobot_genai/drmcp/tools/confluence/__init__.py +14 -0
- datarobot_genai/drmcp/tools/confluence/tools.py +321 -0
- datarobot_genai/drmcp/tools/gdrive/__init__.py +0 -0
- datarobot_genai/drmcp/tools/gdrive/tools.py +347 -0
- datarobot_genai/drmcp/tools/jira/__init__.py +14 -0
- datarobot_genai/drmcp/tools/jira/tools.py +243 -0
- datarobot_genai/drmcp/tools/microsoft_graph/__init__.py +13 -0
- datarobot_genai/drmcp/tools/microsoft_graph/tools.py +198 -0
- datarobot_genai/drmcp/tools/predictive/__init__.py +27 -0
- datarobot_genai/drmcp/tools/predictive/data.py +133 -0
- datarobot_genai/drmcp/tools/predictive/deployment.py +91 -0
- datarobot_genai/drmcp/tools/predictive/deployment_info.py +392 -0
- datarobot_genai/drmcp/tools/predictive/model.py +148 -0
- datarobot_genai/drmcp/tools/predictive/predict.py +254 -0
- datarobot_genai/drmcp/tools/predictive/predict_realtime.py +307 -0
- datarobot_genai/drmcp/tools/predictive/project.py +90 -0
- datarobot_genai/drmcp/tools/predictive/training.py +661 -0
- datarobot_genai/langgraph/__init__.py +0 -0
- datarobot_genai/langgraph/agent.py +341 -0
- datarobot_genai/langgraph/mcp.py +73 -0
- datarobot_genai/llama_index/__init__.py +16 -0
- datarobot_genai/llama_index/agent.py +50 -0
- datarobot_genai/llama_index/base.py +299 -0
- datarobot_genai/llama_index/mcp.py +79 -0
- datarobot_genai/nat/__init__.py +0 -0
- datarobot_genai/nat/agent.py +275 -0
- datarobot_genai/nat/datarobot_auth_provider.py +110 -0
- datarobot_genai/nat/datarobot_llm_clients.py +318 -0
- datarobot_genai/nat/datarobot_llm_providers.py +130 -0
- datarobot_genai/nat/datarobot_mcp_client.py +266 -0
- datarobot_genai/nat/helpers.py +87 -0
- datarobot_genai/py.typed +0 -0
- datarobot_genai-0.2.31.dist-info/METADATA +145 -0
- datarobot_genai-0.2.31.dist-info/RECORD +125 -0
- datarobot_genai-0.2.31.dist-info/WHEEL +4 -0
- datarobot_genai-0.2.31.dist-info/entry_points.txt +5 -0
- datarobot_genai-0.2.31.dist-info/licenses/AUTHORS +2 -0
- datarobot_genai-0.2.31.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,820 @@
|
|
|
1
|
+
# Copyright 2025 DataRobot, Inc.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
import json
|
|
16
|
+
import logging
|
|
17
|
+
import re
|
|
18
|
+
import uuid
|
|
19
|
+
from datetime import datetime
|
|
20
|
+
from datetime import timedelta
|
|
21
|
+
from datetime import timezone
|
|
22
|
+
from typing import Any
|
|
23
|
+
from typing import Optional
|
|
24
|
+
|
|
25
|
+
import boto3
|
|
26
|
+
from botocore.exceptions import BotoCoreError
|
|
27
|
+
from botocore.exceptions import ClientError
|
|
28
|
+
from pydantic import BaseModel
|
|
29
|
+
from pydantic import Field
|
|
30
|
+
|
|
31
|
+
from datarobot_genai.drmcp.core.credentials import get_credentials
|
|
32
|
+
|
|
33
|
+
# Module-level logger named after this module, per stdlib logging convention.
logger = logging.getLogger(__name__)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class MemoryError(Exception):
    """Base exception for memory management errors.

    NOTE(review): this class shadows the builtin ``MemoryError``; within this
    module, ``except MemoryError`` catches this class rather than the
    interpreter's out-of-memory error. Renaming would break existing callers,
    so the name is kept — be aware of the shadowing.
    """

    # The redundant ``pass`` after the docstring was removed: a docstring is
    # already a complete class body.
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class S3StorageError(MemoryError):
    """Exception raised for S3 storage related errors (reads/writes/deletes)."""

    # Redundant ``pass`` removed — the docstring is a sufficient body.
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
class S3ConfigError(MemoryError):
    """Exception raised for S3 configuration related errors (credentials/bucket)."""

    # Redundant ``pass`` removed — the docstring is a sufficient body.
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
class S3Config:
    """Holds a validated boto3 S3 client and target bucket for memory storage.

    Construction is eager and fail-fast: it resolves credentials, builds the
    client, verifies the bucket exists, and probes every S3 operation the
    memory manager needs (PUT/LIST/HEAD/GET/DELETE) with a scratch key.
    """

    def __init__(self) -> None:
        """Resolve credentials, build the client, and validate bucket access.

        Raises:
            S3ConfigError: When credentials are missing, the bucket does not
                exist or is inaccessible, a required permission is absent, or
                the client cannot be constructed.
        """
        credentials = get_credentials()
        self.bucket_name = credentials.aws_predictions_s3_bucket

        aws_access_key_id, aws_secret_access_key, aws_session_token = (
            credentials.get_aws_credentials()
        )

        # Session token alone is useless; key id + secret are the minimum.
        if not aws_access_key_id or not aws_secret_access_key:
            raise S3ConfigError(
                "AWS credentials not found. Please provide credentials or set environment "
                "variables."
            )

        try:
            # Initialize S3 client
            self.client = boto3.client(
                "s3",
                aws_access_key_id=aws_access_key_id,
                aws_secret_access_key=aws_secret_access_key,
                aws_session_token=aws_session_token,
            )

            # Verify bucket exists and is accessible
            self.client.head_bucket(Bucket=self.bucket_name)

            # Probe all required S3 operations with a throwaway key so a
            # missing permission fails here, not in the middle of a request.
            test_key = "_test_permissions"
            try:
                # Test PUT operation
                self.client.put_object(Bucket=self.bucket_name, Key=test_key, Body=b"test")

                # Test LIST operation
                self.client.list_objects_v2(Bucket=self.bucket_name, Prefix=test_key, MaxKeys=1)

                # Test HEAD operation
                self.client.head_object(Bucket=self.bucket_name, Key=test_key)

                # Test GET operation
                self.client.get_object(Bucket=self.bucket_name, Key=test_key)

                # Test DELETE operation (also cleans up the scratch object)
                self.client.delete_object(Bucket=self.bucket_name, Key=test_key)

            except ClientError as e:
                error_code = e.response["Error"]["Code"]
                operation = e.operation_name
                # NOTE(review): object-level denials often surface as
                # "AccessDenied" rather than "403" — confirm this branch
                # matches the codes boto3 actually reports here.
                if error_code == "403":
                    raise S3ConfigError(
                        f"Access denied: Missing {operation} permissions for bucket "
                        f"{self.bucket_name}"
                    )
                else:
                    raise S3ConfigError(
                        f"Error testing {operation} access to bucket {self.bucket_name}: {str(e)}"
                    )

        # This except pairs with the outer try and primarily covers
        # head_bucket; the S3ConfigError raised above is not a ClientError
        # and therefore propagates past it untouched.
        except ClientError as e:
            error_code = e.response["Error"]["Code"]
            if error_code == "404":
                raise S3ConfigError(f"Bucket {self.bucket_name} does not exist")
            elif error_code == "403":
                raise S3ConfigError(f"Access denied to bucket {self.bucket_name}")
            else:
                raise S3ConfigError(f"Error accessing bucket {self.bucket_name}: {str(e)}")

        # Client construction itself failed (bad endpoint/config, etc.).
        except BotoCoreError as e:
            raise S3ConfigError(f"Error initializing S3 client: {str(e)}")
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def initialize_s3() -> S3Config:
    """Initialize S3 configuration with error handling and validation.

    Returns:
        A validated S3Config (client built, bucket reachable, permissions
        probed by S3Config.__init__).

    Raises:
        S3ConfigError: Propagated from S3Config when credentials or bucket
            access are missing; any other construction failure is logged and
            re-raised unchanged.
    """
    try:
        s3_config = S3Config()
        # Lazy %-formatting: the message is only built when INFO is enabled.
        logger.info(
            "Successfully initialized S3 configuration with bucket: %s",
            s3_config.bucket_name,
        )
        return s3_config
    except Exception as e:
        # The original caught (S3ConfigError, Exception), which is redundant:
        # S3ConfigError already derives from Exception. Behavior is identical.
        logger.error("Failed to initialize S3 configuration: %s", e)
        raise
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
class ToolContext(BaseModel):
    """Tool invocation context captured alongside a stored memory resource."""

    # Name of the tool associated with the resource.
    name: str
    # Parameters of that tool invocation — presumably its keyword arguments;
    # confirm against the callers that populate this model.
    parameters: dict[str, Any]
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
class MemoryResource(BaseModel):
    """Metadata describing one stored memory item.

    The payload itself is stored separately in S3 under a "data" key; this
    model is what gets serialized to the resource's metadata.json.
    """

    id: str
    memory_storage_id: str | None = (
        None  # a resource may belong to a memory storage, or stand alone and
        # act as temporary session memory (then this stays None)
    )
    # Prompt used to generate this resource, when applicable.
    prompt: str | None = None
    # Tool invocation associated with this resource, when applicable.
    tool_context: ToolContext | None = None
    # Optional embedding vector (e.g. for similarity search).
    embedding_vector: list[float] | None = None
    # NOTE(review): datetime.utcnow produces a *naive* timestamp and is
    # deprecated since Python 3.12; switching to datetime.now(timezone.utc)
    # would change the serialized form, so it is left unchanged here.
    created_at: datetime = Field(default_factory=datetime.utcnow)
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
class MemoryStorage(BaseModel):
    """Metadata for a named collection of memory resources owned by an agent."""

    id: str
    # Identifier of the owning agent; used verbatim as an S3 key segment.
    agent_identifier: str
    # Human-readable name for this storage.
    label: str
    # Optional free-form backend configuration.
    storage_config: dict[str, Any] | None = None
    # NOTE(review): naive UTC timestamp — datetime.utcnow is deprecated; kept
    # as-is to preserve the serialized format.
    created_at: datetime = Field(default_factory=datetime.utcnow)
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
class ActiveStorageMapping(BaseModel):
    """Model for storing active storage mappings.

    Records which storage is currently "active" for an agent; persisted at
    the key produced by MemoryManager._get_active_storage_mapping_key.
    """

    agent_identifier: str
    # ID of the storage currently selected as active for this agent.
    storage_id: str
    # Label of that storage (duplicated here for display without a lookup).
    label: str
    # NOTE(review): naive UTC timestamp — datetime.utcnow is deprecated; kept
    # as-is to preserve the serialized format.
    updated_at: datetime = Field(default_factory=datetime.utcnow)
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def get_memory_manager() -> Optional["MemoryManager"]:
    """Return the MemoryManager singleton, or None when it was never set up."""
    return MemoryManager.get_instance() if MemoryManager.is_initialized() else None
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
class MemoryManager:
    """Manages memory operations.

    Process-wide singleton fronting an S3 bucket: storages (per-agent
    collections) and resources (individual memory items) are persisted as
    metadata.json documents plus raw data objects.
    """

    # The single shared instance; created lazily by __new__/get_instance.
    _instance: Optional["MemoryManager"] = None
    # Guards the one-time S3 setup in __init__ so re-construction is a no-op.
    _initialized = False
    # Validated client/bucket pair; assigned on first successful __init__.
    s3_config: S3Config
|
|
186
|
+
|
|
187
|
+
def __new__(cls) -> "MemoryManager":
|
|
188
|
+
if cls._instance is None:
|
|
189
|
+
cls._instance = super().__new__(cls)
|
|
190
|
+
return cls._instance
|
|
191
|
+
|
|
192
|
+
def __init__(self) -> None:
|
|
193
|
+
if not MemoryManager._initialized:
|
|
194
|
+
self.s3_config = self._initialize()
|
|
195
|
+
MemoryManager._initialized = True
|
|
196
|
+
|
|
197
|
+
@classmethod
|
|
198
|
+
def get_instance(cls) -> "MemoryManager":
|
|
199
|
+
"""Get the singleton instance of MemoryManager."""
|
|
200
|
+
if cls._instance is None:
|
|
201
|
+
cls._instance = cls()
|
|
202
|
+
return cls._instance
|
|
203
|
+
|
|
204
|
+
@classmethod
|
|
205
|
+
def is_initialized(cls) -> bool:
|
|
206
|
+
"""Check if the MemoryManager is initialized."""
|
|
207
|
+
return cls._initialized
|
|
208
|
+
|
|
209
|
+
def _initialize(self) -> S3Config:
|
|
210
|
+
"""Initialize the MemoryManager with S3 configuration."""
|
|
211
|
+
s3_config = initialize_s3()
|
|
212
|
+
logger.info("MemoryManager initialized successfully")
|
|
213
|
+
return s3_config
|
|
214
|
+
|
|
215
|
+
@staticmethod
|
|
216
|
+
def _generate_memory_storage_id() -> str:
|
|
217
|
+
"""Generate a unique memory ID."""
|
|
218
|
+
return str(uuid.uuid4())[:8]
|
|
219
|
+
|
|
220
|
+
@staticmethod
|
|
221
|
+
def _get_resource_data_s3_key(
|
|
222
|
+
resource_id: str,
|
|
223
|
+
agent_identifier: str | None = None,
|
|
224
|
+
storage_id: str | None = None,
|
|
225
|
+
) -> str:
|
|
226
|
+
"""Generate S3 key for a resource data."""
|
|
227
|
+
if agent_identifier:
|
|
228
|
+
if not storage_id:
|
|
229
|
+
raise ValueError("Storage ID is required for agent memory scope")
|
|
230
|
+
return f"agents/{agent_identifier}/storages/{storage_id}/resources/{resource_id}/data"
|
|
231
|
+
|
|
232
|
+
return f"resources/{resource_id}/data"
|
|
233
|
+
|
|
234
|
+
@staticmethod
|
|
235
|
+
def _get_resource_metadata_s3_key(
|
|
236
|
+
resource_id: str,
|
|
237
|
+
agent_identifier: str | None = None,
|
|
238
|
+
storage_id: str | None = None,
|
|
239
|
+
) -> str:
|
|
240
|
+
"""Generate S3 key for a resource metadata."""
|
|
241
|
+
if agent_identifier:
|
|
242
|
+
if not storage_id:
|
|
243
|
+
raise ValueError("Storage ID is required for agent memory scope")
|
|
244
|
+
return (
|
|
245
|
+
f"agents/{agent_identifier}/storages/{storage_id}/resources/{resource_id}/"
|
|
246
|
+
f"metadata.json"
|
|
247
|
+
)
|
|
248
|
+
|
|
249
|
+
return f"resources/{resource_id}/metadata.json"
|
|
250
|
+
|
|
251
|
+
@staticmethod
|
|
252
|
+
def _get_storage_metadata_s3_key(storage_id: str, agent_identifier: str) -> str:
|
|
253
|
+
"""Generate S3 key for a storage metadata."""
|
|
254
|
+
return f"agents/{agent_identifier}/storages/{storage_id}/metadata.json"
|
|
255
|
+
|
|
256
|
+
@staticmethod
|
|
257
|
+
def _get_agent_identifier_s3_key(agent_identifier: str) -> str:
|
|
258
|
+
"""Generate S3 key for a agent identifier."""
|
|
259
|
+
return f"agents/{agent_identifier}/"
|
|
260
|
+
|
|
261
|
+
@staticmethod
|
|
262
|
+
def _get_active_storage_mapping_key(agent_identifier: str) -> str:
|
|
263
|
+
"""Generate S3 key for active storage mapping."""
|
|
264
|
+
return f"agents/{agent_identifier}/active_storage.json"
|
|
265
|
+
|
|
266
|
+
@staticmethod
|
|
267
|
+
def _handle_s3_error(operation: str, error: Exception, resource_id: str | None = None) -> None:
|
|
268
|
+
"""Handle S3 related errors with proper logging and re-raising."""
|
|
269
|
+
error_msg = f"Error during {operation}"
|
|
270
|
+
if resource_id:
|
|
271
|
+
error_msg += f" for resource {resource_id}"
|
|
272
|
+
|
|
273
|
+
if isinstance(error, ClientError):
|
|
274
|
+
error_code = error.response["Error"]["Code"]
|
|
275
|
+
error_msg += f": {error_code} - {str(error)}"
|
|
276
|
+
logger.error(error_msg)
|
|
277
|
+
if error_code in ["NoSuchKey", "404"]:
|
|
278
|
+
return None
|
|
279
|
+
raise S3StorageError(error_msg) from error
|
|
280
|
+
else:
|
|
281
|
+
error_msg += f": {str(error)}"
|
|
282
|
+
logger.error(error_msg)
|
|
283
|
+
raise S3StorageError(error_msg) from error
|
|
284
|
+
|
|
285
|
+
def _validate_agent_identifier(self, agent_identifier: str) -> None:
|
|
286
|
+
"""Validate the agent identifier is a valid s3 string to be used as a key and the key does
|
|
287
|
+
not already exist.
|
|
288
|
+
"""
|
|
289
|
+
if not re.match(r"^[a-zA-Z0-9!-_.*\'()]+$", agent_identifier):
|
|
290
|
+
raise ValueError("Agent identifier must be a valid s3 string to be used as a key")
|
|
291
|
+
|
|
292
|
+
try:
|
|
293
|
+
self.s3_config.client.head_object(
|
|
294
|
+
Bucket=self.s3_config.bucket_name, Key=f"agents/{agent_identifier}"
|
|
295
|
+
)
|
|
296
|
+
# If we get here, the object exists
|
|
297
|
+
raise ValueError(f"Agent identifier {agent_identifier} is already in use")
|
|
298
|
+
except ClientError as e:
|
|
299
|
+
error_code = int(e.response["Error"]["Code"])
|
|
300
|
+
if error_code == 404:
|
|
301
|
+
# This is good - means the agent identifier doesn't exist yet
|
|
302
|
+
return
|
|
303
|
+
# For any other error, raise it
|
|
304
|
+
raise
|
|
305
|
+
|
|
306
|
+
    async def initialize_storage(
        self,
        agent_identifier: str,
        label: str,
        storage_config: dict[str, Any] | None = None,
    ) -> str:
        """Initialize a new memory storage instance.

        Validates the agent identifier, writes the storage's metadata.json to
        S3, and marks the new storage as the agent's active one.

        Args:
            agent_identifier: Owner of the storage; must be S3-key safe and
                not already in use (enforced by _validate_agent_identifier).
            label: Human-readable name for the storage.
            storage_config: Optional free-form backend configuration.

        Returns:
            The new storage ID, or "" when a "not found" S3 error was
            swallowed by _handle_s3_error.
        """
        self._validate_agent_identifier(agent_identifier)

        memory_storage_id = MemoryManager._generate_memory_storage_id()

        memory_storage = MemoryStorage(
            id=memory_storage_id,
            agent_identifier=agent_identifier,
            label=label,
            storage_config=storage_config,
            # NOTE(review): naive UTC timestamp; datetime.utcnow is deprecated.
            created_at=datetime.utcnow(),
        )

        # Store metadata in S3
        try:
            s3_key = MemoryManager._get_storage_metadata_s3_key(memory_storage_id, agent_identifier)
            self.s3_config.client.put_object(
                Bucket=self.s3_config.bucket_name,
                Key=s3_key,
                Body=memory_storage.model_dump_json(),
            )

            # Set this as the active storage for the agent.
            # (set_storage_id_for_agent is defined elsewhere in this class,
            # outside this excerpt.)
            await self.set_storage_id_for_agent(
                agent_identifier=agent_identifier,
                storage_id=memory_storage_id,
                label=label,
            )

            logger.info(f"Initialized memory storage in S3: {memory_storage_id}")
            return memory_storage_id
        except ClientError as e:
            # _handle_s3_error re-raises everything except "not found" codes;
            # only in that swallowed case does the "" fallback return run.
            MemoryManager._handle_s3_error("initialize_storage", e, memory_storage_id)
            return ""
|
|
346
|
+
|
|
347
|
+
async def delete_storage(self, memory_storage_id: str, agent_identifier: str) -> bool:
|
|
348
|
+
"""Delete a memory storage and its resources."""
|
|
349
|
+
try:
|
|
350
|
+
# Check if this is the active storage
|
|
351
|
+
active_storage_id = await self.get_active_storage_id_for_agent(agent_identifier)
|
|
352
|
+
|
|
353
|
+
# List all objects with the storage ID prefix
|
|
354
|
+
prefix = f"agents/{agent_identifier}/storages/{memory_storage_id}"
|
|
355
|
+
response = self.s3_config.client.list_objects_v2(
|
|
356
|
+
Bucket=self.s3_config.bucket_name, Prefix=prefix
|
|
357
|
+
)
|
|
358
|
+
|
|
359
|
+
# Delete all resources
|
|
360
|
+
if "Contents" in response:
|
|
361
|
+
for obj in response["Contents"]:
|
|
362
|
+
self.s3_config.client.delete_object(
|
|
363
|
+
Bucket=self.s3_config.bucket_name, Key=obj["Key"]
|
|
364
|
+
)
|
|
365
|
+
|
|
366
|
+
# Clear the active storage mapping if this was the active storage
|
|
367
|
+
if active_storage_id == memory_storage_id:
|
|
368
|
+
await self.clear_storage_id_for_agent(agent_identifier)
|
|
369
|
+
|
|
370
|
+
logger.info(f"Deleted memory storage from S3: {memory_storage_id}")
|
|
371
|
+
return True
|
|
372
|
+
|
|
373
|
+
except ClientError as e:
|
|
374
|
+
MemoryManager._handle_s3_error("delete_storage", e, memory_storage_id)
|
|
375
|
+
return False
|
|
376
|
+
|
|
377
|
+
async def delete_agent(self, agent_identifier: str) -> bool:
|
|
378
|
+
"""Delete an agent and all its memory storages."""
|
|
379
|
+
try:
|
|
380
|
+
# List all contents for the agent
|
|
381
|
+
prefix = f"agents/{agent_identifier}"
|
|
382
|
+
response = self.s3_config.client.list_objects_v2(
|
|
383
|
+
Bucket=self.s3_config.bucket_name, Prefix=prefix
|
|
384
|
+
)
|
|
385
|
+
|
|
386
|
+
# Delete all contents
|
|
387
|
+
if "Contents" in response:
|
|
388
|
+
for obj in response["Contents"]:
|
|
389
|
+
self.s3_config.client.delete_object(
|
|
390
|
+
Bucket=self.s3_config.bucket_name, Key=obj["Key"]
|
|
391
|
+
)
|
|
392
|
+
|
|
393
|
+
# Clear the active storage mapping
|
|
394
|
+
await self.clear_storage_id_for_agent(agent_identifier)
|
|
395
|
+
|
|
396
|
+
logger.info(f"Deleted agent and all its memory storages from S3: {agent_identifier}")
|
|
397
|
+
return True
|
|
398
|
+
except ClientError as e:
|
|
399
|
+
MemoryManager._handle_s3_error("delete_agent", e, agent_identifier)
|
|
400
|
+
return False
|
|
401
|
+
|
|
402
|
+
async def list_storages(
|
|
403
|
+
self,
|
|
404
|
+
agent_identifier: str,
|
|
405
|
+
) -> list[MemoryStorage]:
|
|
406
|
+
"""List available memory storages for an agent."""
|
|
407
|
+
try:
|
|
408
|
+
prefix = f"agents/{agent_identifier}/storages/"
|
|
409
|
+
|
|
410
|
+
response = self.s3_config.client.list_objects_v2(
|
|
411
|
+
Bucket=self.s3_config.bucket_name, Prefix=prefix
|
|
412
|
+
)
|
|
413
|
+
|
|
414
|
+
storages = []
|
|
415
|
+
if "Contents" in response:
|
|
416
|
+
for obj in response["Contents"]:
|
|
417
|
+
if not obj["Key"].endswith("metadata.json"):
|
|
418
|
+
continue
|
|
419
|
+
|
|
420
|
+
result = self.s3_config.client.get_object(
|
|
421
|
+
Bucket=self.s3_config.bucket_name, Key=obj["Key"]
|
|
422
|
+
)
|
|
423
|
+
storage_data = json.loads(result["Body"].read().decode("utf-8"))
|
|
424
|
+
storage = MemoryStorage.model_validate_json(json.dumps(storage_data))
|
|
425
|
+
storages.append(storage)
|
|
426
|
+
|
|
427
|
+
return storages
|
|
428
|
+
|
|
429
|
+
except ClientError as e:
|
|
430
|
+
MemoryManager._handle_s3_error("list_storages", e)
|
|
431
|
+
return []
|
|
432
|
+
|
|
433
|
+
async def get_storage(
|
|
434
|
+
self,
|
|
435
|
+
agent_identifier: str,
|
|
436
|
+
memory_storage_id: str,
|
|
437
|
+
) -> MemoryStorage | None:
|
|
438
|
+
"""Get a memory storage by ID."""
|
|
439
|
+
try:
|
|
440
|
+
metadata_key = MemoryManager._get_storage_metadata_s3_key(
|
|
441
|
+
memory_storage_id, agent_identifier
|
|
442
|
+
)
|
|
443
|
+
|
|
444
|
+
result = self.s3_config.client.get_object(
|
|
445
|
+
Bucket=self.s3_config.bucket_name, Key=metadata_key
|
|
446
|
+
)
|
|
447
|
+
storage_data = json.loads(result["Body"].read().decode("utf-8"))
|
|
448
|
+
return MemoryStorage.model_validate_json(json.dumps(storage_data))
|
|
449
|
+
except ClientError as e:
|
|
450
|
+
MemoryManager._handle_s3_error("get_storage", e, memory_storage_id)
|
|
451
|
+
return None
|
|
452
|
+
|
|
453
|
+
    async def store_resource(
        self,
        data: Any,  # payload to store: a string, JSON text, or binary data
        memory_storage_id: str | None = None,
        agent_identifier: str | None = None,
        prompt: str | None = None,
        tool_context: ToolContext | None = None,
        embedding_vector: list[float] | None = None,
    ) -> str:
        """Store a resource in the memory storage.

        Args:
            data: The data to store (string, json or binary)
            memory_storage_id: Optional storage ID to associate the resource with
            agent_identifier: Required if memory_storage_id is provided, the agent that owns the
                storage
            prompt: Optional prompt used to generate this resource
            tool_context: Optional tool context associated with this resource
            embedding_vector: Optional embedding vector for the resource

        Returns
        -------
        str: The ID of the stored resource

        Raises
        ------
        ValueError: If memory_storage_id is provided without agent_identifier or vice versa
        S3StorageError: If there are S3 related errors
        """
        # Both scoping arguments must be supplied together (truthiness XOR).
        if (agent_identifier and not memory_storage_id) or (
            memory_storage_id and not agent_identifier
        ):
            raise ValueError("Agent identifier and memory storage ID must be provided together")

        resource_id = str(uuid.uuid4())
        resource = MemoryResource(
            id=resource_id,
            memory_storage_id=memory_storage_id,
            prompt=prompt,
            tool_context=tool_context,
            embedding_vector=embedding_vector,
            # NOTE(review): naive UTC timestamp; datetime.utcnow is deprecated.
            created_at=datetime.utcnow(),
        )

        try:
            # Store resource metadata first.
            metadata_key = MemoryManager._get_resource_metadata_s3_key(
                resource_id,
                agent_identifier=agent_identifier,
                storage_id=memory_storage_id,
            )
            self.s3_config.client.put_object(
                Bucket=self.s3_config.bucket_name,
                Key=metadata_key,
                Body=resource.model_dump_json(),
            )

            data_key = MemoryManager._get_resource_data_s3_key(
                resource_id,
                agent_identifier=agent_identifier,
                storage_id=memory_storage_id,
            )

            # Store actual data. NOTE(review): if this second put fails, the
            # metadata document written above is left orphaned — there is no
            # rollback here.
            self.s3_config.client.put_object(
                Bucket=self.s3_config.bucket_name, Key=data_key, Body=data
            )

            return resource_id

        except ClientError as e:
            # _handle_s3_error re-raises everything except "not found" codes;
            # only in that swallowed case does the "" fallback return run.
            MemoryManager._handle_s3_error("store_resource", e, resource_id)
            return ""
|
|
526
|
+
|
|
527
|
+
async def get_resource(
|
|
528
|
+
self,
|
|
529
|
+
resource_id: str,
|
|
530
|
+
memory_storage_id: str | None = None,
|
|
531
|
+
agent_identifier: str | None = None,
|
|
532
|
+
) -> MemoryResource | None:
|
|
533
|
+
"""Get a resource from the memory storage.
|
|
534
|
+
|
|
535
|
+
Args:
|
|
536
|
+
resource_id: The ID of the resource to retrieve
|
|
537
|
+
memory_storage_id: Optional storage ID the resource belongs to
|
|
538
|
+
agent_identifier: Required if memory_storage_id is provided, the agent that owns the
|
|
539
|
+
storage
|
|
540
|
+
|
|
541
|
+
Returns
|
|
542
|
+
-------
|
|
543
|
+
Optional[MemoryResource]: The resource if found, None otherwise
|
|
544
|
+
|
|
545
|
+
Raises
|
|
546
|
+
------
|
|
547
|
+
ValueError: If memory_storage_id is provided without agent_identifier or vice versa
|
|
548
|
+
S3StorageError: If there are S3 related errors
|
|
549
|
+
"""
|
|
550
|
+
if (agent_identifier and not memory_storage_id) or (
|
|
551
|
+
memory_storage_id and not agent_identifier
|
|
552
|
+
):
|
|
553
|
+
raise ValueError("Agent identifier and memory storage ID must be provided together")
|
|
554
|
+
|
|
555
|
+
try:
|
|
556
|
+
metadata_key = MemoryManager._get_resource_metadata_s3_key(
|
|
557
|
+
resource_id,
|
|
558
|
+
agent_identifier=agent_identifier,
|
|
559
|
+
storage_id=memory_storage_id,
|
|
560
|
+
)
|
|
561
|
+
result = self.s3_config.client.get_object(
|
|
562
|
+
Bucket=self.s3_config.bucket_name, Key=metadata_key
|
|
563
|
+
)
|
|
564
|
+
resource_data = json.loads(result["Body"].read().decode("utf-8"))
|
|
565
|
+
return MemoryResource.model_validate_json(json.dumps(resource_data))
|
|
566
|
+
except ClientError as e:
|
|
567
|
+
MemoryManager._handle_s3_error("get_resource", e, resource_id)
|
|
568
|
+
return None
|
|
569
|
+
|
|
570
|
+
async def list_resources(
|
|
571
|
+
self, agent_identifier: str, memory_storage_id: str | None = None
|
|
572
|
+
) -> list[MemoryResource]:
|
|
573
|
+
"""List all resources from the memory storage.
|
|
574
|
+
|
|
575
|
+
Args:
|
|
576
|
+
agent_identifier: Agent identifier to scope the search
|
|
577
|
+
memory_storage_id: Optional Storage ID to filter resources
|
|
578
|
+
"""
|
|
579
|
+
try:
|
|
580
|
+
prefix = (
|
|
581
|
+
f"agents/{agent_identifier}/storages/{memory_storage_id}/resources/"
|
|
582
|
+
if memory_storage_id
|
|
583
|
+
else f"agents/{agent_identifier}/storages/"
|
|
584
|
+
)
|
|
585
|
+
|
|
586
|
+
response = self.s3_config.client.list_objects_v2(
|
|
587
|
+
Bucket=self.s3_config.bucket_name, Prefix=prefix
|
|
588
|
+
)
|
|
589
|
+
|
|
590
|
+
resources = []
|
|
591
|
+
if "Contents" in response:
|
|
592
|
+
for obj in response["Contents"]:
|
|
593
|
+
if not obj["Key"].endswith("metadata.json"):
|
|
594
|
+
continue
|
|
595
|
+
|
|
596
|
+
result = self.s3_config.client.get_object(
|
|
597
|
+
Bucket=self.s3_config.bucket_name, Key=obj["Key"]
|
|
598
|
+
)
|
|
599
|
+
resource_data = json.loads(result["Body"].read().decode("utf-8"))
|
|
600
|
+
resource = MemoryResource.model_validate_json(json.dumps(resource_data))
|
|
601
|
+
|
|
602
|
+
if memory_storage_id and resource.memory_storage_id != memory_storage_id:
|
|
603
|
+
continue
|
|
604
|
+
|
|
605
|
+
resources.append(resource)
|
|
606
|
+
|
|
607
|
+
return resources
|
|
608
|
+
|
|
609
|
+
except ClientError as e:
|
|
610
|
+
MemoryManager._handle_s3_error("list_resources", e)
|
|
611
|
+
return []
|
|
612
|
+
|
|
613
|
+
async def get_resource_data(
|
|
614
|
+
self,
|
|
615
|
+
resource_id: str,
|
|
616
|
+
memory_storage_id: str | None = None,
|
|
617
|
+
agent_identifier: str | None = None,
|
|
618
|
+
) -> bytes | None:
|
|
619
|
+
"""Get the data of a resource by resource id.
|
|
620
|
+
|
|
621
|
+
Args:
|
|
622
|
+
resource_id: The ID of the resource to retrieve data for
|
|
623
|
+
memory_storage_id: Optional storage ID the resource belongs to
|
|
624
|
+
agent_identifier: Required if memory_storage_id is provided, the agent that owns the
|
|
625
|
+
storage
|
|
626
|
+
|
|
627
|
+
Returns
|
|
628
|
+
-------
|
|
629
|
+
Optional[bytes]: The resource data if found, None otherwise
|
|
630
|
+
|
|
631
|
+
Raises
|
|
632
|
+
------
|
|
633
|
+
ValueError: If memory_storage_id is provided without agent_identifier or vice versa
|
|
634
|
+
S3StorageError: If there are S3 related errors
|
|
635
|
+
"""
|
|
636
|
+
if (agent_identifier and not memory_storage_id) or (
|
|
637
|
+
memory_storage_id and not agent_identifier
|
|
638
|
+
):
|
|
639
|
+
raise ValueError("Agent identifier and memory storage ID must be provided together")
|
|
640
|
+
|
|
641
|
+
try:
|
|
642
|
+
data_key = MemoryManager._get_resource_data_s3_key(
|
|
643
|
+
resource_id,
|
|
644
|
+
agent_identifier=agent_identifier,
|
|
645
|
+
storage_id=memory_storage_id,
|
|
646
|
+
)
|
|
647
|
+
result = self.s3_config.client.get_object(
|
|
648
|
+
Bucket=self.s3_config.bucket_name, Key=data_key
|
|
649
|
+
)
|
|
650
|
+
data = result["Body"].read()
|
|
651
|
+
return data if isinstance(data, bytes) else None
|
|
652
|
+
except ClientError as e:
|
|
653
|
+
MemoryManager._handle_s3_error("get_resource_data", e, resource_id)
|
|
654
|
+
return None
|
|
655
|
+
|
|
656
|
+
async def delete_resource(
|
|
657
|
+
self,
|
|
658
|
+
resource_id: str,
|
|
659
|
+
memory_storage_id: str | None = None,
|
|
660
|
+
agent_identifier: str | None = None,
|
|
661
|
+
) -> bool:
|
|
662
|
+
"""Delete a resource from the memory storage.
|
|
663
|
+
|
|
664
|
+
Args:
|
|
665
|
+
resource_id: The ID of the resource to delete
|
|
666
|
+
memory_storage_id: Optional storage ID the resource belongs to
|
|
667
|
+
agent_identifier: Required if memory_storage_id is provided, the agent that owns the
|
|
668
|
+
storage
|
|
669
|
+
|
|
670
|
+
Returns
|
|
671
|
+
-------
|
|
672
|
+
bool: True if deletion was successful
|
|
673
|
+
|
|
674
|
+
Raises
|
|
675
|
+
------
|
|
676
|
+
ValueError: If memory_storage_id is provided without agent_identifier or vice versa
|
|
677
|
+
S3StorageError: If there are S3 related errors
|
|
678
|
+
"""
|
|
679
|
+
if (agent_identifier and not memory_storage_id) or (
|
|
680
|
+
memory_storage_id and not agent_identifier
|
|
681
|
+
):
|
|
682
|
+
raise ValueError("Agent identifier and memory storage ID must be provided together")
|
|
683
|
+
|
|
684
|
+
try:
|
|
685
|
+
# Delete metadata
|
|
686
|
+
metadata_key = MemoryManager._get_resource_metadata_s3_key(
|
|
687
|
+
resource_id,
|
|
688
|
+
agent_identifier=agent_identifier,
|
|
689
|
+
storage_id=memory_storage_id,
|
|
690
|
+
)
|
|
691
|
+
self.s3_config.client.delete_object(Bucket=self.s3_config.bucket_name, Key=metadata_key)
|
|
692
|
+
|
|
693
|
+
# Delete data
|
|
694
|
+
data_key = MemoryManager._get_resource_data_s3_key(
|
|
695
|
+
resource_id,
|
|
696
|
+
agent_identifier=agent_identifier,
|
|
697
|
+
storage_id=memory_storage_id,
|
|
698
|
+
)
|
|
699
|
+
self.s3_config.client.delete_object(Bucket=self.s3_config.bucket_name, Key=data_key)
|
|
700
|
+
|
|
701
|
+
return True
|
|
702
|
+
|
|
703
|
+
except ClientError as e:
|
|
704
|
+
MemoryManager._handle_s3_error("delete_resource", e, resource_id)
|
|
705
|
+
return False
|
|
706
|
+
|
|
707
|
+
async def clear_all_temp_resources(self, older_than_by_days: int = 1) -> bool:
|
|
708
|
+
"""Clear all temp resources older than a given number of days.
|
|
709
|
+
|
|
710
|
+
Args:
|
|
711
|
+
older_than_by_days: Optional number of days to compare against. If not provided,
|
|
712
|
+
defaults to 1 day ago.
|
|
713
|
+
"""
|
|
714
|
+
older_than = datetime.now(timezone.utc) - timedelta(days=older_than_by_days)
|
|
715
|
+
|
|
716
|
+
try:
|
|
717
|
+
prefix = "resources/"
|
|
718
|
+
response = self.s3_config.client.list_objects_v2(
|
|
719
|
+
Bucket=self.s3_config.bucket_name, Prefix=prefix
|
|
720
|
+
)
|
|
721
|
+
|
|
722
|
+
if "Contents" in response:
|
|
723
|
+
for obj in response["Contents"]:
|
|
724
|
+
last_modified = obj["LastModified"]
|
|
725
|
+
if last_modified < older_than:
|
|
726
|
+
self.s3_config.client.delete_object(
|
|
727
|
+
Bucket=self.s3_config.bucket_name, Key=obj["Key"]
|
|
728
|
+
)
|
|
729
|
+
|
|
730
|
+
return True
|
|
731
|
+
except ClientError as e:
|
|
732
|
+
MemoryManager._handle_s3_error("clear_all_temp_resources", e)
|
|
733
|
+
return False
|
|
734
|
+
|
|
735
|
+
async def list_temp_resources(self) -> list[MemoryResource]:
|
|
736
|
+
"""List all temp resources."""
|
|
737
|
+
prefix = "resources/"
|
|
738
|
+
response = self.s3_config.client.list_objects_v2(
|
|
739
|
+
Bucket=self.s3_config.bucket_name, Prefix=prefix
|
|
740
|
+
)
|
|
741
|
+
|
|
742
|
+
resources = []
|
|
743
|
+
if "Contents" in response:
|
|
744
|
+
for obj in response["Contents"]:
|
|
745
|
+
if not obj["Key"].endswith("metadata.json"):
|
|
746
|
+
continue
|
|
747
|
+
result = self.s3_config.client.get_object(
|
|
748
|
+
Bucket=self.s3_config.bucket_name, Key=obj["Key"]
|
|
749
|
+
)
|
|
750
|
+
resource_data = json.loads(result["Body"].read().decode("utf-8"))
|
|
751
|
+
resource = MemoryResource.model_validate_json(json.dumps(resource_data))
|
|
752
|
+
resources.append(resource)
|
|
753
|
+
return resources
|
|
754
|
+
|
|
755
|
+
async def find_agent_identifier_for_storage(self, memory_storage_id: str) -> str:
|
|
756
|
+
"""Find agent identifier from storage ID."""
|
|
757
|
+
prefix = "agents/"
|
|
758
|
+
response = self.s3_config.client.list_objects_v2(
|
|
759
|
+
Bucket=self.s3_config.bucket_name, Prefix=prefix
|
|
760
|
+
)
|
|
761
|
+
|
|
762
|
+
agent_identifier = None
|
|
763
|
+
if "Contents" in response:
|
|
764
|
+
for obj in response["Contents"]:
|
|
765
|
+
if memory_storage_id in obj["Key"]:
|
|
766
|
+
# Extract agent identifier from key pattern:
|
|
767
|
+
# agents/{agent_id}/storages/{storage_id}/
|
|
768
|
+
parts = obj["Key"].split("/")
|
|
769
|
+
if len(parts) > 1:
|
|
770
|
+
agent_identifier = parts[1]
|
|
771
|
+
break
|
|
772
|
+
|
|
773
|
+
if not agent_identifier:
|
|
774
|
+
raise ValueError(f"Memory storage {memory_storage_id} not found")
|
|
775
|
+
|
|
776
|
+
return str(agent_identifier)
|
|
777
|
+
|
|
778
|
+
async def set_storage_id_for_agent(
|
|
779
|
+
self, agent_identifier: str, storage_id: str, label: str
|
|
780
|
+
) -> None:
|
|
781
|
+
"""Set the active storage ID for an agent in S3."""
|
|
782
|
+
try:
|
|
783
|
+
mapping = ActiveStorageMapping(
|
|
784
|
+
agent_identifier=agent_identifier,
|
|
785
|
+
storage_id=storage_id,
|
|
786
|
+
label=label,
|
|
787
|
+
updated_at=datetime.now(timezone.utc),
|
|
788
|
+
)
|
|
789
|
+
|
|
790
|
+
key = self._get_active_storage_mapping_key(agent_identifier)
|
|
791
|
+
self.s3_config.client.put_object(
|
|
792
|
+
Bucket=self.s3_config.bucket_name,
|
|
793
|
+
Key=key,
|
|
794
|
+
Body=mapping.model_dump_json(),
|
|
795
|
+
)
|
|
796
|
+
except ClientError as e:
|
|
797
|
+
MemoryManager._handle_s3_error("set_storage_id_for_agent", e)
|
|
798
|
+
|
|
799
|
+
async def get_active_storage_id_for_agent(self, agent_identifier: str) -> str | None:
|
|
800
|
+
"""Get the active storage ID for an agent from S3."""
|
|
801
|
+
try:
|
|
802
|
+
key = self._get_active_storage_mapping_key(agent_identifier)
|
|
803
|
+
result = self.s3_config.client.get_object(Bucket=self.s3_config.bucket_name, Key=key)
|
|
804
|
+
mapping_data = json.loads(result["Body"].read().decode("utf-8"))
|
|
805
|
+
mapping = ActiveStorageMapping.model_validate_json(json.dumps(mapping_data))
|
|
806
|
+
return mapping.storage_id
|
|
807
|
+
except ClientError as e:
|
|
808
|
+
if e.response["Error"]["Code"] == "404":
|
|
809
|
+
return None
|
|
810
|
+
MemoryManager._handle_s3_error("get_active_storage_id_for_agent", e)
|
|
811
|
+
return None
|
|
812
|
+
|
|
813
|
+
async def clear_storage_id_for_agent(self, agent_identifier: str) -> None:
|
|
814
|
+
"""Clear the active storage ID for an agent from S3."""
|
|
815
|
+
try:
|
|
816
|
+
key = self._get_active_storage_mapping_key(agent_identifier)
|
|
817
|
+
self.s3_config.client.delete_object(Bucket=self.s3_config.bucket_name, Key=key)
|
|
818
|
+
except ClientError as e:
|
|
819
|
+
if e.response["Error"]["Code"] != "404":
|
|
820
|
+
MemoryManager._handle_s3_error("clear_storage_id_for_agent", e)
|