cloudbase-agent-langgraph 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cloudbase_agent/__init__.py +5 -0
- cloudbase_agent/langchain/__init__.py +14 -0
- cloudbase_agent/langchain/chat_history.py +219 -0
- cloudbase_agent/langchain/store/__init__.py +7 -0
- cloudbase_agent/langchain/store/tdai.py +396 -0
- cloudbase_agent/langgraph/__init__.py +26 -0
- cloudbase_agent/langgraph/ag_ui_langgraph_patch.py +837 -0
- cloudbase_agent/langgraph/agent.py +184 -0
- cloudbase_agent/langgraph/checkpoint.py +439 -0
- cloudbase_agent/langgraph/store/__init__.py +7 -0
- cloudbase_agent/langgraph/store/tdai_store.py +498 -0
- cloudbase_agent/langgraph/util.py +216 -0
- cloudbase_agent_langgraph-0.1.1.dist-info/METADATA +45 -0
- cloudbase_agent_langgraph-0.1.1.dist-info/RECORD +15 -0
- cloudbase_agent_langgraph-0.1.1.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""LangChain integration for Cloudbase Agent storage.

This module provides LangChain-compatible storage implementations using TDAI Memory.
"""

# Re-export the public storage classes so callers can import them directly
# from this subpackage instead of the implementation modules.
from .chat_history import TDAIChatHistory
from .store.tdai import TDAIStore

# Explicit public API of this subpackage.
__all__ = [
    "TDAIChatHistory",
    "TDAIStore",
]
|
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
"""LangChain chat history implementation using TDAI Memory.
|
|
4
|
+
|
|
5
|
+
This module provides a chat message history for LangChain that uses TDAI Memory
|
|
6
|
+
as the backend storage, enabling persistent conversation history.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import logging
from typing import List

try:
    from langchain_core.chat_history import BaseChatMessageHistory
    from langchain_core.messages import (
        BaseMessage,
        message_to_dict,
        messages_from_dict,
    )
except ImportError:
    raise ImportError("langchain-core is required for TDAIChatHistory. Install it with: pip install langchain-core")

from cloudbase_agent.tdaimemory import MemoryClient
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class TDAIChatHistory(BaseChatMessageHistory):
    """LangChain chat message history using TDAI Memory as backend.

    This class implements the BaseChatMessageHistory interface to store and retrieve
    chat messages using TDAI Memory's event storage system.

    :param client: TDAI Memory client instance
    :type client: MemoryClient
    :param session_id: Session ID for storing messages
    :type session_id: str

    Example::

        from cloudbase_agent.storage.tdaimemory import MemoryClient
        from cloudbase_agent.storage.langchain import TDAIChatHistory
        from langchain_core.messages import HumanMessage, AIMessage

        client = MemoryClient(
            endpoint="https://memory.tdai.tencentyun.com",
            api_key="your-api-key",
            memory_id="your-memory-id"
        )

        history = TDAIChatHistory(
            client=client,
            session_id="user_123"
        )

        # Add messages
        history.add_user_message("Hello!")
        history.add_ai_message("Hi there! How can I help you?")

        # Get messages
        messages = history.messages

        # Clear history
        history.clear()
    """

    # Shared logger; replaces the previous bare ``print`` calls so that
    # failures surface through the standard ``logging`` configuration.
    _logger = logging.getLogger(__name__)

    def __init__(
        self,
        client: MemoryClient,
        session_id: str,
        auto_create_session: bool = True,
    ):
        """Initialize the TDAI chat history.

        :param client: TDAI Memory client instance
        :type client: MemoryClient
        :param session_id: Session ID for storing messages (must start with 'session-' and be 16-20 chars)
        :type session_id: str
        :param auto_create_session: Whether to automatically create the session if it doesn't exist
        :type auto_create_session: bool
        """
        self.client = client
        self.session_id = session_id
        self._session_created = False

        # Auto-create session if requested
        if auto_create_session:
            self._ensure_session()

    def _ensure_session(self) -> None:
        """Ensure the session exists, create if it doesn't.

        Probes the session with a query; if the probe fails we cannot create
        a session with this exact ID (TDAI generates its own session IDs), so
        the first write is left to create it implicitly.  Either way the
        session is marked usable afterwards.
        """
        if not self._session_created:
            try:
                # Try to query the session to see if it exists.
                self.client.query_events(session_id=self.session_id)
            except Exception:
                # Session doesn't exist (or probe failed); the first
                # operation will create it implicitly.
                pass
            self._session_created = True

    @property
    def messages(self) -> List[BaseMessage]:
        """Retrieve all messages from the store.

        :return: List of messages (empty if the session has no events yet)
        :rtype: List[BaseMessage]
        :raises Exception: Propagates any storage error other than
            "data not exist" after logging it.
        """
        try:
            result = self.client.query_events(
                session_id=self.session_id,
            )

            events = result.get("events", [])

            # Events are stored as serialized message dicts; keep only
            # entries that carry the "type" discriminator expected by
            # messages_from_dict().
            message_dicts = [event for event in events if isinstance(event, dict) and "type" in event]

            # Convert dicts back to BaseMessage objects.
            return messages_from_dict(message_dicts)
        except Exception as e:
            # Only ignore "data not exist" errors: a session that exists but
            # has no events yet is normal.  All other errors must propagate.
            error_msg = str(e).lower()
            if "data not exist" in error_msg or "not exist" in error_msg:
                return []
            self._logger.exception("Error retrieving messages: %s", e)
            raise

    def add_message(self, message: BaseMessage) -> None:
        """Add a message to the store.

        :param message: Message to add
        :type message: BaseMessage
        :raises Exception: Propagates any storage error after logging it.
        """
        try:
            # Serialize the message and append it as a TDAI event.
            self.client.append_event(
                session_id=self.session_id,
                messages=message_to_dict(message),
            )
        except Exception as e:
            self._logger.exception("Error adding message: %s", e)
            raise

    def add_messages(self, messages: List[BaseMessage]) -> None:
        """Add multiple messages to the store.

        :param messages: List of messages to add
        :type messages: List[BaseMessage]
        """
        for message in messages:
            self.add_message(message)

    def clear(self) -> None:
        """Clear all messages from the store.

        This deletes all events in the session and the session itself, then
        creates a fresh session so the chat history can continue to be used.
        "Already gone" errors are tolerated so clear() stays idempotent.
        """
        try:
            # Collect the IDs of all stored events.
            try:
                result = self.client.query_events(
                    session_id=self.session_id,
                )
                event_ids = [event.get("event_id") for event in result.get("events", []) if event.get("event_id")]
            except Exception as e:
                # "data not exist" means the session is already empty.
                if "data not exist" in str(e).lower() or "not exist" in str(e).lower():
                    event_ids = []
                else:
                    # Real error - re-raise
                    raise

            # Delete all events, tolerating ones that vanished concurrently.
            for event_id in event_ids:
                try:
                    self.client.delete_event(
                        session_id=self.session_id,
                        event_id=event_id,
                    )
                except Exception as e:
                    if "not exist" not in str(e).lower():
                        raise

            # Delete the session itself (again tolerating "not exist").
            try:
                self.client.delete_session(
                    session_id=self.session_id,
                )
            except Exception as e:
                if "not exist" not in str(e).lower():
                    raise

            # Recreate the session so the chat history remains usable.
            # NOTE(review): TDAI assigns the new session ID, so self.session_id
            # changes here; result.get() could be None if the response lacks
            # "session_id" -- TODO confirm the create_session response schema.
            result = self.client.create_session(name="chat_history_session")
            self.session_id = result.get("session_id")
            self._session_created = True
        except Exception as e:
            # Swallow only "already gone" errors; anything else is logged
            # and re-raised.
            error_msg = str(e).lower()
            if "not exist" not in error_msg and "data not exist" not in error_msg:
                self._logger.exception("Error clearing messages: %s", e)
                raise

    def close(self) -> None:
        """Close the TDAI client connection."""
        self.client.close()
|
|
@@ -0,0 +1,396 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
"""LangChain store implementation using TDAI Memory.
|
|
4
|
+
|
|
5
|
+
This module provides a key-value store for LangChain that uses TDAI Memory
|
|
6
|
+
as the backend storage, enabling persistent caching and storage.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import base64
|
|
10
|
+
import json
|
|
11
|
+
from datetime import datetime
|
|
12
|
+
from typing import Iterator, List, Optional, Sequence, Tuple
|
|
13
|
+
|
|
14
|
+
try:
|
|
15
|
+
from langchain_core.stores import BaseStore
|
|
16
|
+
except ImportError:
|
|
17
|
+
raise ImportError("langchain-core is required for TDAIStore. Install it with: pip install langchain-core")
|
|
18
|
+
|
|
19
|
+
from cloudbase_agent.tdaimemory import MemoryClient
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class TDAIStore(BaseStore[str, bytes]):
    """LangChain key-value store using TDAI Memory as backend.

    This class implements the BaseStore interface for persistent key-value storage
    using TDAI Memory's record storage system.

    :param client: TDAI Memory client instance
    :type client: MemoryClient
    :param namespace: Optional namespace for keys
    :type namespace: Optional[str]
    :param default_session_id: Default session ID for storing records
    :type default_session_id: str
    :param default_strategy: Default strategy for storing records
    :type default_strategy: str
    :param ttl_seconds: Optional TTL for records in seconds
    :type ttl_seconds: Optional[int]
    :param yield_keys_scan_batch_size: Batch size for yielding keys
    :type yield_keys_scan_batch_size: int

    Example::

        from cloudbase_agent.storage.tdaimemory import MemoryClient
        from cloudbase_agent.storage.langchain.store import TDAIStore

        client = MemoryClient(
            endpoint="https://memory.tdai.tencentyun.com",
            api_key="your-api-key",
            memory_id="your-memory-id"
        )

        store = TDAIStore(
            client=client,
            namespace="cache"
        )

        # Store values
        store.mset([
            ("key1", b"value1"),
            ("key2", b"value2"),
        ])

        # Get values
        values = store.mget(["key1", "key2"])

        # Delete values
        store.mdelete(["key1"])
    """

    def __init__(
        self,
        client: MemoryClient,
        namespace: Optional[str] = None,
        default_session_id: str = "session-lc_store",
        default_strategy: str = "Persona_Profile",
        ttl_seconds: Optional[int] = None,
        yield_keys_scan_batch_size: int = 100,
    ):
        """Initialize the TDAI store.

        :param client: TDAI Memory client instance
        :type client: MemoryClient
        :param namespace: Optional namespace for keys
        :type namespace: Optional[str]
        :param default_session_id: Default session ID (must start with 'session-' and be 16-20 chars)
        :type default_session_id: str
        :param default_strategy: Default storage strategy (must be configured on TDAI server)
        :type default_strategy: str
        :param ttl_seconds: Optional TTL in seconds
        :type ttl_seconds: Optional[int]
        :param yield_keys_scan_batch_size: Batch size for key iteration (max 100)
        :type yield_keys_scan_batch_size: int
        """
        self.client = client
        self.namespace = namespace
        self.default_session_id = default_session_id
        self.default_strategy = default_strategy
        self.ttl_seconds = ttl_seconds
        # Clamp to the TDAI API's page-size ceiling.
        self.yield_keys_scan_batch_size = min(yield_keys_scan_batch_size, 100)  # Max 100
        # Session ID resolved lazily by _get_session() on first use.
        self._session_cache: Optional[str] = None

    def _get_prefixed_key(self, key: str) -> str:
        """Get prefixed key with namespace.

        :param key: Original key
        :type key: str
        :return: Prefixed key
        :rtype: str
        """
        if self.namespace:
            return f"{self.namespace}/{key}"
        return key

    def _get_deprefixed_key(self, key: str) -> str:
        """Remove prefix from key.

        :param key: Prefixed key
        :type key: str
        :return: Original key
        :rtype: str
        """
        if self.namespace:
            prefix = f"{self.namespace}/"
            if key.startswith(prefix):
                return key[len(prefix) :]
        return key

    def _get_session(self) -> str:
        """Get or create session for the store.

        :return: Session ID
        :rtype: str
        """
        if self._session_cache:
            return self._session_cache

        try:
            # Try to find an existing session.
            # NOTE(review): query_sessions(limit=1) picks an arbitrary
            # session, not necessarily one created by this store -- verify
            # the intended scoping against the TDAI API.
            result = self.client.query_sessions(limit=1)
            sessions = result.get("sessions", [])

            if sessions:
                self._session_cache = sessions[0].get("session_id")
            else:
                # Create new session
                result = self.client.create_session(name=f"langchain_store_{self.namespace or 'default'}")
                self._session_cache = result.get("session_id")

            return self._session_cache
        except Exception:
            # Fallback to the default session.  Note this path does NOT
            # populate _session_cache, so the lookup is retried on every call.
            return self.default_session_id

    def _create_record_content(self, key: str, value: bytes) -> str:
        """Create record content for storage.

        :param key: Key
        :type key: str
        :param value: Value as bytes
        :type value: bytes
        :return: JSON string for storage
        :rtype: str
        """
        # Use base64 encoding to handle binary data safely
        content = {
            "key": self._get_prefixed_key(key),
            # TTL bookkeeping is stored with the record; expiry is enforced
            # client-side in _parse_record_content, not by the backend.
            "value": base64.b64encode(value).decode("ascii"),
            "timestamp": datetime.now().isoformat(),
            "ttl": self.ttl_seconds,
        }
        return json.dumps(content)

    def _parse_record_content(self, content: str) -> Optional[dict]:
        """Parse record content from storage.

        :param content: JSON string from storage
        :type content: str
        :return: Parsed data or None if expired/invalid
        :rtype: Optional[dict]
        """
        try:
            data = json.loads(content)

            # Check TTL if present.  Expired records are only filtered out
            # here at read time; they are never deleted from the backend.
            if data.get("ttl") and data.get("timestamp"):
                timestamp = datetime.fromisoformat(data["timestamp"])
                now = datetime.now()
                if (now - timestamp).total_seconds() > data["ttl"]:
                    return None  # Expired

            # Decode base64 value back to bytes
            return {
                "key": data["key"],
                "value": base64.b64decode(data["value"]),
                "timestamp": data["timestamp"],
                "ttl": data.get("ttl"),
            }
        except Exception:
            # Malformed JSON / missing fields: treat as absent.
            return None

    def mget(self, keys: Sequence[str]) -> List[Optional[bytes]]:
        """Get multiple keys from the store.

        :param keys: List of keys to retrieve
        :type keys: Sequence[str]
        :return: List of values (None if key not found)
        :rtype: List[Optional[bytes]]
        """
        if not keys:
            return []

        try:
            session_id = self._get_session()
            results: List[Optional[bytes]] = [None] * len(keys)

            # Build a map of prefixed keys to original indices
            prefixed_keys = {self._get_prefixed_key(key): i for i, key in enumerate(keys)}

            # Query all records for this session and strategy
            try:
                # NOTE(review): only a single page of up to 100 records is
                # fetched here (no pagination like yield_keys); keys stored
                # beyond that window will silently come back as None --
                # TODO confirm whether stores can exceed 100 records.
                result = self.client.query_records(
                    session_id=session_id,
                    strategies=[self.default_strategy],
                    limit=100,  # Max allowed by API
                )

                records = result.get("records", [])

                # Parse each record and match against requested keys
                for record in records:
                    parsed = self._parse_record_content(record["record_content"])
                    if parsed and parsed["key"] in prefixed_keys:
                        idx = prefixed_keys[parsed["key"]]
                        results[idx] = parsed["value"]
            except Exception:
                # If query fails, try search_records as fallback for each key
                for i, key in enumerate(keys):
                    prefixed_key = self._get_prefixed_key(key)
                    try:
                        result = self.client.search_records(
                            content=prefixed_key,
                            session_id=session_id,
                            strategies=[self.default_strategy],
                            limit=1,
                        )

                        records = result.get("records", [])
                        if records:
                            record = records[0]
                            parsed = self._parse_record_content(record["record_content"])

                            # Exact-match check: search is fuzzy, so guard
                            # against a near-miss record.
                            if parsed and parsed["key"] == prefixed_key:
                                results[i] = parsed["value"]
                    except Exception:
                        results[i] = None

            return results
        except Exception:
            # Best-effort contract: total failure degrades to "nothing found".
            return [None] * len(keys)

    def mset(self, key_value_pairs: Sequence[Tuple[str, bytes]]) -> None:
        """Set multiple keys in the store.

        :param key_value_pairs: List of (key, value) tuples
        :type key_value_pairs: Sequence[Tuple[str, bytes]]
        :raises RuntimeError: If the session cannot be resolved at all.
        """
        if not key_value_pairs:
            return

        try:
            session_id = self._get_session()

            # Set each key-value pair
            # NOTE(review): append_record always adds a NEW record; setting an
            # existing key does not replace its old record, so duplicates can
            # accumulate and mget returns whichever record matches first --
            # verify dedup/overwrite semantics on the TDAI server.
            for key, value in key_value_pairs:
                content = self._create_record_content(key, value)

                try:
                    self.client.append_record(
                        session_id=session_id,
                        content=content,
                        strategy=self.default_strategy,
                    )
                except Exception as e:
                    # Per-key failures are reported but do not abort the batch.
                    print(f"Failed to set key {key}: {e}")
        except Exception as e:
            raise RuntimeError(f"Failed to set keys: {e}")

    def mdelete(self, keys: Sequence[str]) -> None:
        """Delete multiple keys from the store.

        :param keys: List of keys to delete
        :type keys: Sequence[str]
        :raises RuntimeError: If the session cannot be resolved at all.
        """
        if not keys:
            return

        try:
            session_id = self._get_session()

            # Build a map of prefixed keys to original keys
            prefixed_keys = {self._get_prefixed_key(key): key for key in keys}

            # Query all records to find the ones to delete
            try:
                # NOTE(review): same single-page (limit=100) caveat as mget;
                # records beyond the first page are not seen and thus not
                # deleted.
                result = self.client.query_records(
                    session_id=session_id,
                    strategies=[self.default_strategy],
                    limit=100,
                )

                records = result.get("records", [])

                # Find and delete matching records
                for record in records:
                    parsed = self._parse_record_content(record["record_content"])
                    if parsed and parsed["key"] in prefixed_keys:
                        try:
                            self.client.delete_record(
                                session_id=session_id,
                                record_id=record["record_id"],
                            )
                        except Exception as e:
                            print(f"Failed to delete key {prefixed_keys[parsed['key']]}: {e}")
            except Exception:
                # Fallback to search_records for each key
                for key in keys:
                    prefixed_key = self._get_prefixed_key(key)
                    try:
                        result = self.client.search_records(
                            content=prefixed_key,
                            session_id=session_id,
                            strategies=[self.default_strategy],
                            limit=1,
                        )

                        records = result.get("records", [])
                        if records:
                            record = records[0]
                            self.client.delete_record(
                                session_id=session_id,
                                record_id=record["record_id"],
                            )
                    except Exception as e:
                        print(f"Failed to delete key {key}: {e}")
        except Exception as e:
            raise RuntimeError(f"Failed to delete keys: {e}")

    def yield_keys(self, prefix: Optional[str] = None) -> Iterator[str]:
        """Yield keys from the store.

        :param prefix: Optional prefix to filter keys
        :type prefix: Optional[str]
        :yield: Keys from the store
        :rtype: Iterator[str]
        """
        try:
            session_id = self._get_session()
            offset = 0
            has_more = True

            # Paginate through all records in batches; this is the only
            # method that walks past the first 100 records.
            while has_more:
                try:
                    result = self.client.query_records(
                        session_id=session_id,
                        strategies=[self.default_strategy],
                        limit=self.yield_keys_scan_batch_size,
                        offset=offset,
                    )

                    records = result.get("records", [])
                    if not records:
                        has_more = False
                        break

                    for record in records:
                        parsed = self._parse_record_content(record["record_content"])
                        if parsed:
                            deprefixed_key = self._get_deprefixed_key(parsed["key"])

                            # Prefix filtering happens on the de-namespaced key.
                            if not prefix or deprefixed_key.startswith(prefix):
                                yield deprefixed_key

                    offset += len(records)

                    # If we got fewer records than requested, we've reached the end
                    if len(records) < self.yield_keys_scan_batch_size:
                        has_more = False
                except Exception as e:
                    # Stop iterating on a page failure rather than looping forever.
                    print(f"Error yielding keys at offset {offset}: {e}")
                    has_more = False
        except Exception as e:
            # Best-effort: iteration just ends early on unexpected failure.
            print(f"Error yielding keys: {e}")

    def close(self) -> None:
        """Close the TDAI client connection."""
        self.client.close()
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""LangGraph integration for Cloudbase Agent storage.

This module provides LangGraph-compatible storage implementations using TDAI Memory.
"""

# Re-export the public classes and conversion helpers so callers can import
# everything from this subpackage root instead of the implementation modules.
from .agent import LangGraphAgent
from .checkpoint import TDAICheckpointSaver
from .store.tdai_store import TDAIStore
from .util import (
    convert_action_to_dynamic_structured_tool,
    convert_actions_to_dynamic_structured_tools,
    convert_json_schema_to_pydantic_model,
    convert_pydantic_model_to_json_schema,
)

# Explicit public API of this subpackage.
__all__ = [
    "LangGraphAgent",
    "TDAICheckpointSaver",
    "TDAIStore",
    "convert_actions_to_dynamic_structured_tools",
    "convert_action_to_dynamic_structured_tool",
    "convert_json_schema_to_pydantic_model",
    "convert_pydantic_model_to_json_schema",
]
|