agentscope-runtime 1.0.5.post1__py3-none-any.whl → 1.1.0b3__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.
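The comparison can be reproduced locally with only the Python standard library, assuming both wheels have already been downloaded (for example via pip download agentscope-runtime==<version> --no-deps). A minimal sketch follows; the wheel file names are inferred from this report's title and are assumptions, not registry output:

# Hypothetical reproduction of this report: diff the Python members of two locally
# downloaded wheels. Wheel file names are assumptions inferred from the title above.
import difflib
import zipfile

OLD = "agentscope_runtime-1.0.5.post1-py3-none-any.whl"
NEW = "agentscope_runtime-1.1.0b3-py3-none-any.whl"


def python_members(path: str) -> dict[str, list[str]]:
    """Map each .py member of a wheel to its decoded lines."""
    with zipfile.ZipFile(path) as wheel:
        return {
            name: wheel.read(name).decode("utf-8", errors="replace").splitlines()
            for name in wheel.namelist()
            if name.endswith(".py")
        }


old, new = python_members(OLD), python_members(NEW)
for name in sorted(set(old) | set(new)):
    for line in difflib.unified_diff(
        old.get(name, []),
        new.get(name, []),
        fromfile=f"1.0.5.post1/{name}",
        tofile=f"1.1.0b3/{name}",
        lineterm="",
    ):
        print(line)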
- agentscope_runtime/__init__.py +3 -0
- agentscope_runtime/adapters/agentscope/message.py +85 -295
- agentscope_runtime/adapters/agentscope/stream.py +133 -3
- agentscope_runtime/adapters/agno/message.py +11 -2
- agentscope_runtime/adapters/agno/stream.py +1 -0
- agentscope_runtime/adapters/langgraph/__init__.py +1 -3
- agentscope_runtime/adapters/langgraph/message.py +11 -106
- agentscope_runtime/adapters/langgraph/stream.py +1 -0
- agentscope_runtime/adapters/ms_agent_framework/message.py +11 -1
- agentscope_runtime/adapters/ms_agent_framework/stream.py +1 -0
- agentscope_runtime/adapters/text/stream.py +1 -0
- agentscope_runtime/common/container_clients/agentrun_client.py +0 -3
- agentscope_runtime/common/container_clients/boxlite_client.py +26 -15
- agentscope_runtime/common/container_clients/fc_client.py +0 -11
- agentscope_runtime/common/utils/deprecation.py +14 -17
- agentscope_runtime/common/utils/logging.py +44 -0
- agentscope_runtime/engine/app/agent_app.py +5 -5
- agentscope_runtime/engine/app/celery_mixin.py +43 -4
- agentscope_runtime/engine/deployers/adapter/agui/__init__.py +8 -1
- agentscope_runtime/engine/deployers/adapter/agui/agui_adapter_utils.py +6 -1
- agentscope_runtime/engine/deployers/adapter/agui/agui_protocol_adapter.py +2 -2
- agentscope_runtime/engine/deployers/utils/service_utils/fastapi_factory.py +13 -0
- agentscope_runtime/engine/runner.py +31 -6
- agentscope_runtime/engine/schemas/agent_schemas.py +28 -0
- agentscope_runtime/engine/services/sandbox/sandbox_service.py +41 -9
- agentscope_runtime/sandbox/box/base/base_sandbox.py +4 -0
- agentscope_runtime/sandbox/box/browser/browser_sandbox.py +4 -0
- agentscope_runtime/sandbox/box/dummy/dummy_sandbox.py +9 -2
- agentscope_runtime/sandbox/box/filesystem/filesystem_sandbox.py +4 -0
- agentscope_runtime/sandbox/box/gui/gui_sandbox.py +5 -1
- agentscope_runtime/sandbox/box/mobile/mobile_sandbox.py +4 -0
- agentscope_runtime/sandbox/box/sandbox.py +122 -13
- agentscope_runtime/sandbox/client/async_http_client.py +1 -0
- agentscope_runtime/sandbox/client/base.py +0 -1
- agentscope_runtime/sandbox/client/http_client.py +0 -2
- agentscope_runtime/sandbox/manager/heartbeat_mixin.py +486 -0
- agentscope_runtime/sandbox/manager/sandbox_manager.py +740 -153
- agentscope_runtime/sandbox/manager/server/app.py +18 -11
- agentscope_runtime/sandbox/manager/server/config.py +10 -2
- agentscope_runtime/sandbox/mcp_server.py +0 -1
- agentscope_runtime/sandbox/model/__init__.py +2 -1
- agentscope_runtime/sandbox/model/container.py +90 -3
- agentscope_runtime/sandbox/model/manager_config.py +45 -1
- agentscope_runtime/version.py +1 -1
- {agentscope_runtime-1.0.5.post1.dist-info → agentscope_runtime-1.1.0b3.dist-info}/METADATA +37 -54
- {agentscope_runtime-1.0.5.post1.dist-info → agentscope_runtime-1.1.0b3.dist-info}/RECORD +50 -69
- agentscope_runtime/adapters/agentscope/long_term_memory/__init__.py +0 -6
- agentscope_runtime/adapters/agentscope/long_term_memory/_long_term_memory_adapter.py +0 -258
- agentscope_runtime/adapters/agentscope/memory/__init__.py +0 -6
- agentscope_runtime/adapters/agentscope/memory/_memory_adapter.py +0 -152
- agentscope_runtime/engine/services/agent_state/__init__.py +0 -25
- agentscope_runtime/engine/services/agent_state/redis_state_service.py +0 -166
- agentscope_runtime/engine/services/agent_state/state_service.py +0 -179
- agentscope_runtime/engine/services/agent_state/state_service_factory.py +0 -52
- agentscope_runtime/engine/services/memory/__init__.py +0 -33
- agentscope_runtime/engine/services/memory/mem0_memory_service.py +0 -128
- agentscope_runtime/engine/services/memory/memory_service.py +0 -292
- agentscope_runtime/engine/services/memory/memory_service_factory.py +0 -126
- agentscope_runtime/engine/services/memory/redis_memory_service.py +0 -290
- agentscope_runtime/engine/services/memory/reme_personal_memory_service.py +0 -109
- agentscope_runtime/engine/services/memory/reme_task_memory_service.py +0 -11
- agentscope_runtime/engine/services/memory/tablestore_memory_service.py +0 -301
- agentscope_runtime/engine/services/session_history/__init__.py +0 -32
- agentscope_runtime/engine/services/session_history/redis_session_history_service.py +0 -283
- agentscope_runtime/engine/services/session_history/session_history_service.py +0 -267
- agentscope_runtime/engine/services/session_history/session_history_service_factory.py +0 -73
- agentscope_runtime/engine/services/session_history/tablestore_session_history_service.py +0 -288
- {agentscope_runtime-1.0.5.post1.dist-info → agentscope_runtime-1.1.0b3.dist-info}/WHEEL +0 -0
- {agentscope_runtime-1.0.5.post1.dist-info → agentscope_runtime-1.1.0b3.dist-info}/entry_points.txt +0 -0
- {agentscope_runtime-1.0.5.post1.dist-info → agentscope_runtime-1.1.0b3.dist-info}/licenses/LICENSE +0 -0
- {agentscope_runtime-1.0.5.post1.dist-info → agentscope_runtime-1.1.0b3.dist-info}/top_level.txt +0 -0
--- agentscope_runtime/engine/services/memory/redis_memory_service.py
+++ /dev/null
@@ -1,290 +0,0 @@
-# -*- coding: utf-8 -*-
-from typing import Optional, Dict, Any
-import json
-import redis.asyncio as aioredis
-
-
-from .memory_service import MemoryService
-from ...schemas.agent_schemas import Message, MessageType
-
-
-class RedisMemoryService(MemoryService):
-    """
-    A Redis-based implementation of the memory service.
-    """
-
-    def __init__(
-        self,
-        redis_url: str = "redis://localhost:6379/0",
-        redis_client: Optional[aioredis.Redis] = None,
-        socket_timeout: Optional[float] = 5.0,
-        socket_connect_timeout: Optional[float] = 5.0,
-        max_connections: Optional[int] = None,
-        retry_on_timeout: bool = True,
-        ttl_seconds: Optional[int] = 3600,  # 1 hour in seconds
-        max_messages_per_session: Optional[int] = None,
-        health_check_interval: Optional[float] = 30.0,
-        socket_keepalive: bool = True,
-    ):
-        """
-        Initialize RedisMemoryService.
-
-        Args:
-            redis_url: Redis connection URL.
-            redis_client: Optional pre-configured Redis client.
-            socket_timeout: Socket timeout in seconds (default: 5.0).
-            socket_connect_timeout: Socket connect timeout in seconds
-                (default: 5.0).
-            max_connections: Maximum number of connections in the pool
-                (default: None).
-            retry_on_timeout: Whether to retry on timeout (default: True).
-            ttl_seconds: Time-to-live in seconds for memory data. If None,
-                data never expires (default: 3600, i.e., 1 hour).
-            max_messages_per_session: Maximum number of messages stored per
-                session_id field within a user's Redis memory hash. If None,
-                no limit (default: None).
-            health_check_interval: Interval in seconds for health checks on
-                idle connections (default: 30.0). Connections idle longer
-                than this will be checked before reuse. Set to 0 to disable.
-            socket_keepalive: Enable TCP keepalive to prevent silent
-                disconnections (default: True).
-        """
-        self._redis_url = redis_url
-        self._redis = redis_client
-        self._DEFAULT_SESSION_ID = "default"
-        self._socket_timeout = socket_timeout
-        self._socket_connect_timeout = socket_connect_timeout
-        self._max_connections = max_connections
-        self._retry_on_timeout = retry_on_timeout
-        self._ttl_seconds = ttl_seconds
-        self._max_messages_per_session = max_messages_per_session
-        self._health_check_interval = health_check_interval
-        self._socket_keepalive = socket_keepalive
-
-    async def start(self) -> None:
-        """Starts the Redis connection with proper timeout
-        and connection pool settings."""
-        if self._redis is None:
-            self._redis = aioredis.from_url(
-                self._redis_url,
-                decode_responses=True,
-                socket_timeout=self._socket_timeout,
-                socket_connect_timeout=self._socket_connect_timeout,
-                max_connections=self._max_connections,
-                retry_on_timeout=self._retry_on_timeout,
-                health_check_interval=self._health_check_interval,
-                socket_keepalive=self._socket_keepalive,
-            )
-
-    async def stop(self) -> None:
-        """Closes the Redis connection."""
-        if self._redis:
-            await self._redis.aclose()
-            self._redis = None
-
-    async def health(self) -> bool:
-        """Checks the health of the service."""
-
-        if not self._redis:
-            return False
-        try:
-            pong = await self._redis.ping()
-            return pong is True or pong == "PONG"
-        except Exception:
-            return False
-
-    def _user_key(self, user_id):
-        # Each user is a Redis hash
-        return f"user_memory:{user_id}"
-
-    def _serialize(self, messages):
-        return json.dumps([msg.dict() for msg in messages])
-
-    def _deserialize(self, messages_json):
-        if not messages_json:
-            return []
-        return [Message.parse_obj(m) for m in json.loads(messages_json)]
-
-    async def add_memory(
-        self,
-        user_id: str,
-        messages: list,
-        session_id: Optional[str] = None,
-    ) -> None:
-        if not self._redis:
-            raise RuntimeError("Redis connection is not available")
-        key = self._user_key(user_id)
-        field = session_id if session_id else self._DEFAULT_SESSION_ID
-
-        existing_json = await self._redis.hget(key, field)
-        existing_msgs = self._deserialize(existing_json)
-        all_msgs = existing_msgs + messages
-
-        # Limit the number of messages per session to prevent memory issues
-        if self._max_messages_per_session is not None:
-            if len(all_msgs) > self._max_messages_per_session:
-                # Keep only the most recent messages
-                all_msgs = all_msgs[-self._max_messages_per_session :]
-
-        await self._redis.hset(key, field, self._serialize(all_msgs))
-
-        # Set TTL for the key if configured
-        if self._ttl_seconds is not None:
-            await self._redis.expire(key, self._ttl_seconds)
-
-    async def search_memory(  # pylint: disable=too-many-branches
-        self,
-        user_id: str,
-        messages: list,
-        filters: Optional[Dict[str, Any]] = None,
-    ) -> list:
-        if not self._redis:
-            raise RuntimeError("Redis connection is not available")
-        key = self._user_key(user_id)
-        if (
-            not messages
-            or not isinstance(messages, list)
-            or len(messages) == 0
-        ):
-            return []
-
-        message = messages[-1]
-        query = await self.get_query_text(message)
-        if not query:
-            return []
-
-        keywords = set(query.lower().split())
-
-        # Process messages in batches to avoid loading all into memory at once
-        matched_messages = []
-        hash_keys = await self._redis.hkeys(key)
-
-        # Get top_k limit early to optimize memory usage
-        top_k = None
-        if (
-            filters
-            and "top_k" in filters
-            and isinstance(filters["top_k"], int)
-        ):
-            top_k = filters["top_k"]
-
-        # Process each session separately to reduce memory footprint
-        for session_id in hash_keys:
-            msgs_json = await self._redis.hget(key, session_id)
-            if not msgs_json:
-                continue
-            try:
-                msgs = self._deserialize(msgs_json)
-            except Exception:
-                # Skip corrupted message data
-                continue
-
-            # Match messages in this session
-            for msg in msgs:
-                candidate_content = await self.get_query_text(msg)
-                if candidate_content:
-                    msg_content_lower = candidate_content.lower()
-                    if any(
-                        keyword in msg_content_lower for keyword in keywords
-                    ):
-                        matched_messages.append(msg)
-
-        # Apply top_k filter if specified
-        if top_k is not None:
-            result = matched_messages[-top_k:]
-        else:
-            result = matched_messages
-
-        # Refresh TTL on read to extend lifetime of actively used data,
-        # if a TTL is configured and there is existing data for this key.
-        if self._ttl_seconds is not None and hash_keys:
-            await self._redis.expire(key, self._ttl_seconds)
-
-        return result
-
-    async def get_query_text(self, message: Message) -> str:
-        if message:
-            if message.type == MessageType.MESSAGE:
-                for content in message.content:
-                    if content.type == "text":
-                        return content.text
-        return ""
-
-    async def list_memory(
-        self,
-        user_id: str,
-        filters: Optional[Dict[str, Any]] = None,
-    ) -> list:
-        if not self._redis:
-            raise RuntimeError("Redis connection is not available")
-        key = self._user_key(user_id)
-        page_num = filters.get("page_num", 1) if filters else 1
-        page_size = filters.get("page_size", 10) if filters else 10
-
-        start_index = (page_num - 1) * page_size
-        end_index = start_index + page_size
-
-        # Optimize: Calculate which sessions we need to load
-        # For simplicity, we still load all but could be optimized further
-        # to only load sessions that contain the requested page range
-        all_msgs = []
-        hash_keys = await self._redis.hkeys(key)
-        for session_id in sorted(hash_keys):
-            msgs_json = await self._redis.hget(key, session_id)
-            if msgs_json:
-                try:
-                    msgs = self._deserialize(msgs_json)
-                    all_msgs.extend(msgs)
-                except json.JSONDecodeError:
-                    # Skip corrupted message data
-                    continue
-
-        # Early exit optimization: if we've loaded enough messages
-        # to cover the requested page, we can stop (but this assumes
-        # we need all previous messages for proper ordering)
-        # For now, we keep loading all for correctness
-
-        # Refresh TTL on active use to keep memory alive,
-        # mirroring get_session behavior
-        if self._ttl_seconds is not None and hash_keys:
-            await self._redis.expire(key, self._ttl_seconds)
-        return all_msgs[start_index:end_index]
-
-    async def delete_memory(
-        self,
-        user_id: str,
-        session_id: Optional[str] = None,
-    ) -> None:
-        if not self._redis:
-            raise RuntimeError("Redis connection is not available")
-        key = self._user_key(user_id)
-        if session_id:
-            await self._redis.hdel(key, session_id)
-        else:
-            await self._redis.delete(key)
-
-    async def clear_all_memory(self) -> None:
-        """
-        Clears all memory data from Redis.
-        This method removes all user memory keys from the Redis database.
-        """
-        if not self._redis:
-            raise RuntimeError("Redis connection is not available")
-
-        keys = await self._redis.keys(self._user_key("*"))
-        if keys:
-            await self._redis.delete(*keys)
-
-    async def delete_user_memory(self, user_id: str) -> None:
-        """
-        Deletes all memory data for a specific user.
-
-        Args:
-            user_id (str): The ID of the user whose memory data should be
-                deleted
-        """
-        if not self._redis:
-            raise RuntimeError("Redis connection is not available")
-
-        key = self._user_key(user_id)
-        await self._redis.delete(key)
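The hunk above removes the package's Redis-backed memory service outright. For orientation, here is a minimal usage sketch of the class as it existed in 1.0.5.post1, assuming a reachable local Redis and a Message payload shape inferred from the removed code; both are assumptions for illustration, not the 1.1.0b3 API.

# Hypothetical usage of the removed RedisMemoryService (1.0.5.post1, shown above).
# Assumes a Redis server at localhost:6379 and that the dict passed to
# Message.parse_obj matches agent_schemas.Message; both are assumptions.
import asyncio

from agentscope_runtime.engine.schemas.agent_schemas import Message
from agentscope_runtime.engine.services.memory.redis_memory_service import (
    RedisMemoryService,  # module removed in 1.1.0b3
)


async def main() -> None:
    service = RedisMemoryService(
        redis_url="redis://localhost:6379/0",
        ttl_seconds=3600,              # user hash expires after an hour of inactivity
        max_messages_per_session=100,  # keep only the 100 most recent messages per session
    )
    await service.start()
    try:
        msg = Message.parse_obj(
            {"role": "user", "content": [{"type": "text", "text": "my favorite color is blue"}]},
        )
        # Storage layout from the removed code: one Redis hash per user
        # ("user_memory:u1"), one field per session ("s1"), JSON-serialized lists.
        await service.add_memory(user_id="u1", messages=[msg], session_id="s1")
        # Search is a plain keyword match against the text of the last message passed in.
        hits = await service.search_memory(user_id="u1", messages=[msg], filters={"top_k": 5})
        print([m.content for m in hits])
    finally:
        await service.stop()


asyncio.run(main())

Note that the TTL is refreshed on both writes and reads, so actively used memory stays alive while idle users expire.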
--- agentscope_runtime/engine/services/memory/reme_personal_memory_service.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# -*- coding: utf-8 -*-
-import os
-from typing import Optional, Dict, Any, List
-
-from reme_ai.service.personal_memory_service import (
-    PersonalMemoryService,
-)
-
-from .memory_service import MemoryService
-from ...schemas.agent_schemas import Message
-
-
-class ReMePersonalMemoryService(MemoryService):
-    """
-    ReMe requires the following env variables to be set:
-    FLOW_EMBEDDING_API_KEY=sk-xxxx
-    FLOW_EMBEDDING_BASE_URL=https://xxxx/v1
-    FLOW_LLM_API_KEY=sk-xxxx
-    FLOW_LLM_BASE_URL=https://xxxx/v1
-    """
-
-    def __init__(self, **kwargs):
-        super().__init__(**kwargs)
-        for key in [
-            "FLOW_EMBEDDING_API_KEY",
-            "FLOW_EMBEDDING_BASE_URL",
-            "FLOW_LLM_API_KEY",
-            "FLOW_LLM_BASE_URL",
-        ]:
-            if os.getenv(key) is None:
-                raise ValueError(f"{key} is not set")
-
-        self.service = PersonalMemoryService()
-
-    @staticmethod
-    def transform_message(message: Message) -> dict:
-        content_text = None
-
-        try:
-            if hasattr(message, "content") and isinstance(
-                message.content,
-                list,
-            ):
-                if len(message.content) > 0 and hasattr(
-                    message.content[0],
-                    "text",
-                ):
-                    content_text = message.content[0].text
-        except (AttributeError, IndexError):
-            # Log error or handle appropriately
-            pass
-
-        return {
-            "role": getattr(message, "role", None),
-            "content": content_text,
-        }
-
-    def transform_messages(self, messages: List[Message]) -> List[dict]:
-        return [self.transform_message(message) for message in messages]
-
-    async def start(self) -> None:
-        return await self.service.start()
-
-    async def stop(self) -> None:
-        return await self.service.stop()
-
-    async def health(self) -> bool:
-        try:
-            return await self.service.health()
-        except Exception:
-            return False
-
-    async def add_memory(
-        self,
-        user_id: str,
-        messages: list,
-        session_id: Optional[str] = None,
-    ) -> None:
-        return await self.service.add_memory(
-            user_id,
-            self.transform_messages(messages),
-            session_id,
-        )
-
-    async def search_memory(
-        self,
-        user_id: str,
-        messages: list,
-        filters: Optional[Dict[str, Any]] = None,
-    ) -> list:
-        return await self.service.search_memory(
-            user_id,
-            self.transform_messages(messages),
-            filters,
-        )
-
-    async def list_memory(
-        self,
-        user_id: str,
-        filters: Optional[Dict[str, Any]] = None,
-    ) -> list:
-        return await self.service.list_memory(user_id, filters)
-
-    async def delete_memory(
-        self,
-        user_id: str,
-        session_id: Optional[str] = None,
-    ) -> None:
-        return await self.service.delete_memory(user_id, session_id)
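The removed ReMe adapter validates four FLOW_* environment variables in its constructor and flattens each Message to a plain role/content dict before delegating to reme_ai's PersonalMemoryService. A minimal setup sketch under those assumptions follows; the values are placeholders and the module itself is gone in 1.1.0b3.

# Hypothetical setup for the removed ReMePersonalMemoryService (1.0.5.post1).
# The FLOW_* variable names come from the removed class above; values are placeholders.
import os

for key, value in {
    "FLOW_EMBEDDING_API_KEY": "sk-xxxx",
    "FLOW_EMBEDDING_BASE_URL": "https://xxxx/v1",
    "FLOW_LLM_API_KEY": "sk-xxxx",
    "FLOW_LLM_BASE_URL": "https://xxxx/v1",
}.items():
    os.environ.setdefault(key, value)

# Import after the environment is populated; __init__ raises ValueError on any missing key.
from agentscope_runtime.engine.services.memory.reme_personal_memory_service import (
    ReMePersonalMemoryService,  # removed in 1.1.0b3
)

service = ReMePersonalMemoryService()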
--- agentscope_runtime/engine/services/memory/reme_task_memory_service.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# -*- coding: utf-8 -*-
-from reme_ai.service.task_memory_service import TaskMemoryService
-
-from .reme_personal_memory_service import ReMePersonalMemoryService
-
-
-class ReMeTaskMemoryService(ReMePersonalMemoryService):
-    def __init__(self, **kwargs):
-        super().__init__(**kwargs)
-
-        self.service = TaskMemoryService()
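ReMeTaskMemoryService was a one-line specialization that swapped PersonalMemoryService for reme_ai's TaskMemoryService. With the wrapper removed, callers could talk to reme_ai directly; the sketch below mirrors the async calls the removed wrappers made on self.service and treats that surface, and the role/content dict shape, as assumptions about reme_ai rather than documented API.

# Hypothetical direct use of reme_ai after the wrapper's removal in 1.1.0b3.
# Method names and the role/content dict shape mirror what the removed
# wrappers above called on self.service; treat them as assumptions.
import asyncio

from reme_ai.service.task_memory_service import TaskMemoryService


async def main() -> None:
    service = TaskMemoryService()
    await service.start()
    try:
        msgs = [{"role": "user", "content": "book the Berlin flights before Friday"}]
        await service.add_memory("u1", msgs, "s1")
        print(await service.search_memory("u1", msgs, None))
    finally:
        await service.stop()


asyncio.run(main())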