agentscope-runtime 1.0.2__py3-none-any.whl → 1.0.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentscope_runtime/adapters/agentscope/stream.py +2 -9
- agentscope_runtime/adapters/ms_agent_framework/__init__.py +0 -0
- agentscope_runtime/adapters/ms_agent_framework/message.py +205 -0
- agentscope_runtime/adapters/ms_agent_framework/stream.py +418 -0
- agentscope_runtime/adapters/utils.py +6 -0
- agentscope_runtime/cli/commands/deploy.py +383 -0
- agentscope_runtime/common/collections/redis_mapping.py +4 -1
- agentscope_runtime/common/container_clients/knative_client.py +466 -0
- agentscope_runtime/engine/__init__.py +4 -0
- agentscope_runtime/engine/app/agent_app.py +48 -5
- agentscope_runtime/engine/constant.py +1 -0
- agentscope_runtime/engine/deployers/__init__.py +12 -0
- agentscope_runtime/engine/deployers/adapter/a2a/__init__.py +31 -1
- agentscope_runtime/engine/deployers/adapter/a2a/a2a_protocol_adapter.py +458 -41
- agentscope_runtime/engine/deployers/adapter/a2a/a2a_registry.py +76 -0
- agentscope_runtime/engine/deployers/adapter/a2a/nacos_a2a_registry.py +749 -0
- agentscope_runtime/engine/deployers/agentrun_deployer.py +2 -2
- agentscope_runtime/engine/deployers/fc_deployer.py +1506 -0
- agentscope_runtime/engine/deployers/knative_deployer.py +290 -0
- agentscope_runtime/engine/deployers/kubernetes_deployer.py +3 -0
- agentscope_runtime/engine/deployers/utils/docker_image_utils/dockerfile_generator.py +8 -2
- agentscope_runtime/engine/deployers/utils/docker_image_utils/image_factory.py +5 -0
- agentscope_runtime/engine/deployers/utils/net_utils.py +65 -0
- agentscope_runtime/engine/runner.py +17 -3
- agentscope_runtime/engine/schemas/exception.py +24 -0
- agentscope_runtime/engine/services/agent_state/redis_state_service.py +61 -8
- agentscope_runtime/engine/services/agent_state/state_service_factory.py +2 -5
- agentscope_runtime/engine/services/memory/redis_memory_service.py +129 -25
- agentscope_runtime/engine/services/session_history/redis_session_history_service.py +160 -34
- agentscope_runtime/engine/tracing/wrapper.py +18 -4
- agentscope_runtime/sandbox/__init__.py +14 -6
- agentscope_runtime/sandbox/box/base/__init__.py +2 -2
- agentscope_runtime/sandbox/box/base/base_sandbox.py +51 -1
- agentscope_runtime/sandbox/box/browser/__init__.py +2 -2
- agentscope_runtime/sandbox/box/browser/browser_sandbox.py +198 -2
- agentscope_runtime/sandbox/box/filesystem/__init__.py +2 -2
- agentscope_runtime/sandbox/box/filesystem/filesystem_sandbox.py +99 -2
- agentscope_runtime/sandbox/box/gui/__init__.py +2 -2
- agentscope_runtime/sandbox/box/gui/gui_sandbox.py +117 -1
- agentscope_runtime/sandbox/box/mobile/__init__.py +2 -2
- agentscope_runtime/sandbox/box/mobile/mobile_sandbox.py +247 -100
- agentscope_runtime/sandbox/box/sandbox.py +98 -65
- agentscope_runtime/sandbox/box/shared/routers/generic.py +36 -29
- agentscope_runtime/sandbox/build.py +50 -57
- agentscope_runtime/sandbox/client/__init__.py +6 -1
- agentscope_runtime/sandbox/client/async_http_client.py +339 -0
- agentscope_runtime/sandbox/client/base.py +74 -0
- agentscope_runtime/sandbox/client/http_client.py +108 -329
- agentscope_runtime/sandbox/enums.py +7 -0
- agentscope_runtime/sandbox/manager/sandbox_manager.py +264 -4
- agentscope_runtime/sandbox/manager/server/app.py +7 -1
- agentscope_runtime/version.py +1 -1
- {agentscope_runtime-1.0.2.dist-info → agentscope_runtime-1.0.4.dist-info}/METADATA +109 -29
- {agentscope_runtime-1.0.2.dist-info → agentscope_runtime-1.0.4.dist-info}/RECORD +58 -46
- {agentscope_runtime-1.0.2.dist-info → agentscope_runtime-1.0.4.dist-info}/WHEEL +0 -0
- {agentscope_runtime-1.0.2.dist-info → agentscope_runtime-1.0.4.dist-info}/entry_points.txt +0 -0
- {agentscope_runtime-1.0.2.dist-info → agentscope_runtime-1.0.4.dist-info}/licenses/LICENSE +0 -0
- {agentscope_runtime-1.0.2.dist-info → agentscope_runtime-1.0.4.dist-info}/top_level.txt +0 -0
agentscope_runtime/engine/services/memory/redis_memory_service.py

@@ -17,23 +17,70 @@ class RedisMemoryService(MemoryService):
         self,
         redis_url: str = "redis://localhost:6379/0",
         redis_client: Optional[aioredis.Redis] = None,
+        socket_timeout: Optional[float] = 5.0,
+        socket_connect_timeout: Optional[float] = 5.0,
+        max_connections: Optional[int] = None,
+        retry_on_timeout: bool = True,
+        ttl_seconds: Optional[int] = 3600,  # 1 hour in seconds
+        max_messages_per_session: Optional[int] = None,
+        health_check_interval: Optional[float] = 30.0,
+        socket_keepalive: bool = True,
     ):
+        """
+        Initialize RedisMemoryService.
+
+        Args:
+            redis_url: Redis connection URL
+            redis_client: Optional pre-configured Redis client
+            socket_timeout: Socket timeout in seconds (default: 5.0)
+            socket_connect_timeout: Socket connect timeout in seconds
+                (default: 5.0)
+            max_connections: Maximum number of connections in the pool
+                (default: None)
+            retry_on_timeout: Whether to retry on timeout (default: True)
+            ttl_seconds: Time-to-live in seconds for memory data.
+                If None, data never expires (default: 3600, i.e., 1 hour)
+            max_messages_per_session: Maximum number of messages stored per
+                session_id field within a user's Redis memory hash.
+                If None, no limit (default: None)
+            health_check_interval: Interval in seconds for health checks
+                on idle connections (default: 30.0).
+                Connections idle longer than this will be checked before reuse.
+                Set to 0 to disable.
+            socket_keepalive: Enable TCP keepalive to prevent
+                silent disconnections (default: True)
+        """
         self._redis_url = redis_url
         self._redis = redis_client
         self._DEFAULT_SESSION_ID = "default"
+        self._socket_timeout = socket_timeout
+        self._socket_connect_timeout = socket_connect_timeout
+        self._max_connections = max_connections
+        self._retry_on_timeout = retry_on_timeout
+        self._ttl_seconds = ttl_seconds
+        self._max_messages_per_session = max_messages_per_session
+        self._health_check_interval = health_check_interval
+        self._socket_keepalive = socket_keepalive
 
     async def start(self) -> None:
-        """Starts the Redis connection."""
+        """Starts the Redis connection with proper timeout
+        and connection pool settings."""
         if self._redis is None:
             self._redis = aioredis.from_url(
                 self._redis_url,
                 decode_responses=True,
+                socket_timeout=self._socket_timeout,
+                socket_connect_timeout=self._socket_connect_timeout,
+                max_connections=self._max_connections,
+                retry_on_timeout=self._retry_on_timeout,
+                health_check_interval=self._health_check_interval,
+                socket_keepalive=self._socket_keepalive,
             )
 
     async def stop(self) -> None:
         """Closes the Redis connection."""
         if self._redis:
-            await self._redis.close()
+            await self._redis.aclose()
             self._redis = None
 
     async def health(self) -> bool:

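The new constructor arguments map directly onto redis-py's connection-pool options in aioredis.from_url. A minimal usage sketch; the import path and signature come from this diff, while the surrounding script and the chosen values are illustrative:

    # Sketch: constructing the memory service with the 1.0.4 pool options.
    # Assumes agentscope-runtime >= 1.0.4 and a reachable local Redis.
    import asyncio

    from agentscope_runtime.engine.services.memory.redis_memory_service import (
        RedisMemoryService,
    )

    async def main() -> None:
        service = RedisMemoryService(
            redis_url="redis://localhost:6379/0",
            socket_timeout=5.0,            # fail fast on slow commands
            socket_connect_timeout=5.0,    # fail fast on connect
            max_connections=20,            # bound the pool size
            ttl_seconds=3600,              # memory expires after 1 hour
            max_messages_per_session=500,  # cap per-session history
        )
        await service.start()  # builds the pooled client lazily
        try:
            print(await service.health())
        finally:
            await service.stop()  # closes the client via aclose()

    asyncio.run(main())

The close() to aclose() change in stop() tracks redis-py, where close() on the asyncio client has been deprecated in favor of aclose() since 5.0.1.
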
@@ -73,14 +120,27 @@ class RedisMemoryService(MemoryService):
         existing_json = await self._redis.hget(key, field)
         existing_msgs = self._deserialize(existing_json)
         all_msgs = existing_msgs + messages
+
+        # Limit the number of messages per session to prevent memory issues
+        if self._max_messages_per_session is not None:
+            if len(all_msgs) > self._max_messages_per_session:
+                # Keep only the most recent messages
+                all_msgs = all_msgs[-self._max_messages_per_session :]
+
         await self._redis.hset(key, field, self._serialize(all_msgs))
 
-    async def search_memory(
+        # Set TTL for the key if configured
+        if self._ttl_seconds is not None:
+            await self._redis.expire(key, self._ttl_seconds)
+
+    async def search_memory(  # pylint: disable=too-many-branches
         self,
         user_id: str,
         messages: list,
         filters: Optional[Dict[str, Any]] = None,
     ) -> list:
+        if not self._redis:
+            raise RuntimeError("Redis connection is not available")
         key = self._user_key(user_id)
         if (
             not messages

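Both the cap and the TTL are now applied on every write. The cap keeps only the newest entries via a negative slice; a self-contained illustration of that semantics:

    # Standalone illustration of the message cap used above: when the
    # combined list overflows, keep only the `cap` most recent items.
    cap = 3
    all_msgs = ["m1", "m2", "m3", "m4", "m5"]
    if cap is not None and len(all_msgs) > cap:
        all_msgs = all_msgs[-cap:]  # drops the oldest messages first
    assert all_msgs == ["m3", "m4", "m5"]
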
@@ -96,29 +156,52 @@ class RedisMemoryService(MemoryService):
 
         keywords = set(query.lower().split())
 
-
-        hash_keys = await self._redis.hkeys(key)
-        for session_id in hash_keys:
-            msgs_json = await self._redis.hget(key, session_id)
-            msgs = self._deserialize(msgs_json)
-            all_msgs.extend(msgs)
-
+        # Process messages in batches to avoid loading all into memory at once
         matched_messages = []
-
-            candidate_content = await self.get_query_text(msg)
-            if candidate_content:
-                msg_content_lower = candidate_content.lower()
-                if any(keyword in msg_content_lower for keyword in keywords):
-                    matched_messages.append(msg)
+        hash_keys = await self._redis.hkeys(key)
 
+        # Get top_k limit early to optimize memory usage
+        top_k = None
         if (
             filters
             and "top_k" in filters
             and isinstance(filters["top_k"], int)
         ):
-
+            top_k = filters["top_k"]
 
-
+        # Process each session separately to reduce memory footprint
+        for session_id in hash_keys:
+            msgs_json = await self._redis.hget(key, session_id)
+            if not msgs_json:
+                continue
+            try:
+                msgs = self._deserialize(msgs_json)
+            except Exception:
+                # Skip corrupted message data
+                continue
+
+            # Match messages in this session
+            for msg in msgs:
+                candidate_content = await self.get_query_text(msg)
+                if candidate_content:
+                    msg_content_lower = candidate_content.lower()
+                    if any(
+                        keyword in msg_content_lower for keyword in keywords
+                    ):
+                        matched_messages.append(msg)
+
+        # Apply top_k filter if specified
+        if top_k is not None:
+            result = matched_messages[-top_k:]
+        else:
+            result = matched_messages
+
+        # Refresh TTL on read to extend lifetime of actively used data,
+        # if a TTL is configured and there is existing data for this key.
+        if self._ttl_seconds is not None and hash_keys:
+            await self._redis.expire(key, self._ttl_seconds)
+
+        return result
 
     async def get_query_text(self, message: Message) -> str:
         if message:

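The rewritten search deserializes one session at a time instead of concatenating every session's messages up front, skips entries that fail to deserialize, and applies top_k as a keep-the-most-recent slice. The matching rule itself is a case-insensitive keyword test; a standalone restatement:

    # Standalone restatement of the matching rule: a message matches if
    # ANY whitespace-separated query keyword occurs in its lowercased text.
    query = "Redis connection timeout"
    keywords = set(query.lower().split())

    texts = [
        "set the socket timeout to five seconds",
        "unrelated chat about lunch",
        "redis is unreachable",
    ]
    matched = [t for t in texts if any(k in t.lower() for k in keywords)]

    top_k = 1
    assert matched[-top_k:] == ["redis is unreachable"]  # most recent match
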
@@ -133,20 +216,39 @@ class RedisMemoryService(MemoryService):
         user_id: str,
         filters: Optional[Dict[str, Any]] = None,
     ) -> list:
+        if not self._redis:
+            raise RuntimeError("Redis connection is not available")
         key = self._user_key(user_id)
-        all_msgs = []
-        hash_keys = await self._redis.hkeys(key)
-        for session_id in sorted(hash_keys):
-            msgs_json = await self._redis.hget(key, session_id)
-            msgs = self._deserialize(msgs_json)
-            all_msgs.extend(msgs)
-
         page_num = filters.get("page_num", 1) if filters else 1
         page_size = filters.get("page_size", 10) if filters else 10
 
         start_index = (page_num - 1) * page_size
         end_index = start_index + page_size
 
+        # Optimize: Calculate which sessions we need to load
+        # For simplicity, we still load all but could be optimized further
+        # to only load sessions that contain the requested page range
+        all_msgs = []
+        hash_keys = await self._redis.hkeys(key)
+        for session_id in sorted(hash_keys):
+            msgs_json = await self._redis.hget(key, session_id)
+            if msgs_json:
+                try:
+                    msgs = self._deserialize(msgs_json)
+                    all_msgs.extend(msgs)
+                except json.JSONDecodeError:
+                    # Skip corrupted message data
+                    continue
+
+        # Early exit optimization: if we've loaded enough messages
+        # to cover the requested page, we can stop (but this assumes
+        # we need all previous messages for proper ordering)
+        # For now, we keep loading all for correctness
+
+        # Refresh TTL on active use to keep memory alive,
+        # mirroring get_session behavior
+        if self._ttl_seconds is not None and hash_keys:
+            await self._redis.expire(key, self._ttl_seconds)
         return all_msgs[start_index:end_index]
 
     async def delete_memory(

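list_memory gains the same connection guard, corruption handling, and TTL refresh, while the pagination arithmetic is unchanged: 1-based pages mapped onto a flat, session-ordered message list. In isolation:

    # The pagination arithmetic used by list_memory.
    page_num, page_size = 2, 10
    start_index = (page_num - 1) * page_size  # 10
    end_index = start_index + page_size       # 20

    all_msgs = list(range(25))
    assert all_msgs[start_index:end_index] == list(range(10, 20))
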
@@ -154,6 +256,8 @@ class RedisMemoryService(MemoryService):
         user_id: str,
         session_id: Optional[str] = None,
     ) -> None:
+        if not self._redis:
+            raise RuntimeError("Redis connection is not available")
         key = self._user_key(user_id)
         if session_id:
             await self._redis.hdel(key, session_id)

agentscope_runtime/engine/services/session_history/redis_session_history_service.py

@@ -15,23 +15,73 @@ class RedisSessionHistoryService(SessionHistoryService):
         self,
         redis_url: str = "redis://localhost:6379/0",
         redis_client: Optional[aioredis.Redis] = None,
+        socket_timeout: Optional[float] = 5.0,
+        socket_connect_timeout: Optional[float] = 5.0,
+        max_connections: Optional[int] = None,
+        retry_on_timeout: bool = True,
+        ttl_seconds: Optional[int] = 3600,  # 1 hour in seconds
+        max_messages_per_session: Optional[int] = None,
+        health_check_interval: Optional[float] = 30.0,
+        socket_keepalive: bool = True,
     ):
+        """
+        Initialize RedisSessionHistoryService.
+
+        Args:
+            redis_url: Redis connection URL
+            redis_client: Optional pre-configured Redis client
+            socket_timeout: Socket timeout in seconds (default: 5.0)
+            socket_connect_timeout: Socket connect timeout in seconds
+                (default: 5.0)
+            max_connections: Maximum number of connections in the pool
+                (default: None)
+            retry_on_timeout: Whether to retry on timeout (default: True)
+            ttl_seconds: Time-to-live in seconds for session data.
+                If None, data never expires (default: 3600, i.e., 1 hour)
+            max_messages_per_session: Maximum number of messages per session.
+                If None, no limit (default: None)
+            health_check_interval: Interval in seconds for health checks on
+                idle connections (default: 30.0).
+                Connections idle longer than this will be checked before reuse.
+                Set to 0 to disable.
+            socket_keepalive: Enable TCP keepalive to prevent
+                silent disconnections (default: True)
+        """
         self._redis_url = redis_url
         self._redis = redis_client
+        self._socket_timeout = socket_timeout
+        self._socket_connect_timeout = socket_connect_timeout
+        self._max_connections = max_connections
+        self._retry_on_timeout = retry_on_timeout
+        self._ttl_seconds = ttl_seconds
+        self._max_messages_per_session = max_messages_per_session
+        self._health_check_interval = health_check_interval
+        self._socket_keepalive = socket_keepalive
 
     async def start(self):
+        """Starts the Redis connection with proper timeout and connection
+        pool settings."""
         if self._redis is None:
             self._redis = aioredis.from_url(
                 self._redis_url,
                 decode_responses=True,
+                socket_timeout=self._socket_timeout,
+                socket_connect_timeout=self._socket_connect_timeout,
+                max_connections=self._max_connections,
+                retry_on_timeout=self._retry_on_timeout,
+                health_check_interval=self._health_check_interval,
+                socket_keepalive=self._socket_keepalive,
             )
 
     async def stop(self):
         if self._redis:
-            await self._redis.close()
+            await self._redis.aclose()
             self._redis = None
 
     async def health(self) -> bool:
+        """Checks the health of the service."""
+        if not self._redis:
+            return False
         try:
             pong = await self._redis.ping()
             return pong is True or pong == "PONG"

@@ -41,8 +91,9 @@ class RedisSessionHistoryService(SessionHistoryService):
     def _session_key(self, user_id: str, session_id: str):
         return f"session:{user_id}:{session_id}"
 
-    def _index_key(self, user_id: str):
-
+    def _session_pattern(self, user_id: str):
+        """Generate the pattern for scanning session keys for a user."""
+        return f"session:{user_id}:*"
 
     def _session_to_json(self, session: Session) -> str:
         return session.model_dump_json()

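This replaces the previous set-based session index (the removed _index_key helper, and the srem call removed from delete_session below) with a key-per-session layout that a glob pattern can enumerate. The schema, restated standalone:

    # The key schema behind _session_key / _session_pattern: one string
    # key per session, plus a glob pattern for SCAN-based enumeration.
    def session_key(user_id: str, session_id: str) -> str:
        return f"session:{user_id}:{session_id}"

    def session_pattern(user_id: str) -> str:
        return f"session:{user_id}:*"

    assert session_key("u1", "s42") == "session:u1:s42"
    assert session_pattern("u1") == "session:u1:*"

One consequence worth noting: user IDs are interpolated into the glob pattern unescaped, so an ID containing characters like *, ?, or [ could over-match during scans.
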
@@ -55,6 +106,8 @@ class RedisSessionHistoryService(SessionHistoryService):
         user_id: str,
         session_id: Optional[str] = None,
     ) -> Session:
+        if not self._redis:
+            raise RuntimeError("Redis connection is not available")
         if session_id and session_id.strip():
             sid = session_id.strip()
         else:

@@ -64,7 +117,11 @@ class RedisSessionHistoryService(SessionHistoryService):
         key = self._session_key(user_id, sid)
 
         await self._redis.set(key, self._session_to_json(session))
-
+
+        # Set TTL for the session key if configured
+        if self._ttl_seconds is not None:
+            await self._redis.expire(key, self._ttl_seconds)
+
         return session
 
     async def get_session(

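create_session now writes the session and then applies the TTL as a second round trip. For comparison, redis-py can also set the TTL atomically with the write via SET's EX option; a sketch of that alternative (not what the package does), assuming redis-py's asyncio client and an illustrative key:

    # Alternative shown for comparison only: SET with EX applies the TTL
    # in the same command instead of SET followed by EXPIRE.
    import asyncio
    import redis.asyncio as aioredis

    async def main() -> None:
        r = aioredis.from_url("redis://localhost:6379/0", decode_responses=True)
        await r.set("session:u1:s1", "{}", ex=3600)  # value is illustrative
        print(await r.ttl("session:u1:s1"))          # ~3600
        await r.aclose()

    asyncio.run(main())
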
@@ -72,31 +129,63 @@ class RedisSessionHistoryService(SessionHistoryService):
         user_id: str,
         session_id: str,
     ) -> Optional[Session]:
+        if not self._redis:
+            raise RuntimeError("Redis connection is not available")
         key = self._session_key(user_id, session_id)
         session_json = await self._redis.get(key)
         if session_json is None:
-
-
-
-
-
+            return None
+
+        try:
+            session = self._session_from_json(session_json)
+        except Exception:
+            # Return None for corrupted session data
+            return None
+
+        # Refresh TTL when accessing the session
+        if self._ttl_seconds is not None:
+            await self._redis.expire(key, self._ttl_seconds)
+
+        return session
 
     async def delete_session(self, user_id: str, session_id: str):
+        if not self._redis:
+            raise RuntimeError("Redis connection is not available")
         key = self._session_key(user_id, session_id)
         await self._redis.delete(key)
-        await self._redis.srem(self._index_key(user_id), session_id)
 
     async def list_sessions(self, user_id: str) -> list[Session]:
-
-
+        """List all sessions for a user by scanning session keys.
+
+        Uses SCAN to find all session:{user_id}:* keys. Expired sessions
+        naturally disappear as their keys expire, avoiding stale entries.
+        """
+        if not self._redis:
+            raise RuntimeError("Redis connection is not available")
+        pattern = self._session_pattern(user_id)
         sessions = []
-
-
-
-
-
-
-
+        cursor = 0
+
+        while True:
+            cursor, keys = await self._redis.scan(
+                cursor,
+                match=pattern,
+                count=100,
+            )
+            for key in keys:
+                session_json = await self._redis.get(key)
+                if session_json:
+                    try:
+                        session = self._session_from_json(session_json)
+                        session.messages = []
+                        sessions.append(session)
+                    except Exception:
+                        # Skip corrupted session data
+                        continue
+
+            if cursor == 0:
+                break
+
         return sessions
 
     async def append_message(

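Where list_sessions previously relied on a maintained index set (see the removed srem call above), it now walks the keyspace with the SCAN cursor protocol, which is non-blocking and naturally skips keys whose TTL has expired. A self-contained sketch of that cursor loop against redis-py's asyncio client; the key pattern is illustrative:

    # Sketch of the SCAN cursor loop used by list_sessions and
    # delete_user_sessions: SCAN returns (next_cursor, batch), and a
    # next_cursor of 0 signals that the iteration is complete.
    import asyncio
    import redis.asyncio as aioredis

    async def scan_keys(r: aioredis.Redis, pattern: str) -> list:
        cursor, found = 0, []
        while True:
            cursor, keys = await r.scan(cursor, match=pattern, count=100)
            found.extend(keys)
            if cursor == 0:
                return found

    async def main() -> None:
        r = aioredis.from_url("redis://localhost:6379/0", decode_responses=True)
        print(await scan_keys(r, "session:u1:*"))
        await r.aclose()

    asyncio.run(main())
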
@@ -109,6 +198,8 @@ class RedisSessionHistoryService(SessionHistoryService):
             List[Dict[str, Any]],
         ],
     ):
+        if not self._redis:
+            raise RuntimeError("Redis connection is not available")
         if not isinstance(message, list):
             message = [message]
         norm_message = []

@@ -125,21 +216,50 @@ class RedisSessionHistoryService(SessionHistoryService):
         key = self._session_key(user_id, session_id)
 
         session_json = await self._redis.get(key)
-        if session_json:
-
-
-
-
-
-
-                f"Warning: Session {session.id} not found in storage for "
-                f"append_message.",
+        if session_json is None:
+            # Session expired or not found, treat as a new session
+            # Create a new session with the current messages
+            stored_session = Session(
+                id=session_id,
+                user_id=user_id,
+                messages=norm_message.copy(),
             )
+        else:
+            try:
+                stored_session = self._session_from_json(session_json)
+                stored_session.messages.extend(norm_message)
+            except Exception:
+                # Session data corrupted, treat as a new session
+                stored_session = Session(
+                    id=session_id,
+                    user_id=user_id,
+                    messages=norm_message.copy(),
+                )
+
+        # Limit the number of messages per session to prevent memory issues
+        if self._max_messages_per_session is not None:
+            if len(stored_session.messages) > self._max_messages_per_session:
+                # Keep only the most recent messages
+                stored_session.messages = stored_session.messages[
+                    -self._max_messages_per_session :
+                ]
+                # Keep the in-memory session in sync with the stored session
+                session.messages = session.messages[
+                    -self._max_messages_per_session :
+                ]
+
+        await self._redis.set(key, self._session_to_json(stored_session))
+
+        # Set TTL for the session key if configured
+        if self._ttl_seconds is not None:
+            await self._redis.expire(key, self._ttl_seconds)
 
     async def delete_user_sessions(self, user_id: str) -> None:
         """
         Deletes all session history data for a specific user.
 
+        Uses SCAN to find all session keys for the user and deletes them.
+
         Args:
             user_id (str): The ID of the user whose session history data should
                 be deleted

@@ -147,11 +267,17 @@ class RedisSessionHistoryService(SessionHistoryService):
         if not self._redis:
             raise RuntimeError("Redis connection is not available")
 
-
-
+        pattern = self._session_pattern(user_id)
+        cursor = 0
 
-
-
-
+        while True:
+            cursor, keys = await self._redis.scan(
+                cursor,
+                match=pattern,
+                count=100,
+            )
+            if keys:
+                await self._redis.delete(*keys)
 
-
+            if cursor == 0:
+                break

agentscope_runtime/engine/tracing/wrapper.py

@@ -26,7 +26,11 @@ from typing import (
 from pydantic import BaseModel
 from opentelemetry.propagate import extract
 from opentelemetry.context import attach
-from opentelemetry.trace import StatusCode
+from opentelemetry.trace import (
+    ProxyTracerProvider,
+    StatusCode,
+    NoOpTracerProvider,
+)
 from opentelemetry import trace as ot_trace
 from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
     OTLPSpanExporter as OTLPSpanGrpcExporter,

@@ -917,12 +921,22 @@ def _get_ot_tracer() -> ot_trace.Tracer:
         ot_trace.Tracer: The OpenTelemetry tracer instance.
     """
 
-    def
+    def _has_existing_trace_provider() -> bool:
+        from opentelemetry.trace import _TRACER_PROVIDER
+
         existing_provider = ot_trace.get_tracer_provider()
+        if isinstance(existing_provider, NoOpTracerProvider):
+            return False
+        elif isinstance(existing_provider, ProxyTracerProvider):
+            # ProxyTracerProvider will use the _TRACER_PROVIDER as real tracer
+            # provider to get the tracer
+            return bool(_TRACER_PROVIDER)
 
-
-    return ot_trace.get_tracer("agentscope_runtime")
+        return True
 
+    def _get_ot_tracer_inner() -> ot_trace.Tracer:
+        if _has_existing_trace_provider():
+            return ot_trace.get_tracer("agentscope_runtime")
     resource = Resource(
         attributes={
             SERVICE_NAME: _get_service_name(),

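The practical effect: the runtime only builds its own OTLP pipeline when no real tracer provider is configured; if the host application installed one first, the wrapper asks it for a tracer instead of reconfiguring tracing. A sketch of that interaction using only public OpenTelemetry APIs (assumes opentelemetry-sdk is installed; the span name is illustrative):

    # Sketch: when an application installs its own TracerProvider before
    # the runtime initializes tracing, the check above returns True and
    # the runtime reuses it via get_tracer().
    from opentelemetry import trace
    from opentelemetry.sdk.trace import TracerProvider

    trace.set_tracer_provider(TracerProvider())  # application-side setup

    tracer = trace.get_tracer("agentscope_runtime")
    with tracer.start_as_current_span("demo-span"):
        pass  # spans now flow through the application's provider
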
agentscope_runtime/sandbox/__init__.py

@@ -3,22 +3,30 @@
 # This ensures SandboxRegistry.register() runs at import time.
 # Without this, lazy loading delays module import and types may not be
 # registered.
-from .box.base.base_sandbox import BaseSandbox
-from .box.browser.browser_sandbox import BrowserSandbox
-from .box.filesystem.filesystem_sandbox import FilesystemSandbox
-from .box.gui.gui_sandbox import GuiSandbox
+from .box.base.base_sandbox import BaseSandbox, BaseSandboxAsync
+from .box.browser.browser_sandbox import BrowserSandbox, BrowserSandboxAsync
+from .box.filesystem.filesystem_sandbox import (
+    FilesystemSandbox,
+    FilesystemSandboxAsync,
+)
+from .box.gui.gui_sandbox import GuiSandbox, GuiSandboxAsync
+from .box.mobile.mobile_sandbox import MobileSandbox, MobileSandboxAsync
 from .box.training_box.training_box import TrainingSandbox
 from .box.cloud.cloud_sandbox import CloudSandbox
-from .box.mobile.mobile_sandbox import MobileSandbox
 from .box.agentbay.agentbay_sandbox import AgentbaySandbox
 
 __all__ = [
     "BaseSandbox",
+    "BaseSandboxAsync",
     "BrowserSandbox",
+    "BrowserSandboxAsync",
     "FilesystemSandbox",
+    "FilesystemSandboxAsync",
     "GuiSandbox",
+    "GuiSandboxAsync",
+    "MobileSandbox",
+    "MobileSandboxAsync",
     "TrainingSandbox",
     "CloudSandbox",
-    "MobileSandbox",
     "AgentbaySandbox",
 ]

agentscope_runtime/sandbox/box/base/base_sandbox.py

@@ -4,7 +4,7 @@ from typing import Optional
 from ...utils import build_image_uri
 from ...registry import SandboxRegistry
 from ...enums import SandboxType
-from ...box.sandbox import Sandbox
+from ...box.sandbox import Sandbox, SandboxAsync
 from ...constant import TIMEOUT
 
 

@@ -49,3 +49,53 @@ class BaseSandbox(Sandbox):
             command (str): Shell command to execute.
         """
         return self.call_tool("run_shell_command", {"command": command})
+
+
+@SandboxRegistry.register(
+    build_image_uri("runtime-sandbox-base"),
+    sandbox_type=SandboxType.BASE_ASYNC,
+    security_level="medium",
+    timeout=TIMEOUT,
+    description="Base Sandbox (Async)",
+)
+class BaseSandboxAsync(SandboxAsync):
+    def __init__(
+        self,
+        sandbox_id: Optional[str] = None,
+        timeout: int = 3000,
+        base_url: Optional[str] = None,
+        bearer_token: Optional[str] = None,
+        sandbox_type: SandboxType = SandboxType.BASE_ASYNC,
+    ):
+        super().__init__(
+            sandbox_id,
+            timeout,
+            base_url,
+            bearer_token,
+            sandbox_type,
+        )
+
+    async def run_ipython_cell(self, code: str):
+        """
+        Run an IPython cell asynchronously.
+
+        Args:
+            code (str): IPython code to execute.
+        Returns:
+            Any: Response from sandbox execution
+        """
+        return await self.call_tool_async("run_ipython_cell", {"code": code})
+
+    async def run_shell_command(self, command: str):
+        """
+        Run a shell command asynchronously.
+
+        Args:
+            command (str): Shell command to execute.
+        Returns:
+            Any: Response from sandbox execution
+        """
+        return await self.call_tool_async(
+            "run_shell_command",
+            {"command": command},
+        )
