agentscope-runtime 1.0.5.post1-py3-none-any.whl → 1.1.0b2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentscope_runtime/__init__.py +3 -0
- agentscope_runtime/adapters/agentscope/message.py +36 -295
- agentscope_runtime/adapters/agentscope/stream.py +89 -2
- agentscope_runtime/adapters/agno/message.py +11 -2
- agentscope_runtime/adapters/agno/stream.py +1 -0
- agentscope_runtime/adapters/langgraph/__init__.py +1 -3
- agentscope_runtime/adapters/langgraph/message.py +11 -106
- agentscope_runtime/adapters/langgraph/stream.py +1 -0
- agentscope_runtime/adapters/ms_agent_framework/message.py +11 -1
- agentscope_runtime/adapters/ms_agent_framework/stream.py +1 -0
- agentscope_runtime/adapters/text/stream.py +1 -0
- agentscope_runtime/common/container_clients/agentrun_client.py +0 -3
- agentscope_runtime/common/container_clients/boxlite_client.py +26 -15
- agentscope_runtime/common/container_clients/fc_client.py +0 -11
- agentscope_runtime/common/utils/deprecation.py +14 -17
- agentscope_runtime/common/utils/logging.py +44 -0
- agentscope_runtime/engine/app/agent_app.py +5 -5
- agentscope_runtime/engine/app/celery_mixin.py +43 -4
- agentscope_runtime/engine/deployers/adapter/agui/__init__.py +8 -1
- agentscope_runtime/engine/deployers/adapter/agui/agui_adapter_utils.py +6 -1
- agentscope_runtime/engine/deployers/adapter/agui/agui_protocol_adapter.py +2 -2
- agentscope_runtime/engine/deployers/utils/service_utils/fastapi_factory.py +13 -0
- agentscope_runtime/engine/runner.py +31 -6
- agentscope_runtime/engine/schemas/agent_schemas.py +28 -0
- agentscope_runtime/engine/services/sandbox/sandbox_service.py +41 -9
- agentscope_runtime/sandbox/box/base/base_sandbox.py +4 -0
- agentscope_runtime/sandbox/box/browser/browser_sandbox.py +4 -0
- agentscope_runtime/sandbox/box/dummy/dummy_sandbox.py +9 -2
- agentscope_runtime/sandbox/box/filesystem/filesystem_sandbox.py +4 -0
- agentscope_runtime/sandbox/box/gui/gui_sandbox.py +5 -1
- agentscope_runtime/sandbox/box/mobile/mobile_sandbox.py +4 -0
- agentscope_runtime/sandbox/box/sandbox.py +122 -13
- agentscope_runtime/sandbox/client/async_http_client.py +1 -0
- agentscope_runtime/sandbox/client/base.py +0 -1
- agentscope_runtime/sandbox/client/http_client.py +0 -2
- agentscope_runtime/sandbox/manager/heartbeat_mixin.py +486 -0
- agentscope_runtime/sandbox/manager/sandbox_manager.py +740 -153
- agentscope_runtime/sandbox/manager/server/app.py +18 -11
- agentscope_runtime/sandbox/manager/server/config.py +10 -2
- agentscope_runtime/sandbox/mcp_server.py +0 -1
- agentscope_runtime/sandbox/model/__init__.py +2 -1
- agentscope_runtime/sandbox/model/container.py +90 -3
- agentscope_runtime/sandbox/model/manager_config.py +45 -1
- agentscope_runtime/version.py +1 -1
- {agentscope_runtime-1.0.5.post1.dist-info → agentscope_runtime-1.1.0b2.dist-info}/METADATA +36 -54
- {agentscope_runtime-1.0.5.post1.dist-info → agentscope_runtime-1.1.0b2.dist-info}/RECORD +50 -69
- agentscope_runtime/adapters/agentscope/long_term_memory/__init__.py +0 -6
- agentscope_runtime/adapters/agentscope/long_term_memory/_long_term_memory_adapter.py +0 -258
- agentscope_runtime/adapters/agentscope/memory/__init__.py +0 -6
- agentscope_runtime/adapters/agentscope/memory/_memory_adapter.py +0 -152
- agentscope_runtime/engine/services/agent_state/__init__.py +0 -25
- agentscope_runtime/engine/services/agent_state/redis_state_service.py +0 -166
- agentscope_runtime/engine/services/agent_state/state_service.py +0 -179
- agentscope_runtime/engine/services/agent_state/state_service_factory.py +0 -52
- agentscope_runtime/engine/services/memory/__init__.py +0 -33
- agentscope_runtime/engine/services/memory/mem0_memory_service.py +0 -128
- agentscope_runtime/engine/services/memory/memory_service.py +0 -292
- agentscope_runtime/engine/services/memory/memory_service_factory.py +0 -126
- agentscope_runtime/engine/services/memory/redis_memory_service.py +0 -290
- agentscope_runtime/engine/services/memory/reme_personal_memory_service.py +0 -109
- agentscope_runtime/engine/services/memory/reme_task_memory_service.py +0 -11
- agentscope_runtime/engine/services/memory/tablestore_memory_service.py +0 -301
- agentscope_runtime/engine/services/session_history/__init__.py +0 -32
- agentscope_runtime/engine/services/session_history/redis_session_history_service.py +0 -283
- agentscope_runtime/engine/services/session_history/session_history_service.py +0 -267
- agentscope_runtime/engine/services/session_history/session_history_service_factory.py +0 -73
- agentscope_runtime/engine/services/session_history/tablestore_session_history_service.py +0 -288
- {agentscope_runtime-1.0.5.post1.dist-info → agentscope_runtime-1.1.0b2.dist-info}/WHEEL +0 -0
- {agentscope_runtime-1.0.5.post1.dist-info → agentscope_runtime-1.1.0b2.dist-info}/entry_points.txt +0 -0
- {agentscope_runtime-1.0.5.post1.dist-info → agentscope_runtime-1.1.0b2.dist-info}/licenses/LICENSE +0 -0
- {agentscope_runtime-1.0.5.post1.dist-info → agentscope_runtime-1.1.0b2.dist-info}/top_level.txt +0 -0
agentscope_runtime/engine/services/memory/tablestore_memory_service.py (file removed)
@@ -1,301 +0,0 @@
# -*- coding: utf-8 -*-
# pylint: disable=redefined-outer-name
import asyncio
import copy
from enum import Enum
from typing import Any, Dict, List, Optional

import tablestore
from langchain_community.embeddings import DashScopeEmbeddings
from langchain_core.embeddings import Embeddings
from tablestore import AsyncOTSClient as AsyncTablestoreClient
from tablestore import VectorMetricType
from tablestore_for_agent_memory.base.filter import Filters
from tablestore_for_agent_memory.knowledge.async_knowledge_store import (
    AsyncKnowledgeStore,
)

from ...schemas.agent_schemas import Message, MessageType
from .memory_service import MemoryService
from ..utils.tablestore_service_utils import (
    convert_messages_to_tablestore_documents,
    convert_tablestore_document_to_message,
    get_message_metadata_names,
    tablestore_log,
)


class SearchStrategy(Enum):
    FULL_TEXT = "full_text"
    VECTOR = "vector"


class TablestoreMemoryService(MemoryService):
    """
    A Tablestore-based implementation of the memory service.
    based on tablestore_for_agent_memory
    (https://github.com/aliyun/
    alibabacloud-tablestore-for-agent-memory/blob/main/python/docs/knowledge_store_tutorial.ipynb).
    """

    _SEARCH_INDEX_NAME = "agentscope_runtime_knowledge_search_index_name"
    _DEFAULT_SESSION_ID = "default"

    def __init__(
        self,
        tablestore_client: AsyncTablestoreClient,
        search_strategy: SearchStrategy = SearchStrategy.FULL_TEXT,
        embedding_model: Optional[Embeddings] = None,
        vector_dimension: int = 1536,
        table_name: Optional[str] = "agentscope_runtime_memory",
        search_index_schema: Optional[List[tablestore.FieldSchema]] = (
            tablestore.FieldSchema("user_id", tablestore.FieldType.KEYWORD),
            tablestore.FieldSchema("session_id", tablestore.FieldType.KEYWORD),
        ),
        text_field: Optional[str] = "text",
        embedding_field: Optional[str] = "embedding",
        vector_metric_type: VectorMetricType = VectorMetricType.VM_COSINE,
        **kwargs: Any,
    ):
        if embedding_model is None:
            embedding_model = DashScopeEmbeddings()

        self._search_strategy = search_strategy
        self._embedding_model = (
            embedding_model  # the parameter is None, don't store vector.
        )

        if (
            self._search_strategy == SearchStrategy.VECTOR
            and self._embedding_model is None
        ):
            raise ValueError(
                "Embedding model is required when search strategy is VECTOR.",
            )

        self._tablestore_client = tablestore_client
        self._vector_dimension = vector_dimension
        self._table_name = table_name
        self._search_index_schema = (
            list(search_index_schema)
            if search_index_schema is not None
            else None
        )
        self._text_field = text_field
        self._embedding_field = embedding_field
        self._vector_metric_type = vector_metric_type
        self._knowledge_store: Optional[AsyncKnowledgeStore] = None
        self._knowledge_store_init_parameter_kwargs = kwargs

    async def _init_knowledge_store(self) -> None:
        self._knowledge_store = AsyncKnowledgeStore(
            tablestore_client=self._tablestore_client,
            vector_dimension=self._vector_dimension,
            enable_multi_tenant=False,
            # enable multi tenant will make user be confused,
            # we unify the usage of session id and user id,
            # and allow users to configure the index themselves.
            table_name=self._table_name,
            search_index_name=TablestoreMemoryService._SEARCH_INDEX_NAME,
            search_index_schema=copy.deepcopy(self._search_index_schema),
            text_field=self._text_field,
            embedding_field=self._embedding_field,
            vector_metric_type=self._vector_metric_type,
            **self._knowledge_store_init_parameter_kwargs,
        )

        await self._knowledge_store.init_table()

    async def start(self) -> None:
        """Start the tablestore service"""
        if self._knowledge_store:
            return
        await self._init_knowledge_store()

    async def stop(self) -> None:
        """Close the tablestore service"""
        if self._knowledge_store is None:
            return
        knowledge_store = self._knowledge_store
        self._knowledge_store = None
        await knowledge_store.close()

    async def health(self) -> bool:
        """Checks the health of the service."""
        if self._knowledge_store is None:
            tablestore_log("Tablestore memory service is not started.")
            return False

        try:
            async for _ in await self._knowledge_store.get_all_documents():
                return True
            return True
        except Exception as e:
            tablestore_log(
                f"Tablestore memory service "
                f"cannot access Tablestore, error: {str(e)}.",
            )
            return False

    async def add_memory(
        self,
        user_id: str,
        messages: list,
        session_id: Optional[str] = None,
    ) -> None:
        if not session_id:
            session_id = TablestoreMemoryService._DEFAULT_SESSION_ID

        if not messages:
            return

        tablestore_documents = convert_messages_to_tablestore_documents(
            messages,
            user_id,
            session_id,
            self._embedding_model,
        )

        put_tasks = [
            self._knowledge_store.put_document(tablestore_document)
            for tablestore_document in tablestore_documents
        ]
        await asyncio.gather(*put_tasks)

    @staticmethod
    async def get_query_text(message: Message) -> str:
        if not message or message.type != MessageType.MESSAGE:
            return ""

        for content in message.content:
            if content.type == "text":
                return content.text

        return ""

    async def search_memory(
        self,
        user_id: str,
        messages: list,
        filters: Optional[Dict[str, Any]] = None,
    ) -> list:
        if (
            not messages
            or not isinstance(messages, list)
            or len(messages) == 0
        ):
            return []

        query = await TablestoreMemoryService.get_query_text(messages[-1])
        if not query:
            return []

        top_k = 100
        if (
            filters
            and "top_k" in filters
            and isinstance(filters["top_k"], int)
        ):
            top_k = filters["top_k"]

        if self._search_strategy == SearchStrategy.FULL_TEXT:
            matched_messages = [
                convert_tablestore_document_to_message(hit.document)
                for hit in (
                    await self._knowledge_store.full_text_search(
                        query=query,
                        metadata_filter=Filters.eq("user_id", user_id),
                        limit=top_k,
                        meta_data_to_get=get_message_metadata_names(),
                    )
                ).hits
            ]
        elif self._search_strategy == SearchStrategy.VECTOR:
            matched_messages = [
                convert_tablestore_document_to_message(hit.document)
                for hit in (
                    await self._knowledge_store.vector_search(
                        query_vector=self._embedding_model.embed_query(query),
                        metadata_filter=Filters.eq("user_id", user_id),
                        top_k=top_k,
                        meta_data_to_get=get_message_metadata_names(),
                    )
                ).hits
            ]
        else:
            raise ValueError(
                f"Unsupported search strategy: {self._search_strategy}",
            )

        return matched_messages

    async def list_memory(
        self,
        user_id: str,
        filters: Optional[Dict[str, Any]] = None,
    ) -> list:
        page_num = filters.get("page_num", 1) if filters else 1
        page_size = filters.get("page_size", 10) if filters else 10

        if page_num < 1 or page_size < 1:
            raise ValueError("page_num and page_size must be greater than 0.")

        next_token = None
        for _ in range(page_num - 1):
            next_token = (
                await self._knowledge_store.search_documents(
                    metadata_filter=Filters.eq("user_id", user_id),
                    limit=page_size,
                    next_token=next_token,
                )
            ).next_token
            if not next_token:
                tablestore_log(
                    "Page number exceeds the total number of pages, "
                    "return empty list.",
                )
                return []

        messages = [
            convert_tablestore_document_to_message(hit.document)
            for hit in (
                await self._knowledge_store.search_documents(
                    metadata_filter=Filters.eq("user_id", user_id),
                    limit=page_size,
                    next_token=next_token,
                    meta_data_to_get=get_message_metadata_names(),
                )
            ).hits
        ]

        return messages

    async def delete_memory(
        self,
        user_id: str,
        session_id: Optional[str] = None,
    ) -> None:
        delete_tablestore_documents = [
            hit.document
            for hit in (
                await self._knowledge_store.search_documents(
                    metadata_filter=(
                        Filters.eq("user_id", user_id)
                        if not session_id
                        else Filters.logical_and(
                            [
                                Filters.eq("user_id", user_id),
                                Filters.eq("session_id", session_id),
                            ],
                        )
                    ),
                )
            ).hits
        ]
        delete_tasks = [
            self._knowledge_store.delete_document(
                tablestore_document.document_id,
            )
            for tablestore_document in delete_tablestore_documents
        ]
        await asyncio.gather(*delete_tasks)
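For context, a minimal lifecycle sketch of the removed TablestoreMemoryService, valid only for releases before 1.1.0b2. It assumes a reachable Tablestore instance and DashScope credentials for the default embedding model; the AsyncOTSClient arguments are placeholders, and the empty message lists keep the sketch self-contained (both calls return early on empty input, as shown in the source above).

import asyncio

from tablestore import AsyncOTSClient

from agentscope_runtime.engine.services.memory.tablestore_memory_service import (
    SearchStrategy,
    TablestoreMemoryService,
)


async def main() -> None:
    # Placeholder credentials; a real client needs a valid endpoint,
    # access key pair, and instance name.
    client = AsyncOTSClient(
        "<endpoint>",
        "<access_key_id>",
        "<access_key_secret>",
        "<instance_name>",
    )
    service = TablestoreMemoryService(
        tablestore_client=client,
        search_strategy=SearchStrategy.FULL_TEXT,
    )
    await service.start()  # creates the table and search index on first use
    try:
        print(await service.health())
        # add_memory/search_memory expect runtime Message objects; with empty
        # lists both calls are no-ops, so this sketch runs without real data.
        await service.add_memory(user_id="user-1", messages=[])
        print(await service.search_memory(user_id="user-1", messages=[]))
    finally:
        await service.stop()


if __name__ == "__main__":
    asyncio.run(main())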
agentscope_runtime/engine/services/session_history/__init__.py (file removed)
@@ -1,32 +0,0 @@
# -*- coding: utf-8 -*-
from typing import TYPE_CHECKING
from ....common.utils.lazy_loader import install_lazy_loader
from ....common.utils.deprecation import deprecated_module

deprecated_module(
    module_name=__name__,
    removed_in="v1.1",
    alternative="agentscope.memory",
)

if TYPE_CHECKING:
    from .session_history_service import (
        SessionHistoryService,
        InMemorySessionHistoryService,
    )
    from .redis_session_history_service import RedisSessionHistoryService
    from .tablestore_session_history_service import (
        TablestoreSessionHistoryService,
    )
    from .session_history_service_factory import SessionHistoryServiceFactory

install_lazy_loader(
    globals(),
    {
        "SessionHistoryService": ".session_history_service",
        "InMemorySessionHistoryService": ".session_history_service",
        "RedisSessionHistoryService": ".redis_session_history_service",
        "TablestoreSessionHistoryService": ".tablestore_session_history_service",  # noqa
        "SessionHistoryServiceFactory": ".session_history_service_factory",
    },
)
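A small sketch of what the lazy-loading shim above provided on pre-1.1.0 installs: importing one of the listed names resolved it from its submodule on first access, after the module-level deprecation notice pointing at agentscope.memory. The imports below are only valid for releases that still ship this package.

# Sketch only: targets releases before 1.1.0b2, where this package still existed.
from agentscope_runtime.engine.services.session_history import (
    InMemorySessionHistoryService,   # resolved lazily from .session_history_service
    SessionHistoryServiceFactory,    # resolved lazily from .session_history_service_factory
)

print(InMemorySessionHistoryService, SessionHistoryServiceFactory)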
agentscope_runtime/engine/services/session_history/redis_session_history_service.py (file removed)
@@ -1,283 +0,0 @@
# -*- coding: utf-8 -*-
import uuid

from typing import Optional, Dict, Any, List, Union

import redis.asyncio as aioredis

from .session_history_service import SessionHistoryService
from ...schemas.session import Session
from ...schemas.agent_schemas import Message


class RedisSessionHistoryService(SessionHistoryService):
    def __init__(
        self,
        redis_url: str = "redis://localhost:6379/0",
        redis_client: Optional[aioredis.Redis] = None,
        socket_timeout: Optional[float] = 5.0,
        socket_connect_timeout: Optional[float] = 5.0,
        max_connections: Optional[int] = None,
        retry_on_timeout: bool = True,
        ttl_seconds: Optional[int] = 3600,  # 1 hour in seconds
        max_messages_per_session: Optional[int] = None,
        health_check_interval: Optional[float] = 30.0,
        socket_keepalive: bool = True,
    ):
        """
        Initialize RedisSessionHistoryService.

        Args:
            redis_url: Redis connection URL
            redis_client: Optional pre-configured Redis client
            socket_timeout: Socket timeout in seconds (default: 5.0)
            socket_connect_timeout: Socket connect timeout in seconds
                (default: 5.0)
            max_connections: Maximum number of connections in the pool
                (default: None)
            retry_on_timeout: Whether to retry on timeout (default: True)
            ttl_seconds: Time-to-live in seconds for session data.
                If None, data never expires (default: 3600, i.e., 1 hour)
            max_messages_per_session: Maximum number of messages per session.
                If None, no limit (default: None)
            health_check_interval: Interval in seconds for health checks on
                idle connections (default: 30.0).
                Connections idle longer than this will be checked before reuse.
                Set to 0 to disable.
            socket_keepalive: Enable TCP keepalive to prevent
                silent disconnections (default: True)
        """
        self._redis_url = redis_url
        self._redis = redis_client
        self._socket_timeout = socket_timeout
        self._socket_connect_timeout = socket_connect_timeout
        self._max_connections = max_connections
        self._retry_on_timeout = retry_on_timeout
        self._ttl_seconds = ttl_seconds
        self._max_messages_per_session = max_messages_per_session
        self._health_check_interval = health_check_interval
        self._socket_keepalive = socket_keepalive

    async def start(self):
        """Starts the Redis connection with proper timeout and connection
        pool settings."""
        if self._redis is None:
            self._redis = aioredis.from_url(
                self._redis_url,
                decode_responses=True,
                socket_timeout=self._socket_timeout,
                socket_connect_timeout=self._socket_connect_timeout,
                max_connections=self._max_connections,
                retry_on_timeout=self._retry_on_timeout,
                health_check_interval=self._health_check_interval,
                socket_keepalive=self._socket_keepalive,
            )

    async def stop(self):
        if self._redis:
            await self._redis.aclose()
            self._redis = None

    async def health(self) -> bool:
        """Checks the health of the service."""
        if not self._redis:
            return False
        try:
            pong = await self._redis.ping()
            return pong is True or pong == "PONG"
        except Exception:
            return False

    def _session_key(self, user_id: str, session_id: str):
        return f"session:{user_id}:{session_id}"

    def _session_pattern(self, user_id: str):
        """Generate the pattern for scanning session keys for a user."""
        return f"session:{user_id}:*"

    def _session_to_json(self, session: Session) -> str:
        return session.model_dump_json()

    def _session_from_json(self, s: str) -> Session:
        return Session.model_validate_json(s)

    async def create_session(
        self,
        user_id: str,
        session_id: Optional[str] = None,
    ) -> Session:
        if not self._redis:
            raise RuntimeError("Redis connection is not available")
        if session_id and session_id.strip():
            sid = session_id.strip()
        else:
            sid = str(uuid.uuid4())

        session = Session(id=sid, user_id=user_id, messages=[])
        key = self._session_key(user_id, sid)

        await self._redis.set(key, self._session_to_json(session))

        # Set TTL for the session key if configured
        if self._ttl_seconds is not None:
            await self._redis.expire(key, self._ttl_seconds)

        return session

    async def get_session(
        self,
        user_id: str,
        session_id: str,
    ) -> Optional[Session]:
        if not self._redis:
            raise RuntimeError("Redis connection is not available")
        key = self._session_key(user_id, session_id)
        session_json = await self._redis.get(key)
        if session_json is None:
            return None

        try:
            session = self._session_from_json(session_json)
        except Exception:
            # Return None for corrupted session data
            return None

        # Refresh TTL when accessing the session
        if self._ttl_seconds is not None:
            await self._redis.expire(key, self._ttl_seconds)

        return session

    async def delete_session(self, user_id: str, session_id: str):
        if not self._redis:
            raise RuntimeError("Redis connection is not available")
        key = self._session_key(user_id, session_id)
        await self._redis.delete(key)

    async def list_sessions(self, user_id: str) -> list[Session]:
        """List all sessions for a user by scanning session keys.

        Uses SCAN to find all session:{user_id}:* keys. Expired sessions
        naturally disappear as their keys expire, avoiding stale entries.
        """
        if not self._redis:
            raise RuntimeError("Redis connection is not available")
        pattern = self._session_pattern(user_id)
        sessions = []
        cursor = 0

        while True:
            cursor, keys = await self._redis.scan(
                cursor,
                match=pattern,
                count=100,
            )
            for key in keys:
                session_json = await self._redis.get(key)
                if session_json:
                    try:
                        session = self._session_from_json(session_json)
                        session.messages = []
                        sessions.append(session)
                    except Exception:
                        # Skip corrupted session data
                        continue

            if cursor == 0:
                break

        return sessions

    async def append_message(
        self,
        session: Session,
        message: Union[
            "Message",
            List["Message"],
            Dict[str, Any],
            List[Dict[str, Any]],
        ],
    ):
        if not self._redis:
            raise RuntimeError("Redis connection is not available")
        if not isinstance(message, list):
            message = [message]
        norm_message = []
        for msg in message:
            if msg is not None:
                if not isinstance(msg, Message):
                    msg = Message.model_validate(msg)
                norm_message.append(msg)

        session.messages.extend(norm_message)

        user_id = session.user_id
        session_id = session.id
        key = self._session_key(user_id, session_id)

        session_json = await self._redis.get(key)
        if session_json is None:
            # Session expired or not found, treat as a new session
            # Create a new session with the current messages
            stored_session = Session(
                id=session_id,
                user_id=user_id,
                messages=norm_message.copy(),
            )
        else:
            try:
                stored_session = self._session_from_json(session_json)
                stored_session.messages.extend(norm_message)
            except Exception:
                # Session data corrupted, treat as a new session
                stored_session = Session(
                    id=session_id,
                    user_id=user_id,
                    messages=norm_message.copy(),
                )

        # Limit the number of messages per session to prevent memory issues
        if self._max_messages_per_session is not None:
            if len(stored_session.messages) > self._max_messages_per_session:
                # Keep only the most recent messages
                stored_session.messages = stored_session.messages[
                    -self._max_messages_per_session :
                ]
                # Keep the in-memory session in sync with the stored session
                session.messages = session.messages[
                    -self._max_messages_per_session :
                ]

        await self._redis.set(key, self._session_to_json(stored_session))

        # Set TTL for the session key if configured
        if self._ttl_seconds is not None:
            await self._redis.expire(key, self._ttl_seconds)

    async def delete_user_sessions(self, user_id: str) -> None:
        """
        Deletes all session history data for a specific user.

        Uses SCAN to find all session keys for the user and deletes them.

        Args:
            user_id (str): The ID of the user whose session history data should
                be deleted
        """
        if not self._redis:
            raise RuntimeError("Redis connection is not available")

        pattern = self._session_pattern(user_id)
        cursor = 0

        while True:
            cursor, keys = await self._redis.scan(
                cursor,
                match=pattern,
                count=100,
            )
            if keys:
                await self._redis.delete(*keys)

            if cursor == 0:
                break
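And a minimal lifecycle sketch for the removed RedisSessionHistoryService, again only applicable to releases before 1.1.0b2. It assumes a Redis server at the default local URL and uses only the constructor parameters and methods defined in the source above.

import asyncio

from agentscope_runtime.engine.services.session_history.redis_session_history_service import (
    RedisSessionHistoryService,
)


async def main() -> None:
    service = RedisSessionHistoryService(
        redis_url="redis://localhost:6379/0",
        ttl_seconds=3600,              # session keys expire after an hour
        max_messages_per_session=500,  # cap stored history per session
    )
    await service.start()
    try:
        session = await service.create_session(user_id="user-1")
        fetched = await service.get_session(user_id="user-1", session_id=session.id)
        print(fetched.id if fetched else None)
        print(len(await service.list_sessions(user_id="user-1")))
        await service.delete_session(user_id="user-1", session_id=session.id)
    finally:
        await service.stop()


if __name__ == "__main__":
    asyncio.run(main())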