memorer 0.4.0__tar.gz → 0.5.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {memorer-0.4.0 → memorer-0.5.0}/PKG-INFO +1 -1
- {memorer-0.4.0 → memorer-0.5.0}/memorer/__init__.py +15 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/_config.py +15 -3
- {memorer-0.4.0 → memorer-0.5.0}/memorer/client.py +36 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/resources/__init__.py +3 -0
- memorer-0.5.0/memorer/resources/conversations.py +396 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/types/__init__.py +15 -0
- memorer-0.5.0/memorer/types/conversations.py +70 -0
- {memorer-0.4.0 → memorer-0.5.0}/pyproject.toml +1 -1
- {memorer-0.4.0 → memorer-0.5.0}/.gitignore +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/LICENSE +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/README.md +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/_http.py +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/errors.py +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/py.typed +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/resources/_base.py +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/resources/entities.py +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/resources/graph.py +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/resources/knowledge.py +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/resources/memories.py +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/types/common.py +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/types/entities.py +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/types/knowledge.py +0 -0
- {memorer-0.4.0 → memorer-0.5.0}/memorer/types/memories.py +0 -0
{memorer-0.4.0 → memorer-0.5.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: memorer
-Version: 0.4.0
+Version: 0.5.0
 Summary: Semantic memory for conversational AI - remember, recall, forget. Sub-100ms retrieval with emotional ranking and graph reasoning.
 Project-URL: Homepage, https://memorer.ai
 Project-URL: Documentation, https://docs.memorer.ai
{memorer-0.4.0 → memorer-0.5.0}/memorer/__init__.py
@@ -36,9 +36,14 @@ from memorer.errors import (
     StreamingError,
     ValidationError,
 )
+from memorer.resources import ConversationClient
 from memorer.types import (
     Citation,
     ConsolidationReport,
+    Conversation,
+    ConversationContext,
+    ConversationList,
+    ConversationRecallResponse,
     DerivedMemory,
     Document,
     DuplicateGroup,
@@ -61,6 +66,8 @@ from memorer.types import (
     MemorySource,
     MemoryStats,
     MergeResponse,
+    Message,
+    MessageList,
     PaginatedResponse,
     Pagination,
     QueryResponse,
@@ -80,6 +87,7 @@ __all__ = [
     # Clients
     "Memorer",
     "UserClient",
+    "ConversationClient",
     # Errors
     "MemorerError",
     "AuthenticationError",
@@ -99,6 +107,13 @@ __all__ = [
     "RetrievalPath",
     "Scope",
     "TimingBreakdown",
+    # Conversation types
+    "Conversation",
+    "ConversationContext",
+    "ConversationList",
+    "ConversationRecallResponse",
+    "Message",
+    "MessageList",
     # Knowledge types
     "Citation",
     "Document",
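The practical effect of these export changes is that the conversation client and its types become importable from the package root. A minimal sketch, assuming only what this diff shows (the API key and user ID are placeholders, not taken from the package's documentation):

    from memorer import ConversationClient, Memorer, Message

    client = Memorer(api_key="mem_sk_...")          # placeholder key
    user = client.for_user("user-123")              # per the existing SDK docstrings
    conv: ConversationClient = user.conversation()  # new in 0.5.0
    msg: Message = conv.add("user", "Hello!")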
{memorer-0.4.0 → memorer-0.5.0}/memorer/_config.py
@@ -4,14 +4,20 @@ Memorer SDK Configuration
 
 from __future__ import annotations
 
-
+import os
+from dataclasses import dataclass
 
 # Internal constants
-
+_DEFAULT_BASE_URL = "https://api.memorer.ai"
 DEFAULT_TIMEOUT = 30.0
 DEFAULT_MAX_RETRIES = 2
 
 
+def _get_default_base_url() -> str:
+    """Get base URL from environment or use default."""
+    return os.getenv("MEMORER_BASE_URL", _DEFAULT_BASE_URL)
+
+
 @dataclass
 class ClientConfig:
     """
@@ -19,12 +25,18 @@ class ClientConfig:
 
     Args:
         api_key: API key for authentication (required).
+        base_url: API base URL. Defaults to MEMORER_BASE_URL env var
+            or https://api.memorer.ai
         timeout: Request timeout in seconds. Defaults to 30.
         max_retries: Maximum number of retries for transient failures.
            Defaults to 2. Set to 0 to disable retries.
     """
 
     api_key: str
+    base_url: str | None = None
     timeout: float = DEFAULT_TIMEOUT
     max_retries: int = DEFAULT_MAX_RETRIES
-
+
+    def __post_init__(self) -> None:
+        if self.base_url is None:
+            self.base_url = _get_default_base_url()
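The resolution order introduced by the new __post_init__ hook: an explicit base_url wins, then the MEMORER_BASE_URL environment variable, then the production default. A small illustrative sketch of that behavior (not an official example; the key is a placeholder):

    import os
    from memorer._config import ClientConfig

    os.environ["MEMORER_BASE_URL"] = "http://localhost:8000"

    cfg = ClientConfig(api_key="mem_sk_...")            # no base_url passed
    assert cfg.base_url == "http://localhost:8000"      # falls back to the env var

    cfg = ClientConfig(api_key="mem_sk_...", base_url="https://api.memorer.ai")
    assert cfg.base_url == "https://api.memorer.ai"     # explicit value is kept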
{memorer-0.4.0 → memorer-0.5.0}/memorer/client.py
@@ -11,6 +11,8 @@ from typing import Any
 from memorer._config import ClientConfig
 from memorer._http import HTTPClient
 from memorer.resources import (
+    ConversationClient,
+    ConversationsResource,
     EntitiesResource,
     GraphResource,
     KnowledgeResource,
@@ -40,6 +42,7 @@ class UserClient:
         self.knowledge = KnowledgeResource(http, owner_id=owner_id)
         self.memories = MemoriesResource(http, owner_id=owner_id)
         self.entities = EntitiesResource(http, owner_id=owner_id)
+        self.conversations = ConversationsResource(http, owner_id=owner_id)
 
     @property
     def owner_id(self) -> str:
@@ -112,6 +115,33 @@ class UserClient:
         """
         self.memories.delete(memory_id)
 
+    def conversation(self, session_id: str | None = None) -> ConversationClient:
+        """
+        Get or create a conversation.
+
+        If session_id is provided, returns a client for that existing conversation.
+        If session_id is None, creates a new conversation and returns a client for it.
+
+        Args:
+            session_id: Optional conversation ID. If None, creates a new conversation.
+
+        Returns:
+            ConversationClient scoped to the conversation
+
+        Example:
+            >>> user = client.for_user("user-123")
+            >>> # Create new conversation
+            >>> conv = user.conversation()
+            >>> # Or use existing
+            >>> conv = user.conversation("conv-abc")
+            >>> conv.add("user", "Hello!")
+        """
+        if session_id is None:
+            # Create new conversation
+            new_conv = self.conversations.create()
+            return ConversationClient(self._http, self._owner_id, new_conv.id)
+        return ConversationClient(self._http, self._owner_id, session_id)
+
 
 class Memorer:
     """
@@ -143,6 +173,7 @@ class Memorer:
         self,
         api_key: str,
         *,
+        base_url: str | None = None,
         timeout: float = 30.0,
         max_retries: int = 2,
     ) -> None:
@@ -151,14 +182,19 @@
 
         Args:
             api_key: Your Memorer API key (required).
+            base_url: API base URL. Defaults to MEMORER_BASE_URL env var
+                or https://api.memorer.ai. Useful for local development.
             timeout: Request timeout in seconds (default: 30)
             max_retries: Max retries for transient failures (default: 2)
 
         Example:
             >>> client = Memorer(api_key="mem_sk_...")
+            >>> # For local development:
+            >>> client = Memorer(api_key="...", base_url="http://localhost:8000")
         """
         self._config = ClientConfig(
             api_key=api_key,
+            base_url=base_url,
             timeout=timeout,
             max_retries=max_retries,
         )
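Put together, a user-scoped client can now open conversations directly, and the top-level client accepts a base_url override. A hedged usage sketch drawn from the docstrings above (the IDs, key, and local URL are placeholders):

    from memorer import Memorer

    client = Memorer(api_key="mem_sk_...", base_url="http://localhost:8000")  # base_url is optional
    user = client.for_user("user-123")

    conv = user.conversation()              # no ID: creates a conversation server-side
    conv.add("user", "Hello!")
    same_conv = user.conversation(conv.id)  # existing ID: reattach to it later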
{memorer-0.4.0 → memorer-0.5.0}/memorer/resources/__init__.py
@@ -4,12 +4,15 @@ Memorer SDK Resources
 All API resource classes.
 """
 
+from memorer.resources.conversations import ConversationClient, ConversationsResource
 from memorer.resources.entities import EntitiesResource
 from memorer.resources.graph import GraphResource
 from memorer.resources.knowledge import KnowledgeResource
 from memorer.resources.memories import MemoriesResource
 
 __all__ = [
+    "ConversationClient",
+    "ConversationsResource",
     "EntitiesResource",
     "GraphResource",
     "KnowledgeResource",
memorer-0.5.0/memorer/resources/conversations.py
@@ -0,0 +1,396 @@
+"""
+Conversations resource for managing conversation sessions and messages.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Literal
+
+from memorer.resources._base import BaseResource
+from memorer.types.common import TimingBreakdown
+from memorer.types.conversations import (
+    Conversation,
+    ConversationContext,
+    ConversationList,
+    ConversationRecallResponse,
+    Message,
+    MessageList,
+)
+from memorer.types.knowledge import QueryResult
+
+if TYPE_CHECKING:
+    from memorer._http import HTTPClient
+
+
+def _parse_conversation(data: dict[str, Any]) -> Conversation:
+    """Parse raw response into Conversation."""
+    return Conversation(**data)
+
+
+def _parse_message(data: dict[str, Any]) -> Message:
+    """Parse raw response into Message."""
+    return Message(**data)
+
+
+def _parse_recall_response(data: dict[str, Any]) -> ConversationRecallResponse:
+    """Parse raw response into ConversationRecallResponse."""
+    conversation_context = [
+        ConversationContext(**c) for c in data.get("conversation_context", [])
+    ]
+    results = [QueryResult(**r) for r in data.get("results", [])]
+    timing = TimingBreakdown(**data["timing"]) if data.get("timing") else None
+
+    return ConversationRecallResponse(
+        conversation_context=conversation_context,
+        results=results,
+        context=data.get("context", ""),
+        timing=timing,
+    )
+
+
+class ConversationClient:
+    """
+    A client scoped to a specific conversation.
+
+    All operations on this client are automatically scoped to the conversation.
+
+    Example:
+        >>> user = client.for_user("user-123")
+        >>> conv = user.conversation("session-abc")
+        >>> conv.add("user", "I live in Seattle")
+        >>> result = conv.recall("where does the user live")
+    """
+
+    def __init__(
+        self,
+        http: HTTPClient,
+        owner_id: str,
+        conversation_id: str,
+    ) -> None:
+        self._http = http
+        self._owner_id = owner_id
+        self._conversation_id = conversation_id
+
+    @property
+    def id(self) -> str:
+        """The conversation ID."""
+        return self._conversation_id
+
+    def add(
+        self,
+        role: Literal["user", "assistant", "system"],
+        content: str,
+        *,
+        metadata: dict[str, Any] | None = None,
+        extract_memories: bool = True,
+    ) -> Message:
+        """
+        Add a message to the conversation.
+
+        Args:
+            role: Who sent the message ("user", "assistant", or "system")
+            content: The message content
+            metadata: Optional metadata to attach
+            extract_memories: Auto-extract memories from the message (default: True)
+
+        Returns:
+            The created Message
+
+        Example:
+            >>> conv.add("user", "I just moved to Seattle")
+            >>> conv.add("assistant", "Welcome to Seattle!")
+        """
+        data: dict[str, Any] = {
+            "role": role,
+            "content": content,
+            "extract_memories": extract_memories,
+        }
+        if metadata:
+            data["metadata"] = metadata
+
+        response = self._http.post(
+            f"/v1/sdk/conversations/{self._conversation_id}/messages",
+            json_data=data,
+        )
+        return _parse_message(response)
+
+    def add_batch(
+        self,
+        messages: list[dict[str, Any]],
+        *,
+        extract_memories: bool = True,
+    ) -> MessageList:
+        """
+        Add multiple messages at once.
+
+        Args:
+            messages: List of message dicts with "role" and "content" keys
+            extract_memories: Auto-extract memories from messages (default: True)
+
+        Returns:
+            MessageList with all created messages
+
+        Example:
+            >>> conv.add_batch([
+            ...     {"role": "user", "content": "Hello"},
+            ...     {"role": "assistant", "content": "Hi there!"},
+            ... ])
+        """
+        data: dict[str, Any] = {
+            "messages": messages,
+            "extract_memories": extract_memories,
+        }
+        response = self._http.post(
+            f"/v1/sdk/conversations/{self._conversation_id}/messages/batch",
+            json_data=data,
+        )
+        return MessageList(
+            count=response.get("count", 0),
+            messages=[_parse_message(m) for m in response.get("messages", [])],
+        )
+
+    def messages(
+        self,
+        *,
+        limit: int = 50,
+        offset: int = 0,
+    ) -> MessageList:
+        """
+        Get messages in the conversation.
+
+        Args:
+            limit: Maximum messages to return (default: 50)
+            offset: Pagination offset
+
+        Returns:
+            MessageList in chronological order (oldest first)
+
+        Example:
+            >>> messages = conv.messages(limit=20)
+            >>> for m in messages.messages:
+            ...     print(f"[{m.role}]: {m.content}")
+        """
+        response = self._http.get(
+            f"/v1/sdk/conversations/{self._conversation_id}/messages",
+            params={"limit": limit, "offset": offset},
+        )
+        return MessageList(
+            count=response.get("count", 0),
+            messages=[_parse_message(m) for m in response.get("messages", [])],
+        )
+
+    def recall(
+        self,
+        query: str,
+        *,
+        include_conversation_context: bool = True,
+        max_recent_messages: int = 10,
+        top_k: int = 10,
+        use_cache: bool = True,
+        use_reranker: bool = False,
+        use_graph_reasoning: bool = False,
+    ) -> ConversationRecallResponse:
+        """
+        Query with conversation context + long-term memories.
+
+        Combines recent messages (short-term) with semantic memory search (long-term).
+
+        Args:
+            query: What to recall
+            include_conversation_context: Include recent messages (default: True)
+            max_recent_messages: Number of recent messages to include (default: 10)
+            top_k: Number of semantic results (default: 10)
+            use_cache: Enable semantic cache (default: True)
+            use_reranker: Enable cross-encoder reranking (default: False)
+            use_graph_reasoning: Enable graph traversal (default: False)
+
+        Returns:
+            ConversationRecallResponse with conversation context, results, and assembled context
+
+        Example:
+            >>> result = conv.recall("where does the user live")
+            >>> print(result.context)  # Combined context for LLM
+        """
+        data: dict[str, Any] = {
+            "query": query,
+            "include_conversation_context": include_conversation_context,
+            "max_recent_messages": max_recent_messages,
+            "top_k": top_k,
+            "use_cache": use_cache,
+            "use_reranker": use_reranker,
+            "use_graph_reasoning": use_graph_reasoning,
+        }
+        response = self._http.post(
+            f"/v1/sdk/conversations/{self._conversation_id}/recall",
+            json_data=data,
+        )
+        return _parse_recall_response(response)
+
+    def get(self) -> Conversation:
+        """
+        Get the conversation details.
+
+        Returns:
+            Conversation with current state
+        """
+        response = self._http.get(f"/v1/sdk/conversations/{self._conversation_id}")
+        return _parse_conversation(response)
+
+    def update(
+        self,
+        *,
+        title: str | None = None,
+        metadata: dict[str, Any] | None = None,
+    ) -> Conversation:
+        """
+        Update conversation title or metadata.
+
+        Args:
+            title: New title (optional)
+            metadata: Metadata to merge (optional)
+
+        Returns:
+            Updated Conversation
+        """
+        data: dict[str, Any] = {}
+        if title is not None:
+            data["title"] = title
+        if metadata is not None:
+            data["metadata"] = metadata
+
+        response = self._http.request(
+            "PATCH",
+            f"/v1/sdk/conversations/{self._conversation_id}",
+            json_data=data,
+        )
+        return _parse_conversation(response)
+
+    def delete(self) -> None:
+        """Delete (soft-delete) the conversation."""
+        self._http.delete(f"/v1/sdk/conversations/{self._conversation_id}")
+
+
+class ConversationsResource(BaseResource):
+    """
+    Conversations resource for managing conversation sessions.
+
+    Example:
+        >>> user = client.for_user("user-123")
+        >>> conv = user.conversations.create()
+        >>> conv_list = user.conversations.list()
+    """
+
+    def create(
+        self,
+        *,
+        external_id: str | None = None,
+        title: str | None = None,
+        metadata: dict[str, Any] | None = None,
+    ) -> Conversation:
+        """
+        Create a new conversation.
+
+        Args:
+            external_id: Optional client-provided ID for idempotency
+            title: Optional title
+            metadata: Optional metadata dict
+
+        Returns:
+            The created Conversation
+
+        Example:
+            >>> conv = user.conversations.create(
+            ...     external_id="session-123",
+            ...     title="Support Chat",
+            ... )
+        """
+        if not self._owner_id:
+            raise ValueError(
+                "owner_id is required. Use client.for_user(owner_id) to create a scoped client."
+            )
+
+        data: dict[str, Any] = {"owner_id": self._owner_id}
+        if external_id:
+            data["external_id"] = external_id
+        if title:
+            data["title"] = title
+        if metadata:
+            data["metadata"] = metadata
+
+        response = self._post("/v1/sdk/conversations", json_data=data)
+        return _parse_conversation(response)
+
+    def list(
+        self,
+        *,
+        limit: int = 50,
+        offset: int = 0,
+    ) -> ConversationList:
+        """
+        List conversations for the current owner.
+
+        Args:
+            limit: Maximum conversations to return (default: 50)
+            offset: Pagination offset
+
+        Returns:
+            ConversationList ordered by most recent activity
+
+        Example:
+            >>> conversations = user.conversations.list(limit=10)
+            >>> for c in conversations.conversations:
+            ...     print(f"{c.title}: {c.message_count} messages")
+        """
+        if not self._owner_id:
+            raise ValueError(
+                "owner_id is required. Use client.for_user(owner_id) to create a scoped client."
+            )
+
+        params: dict[str, Any] = {
+            "owner_id": self._owner_id,
+            "limit": limit,
+            "offset": offset,
+        }
+        response = self._get("/v1/sdk/conversations", params=params)
+        return ConversationList(
+            count=response.get("count", 0),
+            conversations=[_parse_conversation(c) for c in response.get("conversations", [])],
+        )
+
+    def get(self, conversation_id: str) -> Conversation:
+        """
+        Get a conversation by ID.
+
+        Args:
+            conversation_id: UUID of the conversation
+
+        Returns:
+            Conversation details
+        """
+        response = self._get(f"/v1/sdk/conversations/{conversation_id}")
+        return _parse_conversation(response)
+
+    def delete(self, conversation_id: str) -> None:
+        """
+        Delete (soft-delete) a conversation.
+
+        Args:
+            conversation_id: UUID of the conversation
+        """
+        self._delete(f"/v1/sdk/conversations/{conversation_id}")
+
+    def client(self, conversation_id: str) -> ConversationClient:
+        """
+        Get a client scoped to a specific conversation.
+
+        Args:
+            conversation_id: UUID of the conversation
+
+        Returns:
+            ConversationClient for the conversation
+        """
+        if not self._owner_id:
+            raise ValueError(
+                "owner_id is required. Use client.for_user(owner_id) to create a scoped client."
+            )
+        return ConversationClient(self._http, self._owner_id, conversation_id)
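A typical end-to-end flow with the new resource, sketched from the docstrings in this file (the user ID, session ID, and message contents are illustrative, and error handling is omitted):

    user = client.for_user("user-123")
    conv = user.conversation("session-abc")

    # Short-term memory: store both sides of the exchange;
    # memory extraction is on by default (extract_memories=True).
    conv.add("user", "I live in Seattle")
    conv.add("assistant", "Noted, Seattle it is.")

    # Recall combines recent messages with semantic (long-term) results
    # and returns an assembled context string ready for an LLM prompt.
    result = conv.recall("where does the user live", top_k=5)
    print(result.context)
    for recent in result.conversation_context:
        print(f"[{recent.role}] {recent.content}")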
{memorer-0.4.0 → memorer-0.5.0}/memorer/types/__init__.py
@@ -13,6 +13,14 @@ from .common import (
     Scope,
     TimingBreakdown,
 )
+from .conversations import (
+    Conversation,
+    ConversationContext,
+    ConversationList,
+    ConversationRecallResponse,
+    Message,
+    MessageList,
+)
 from .entities import (
     DuplicateGroup,
     DuplicatesResponse,
@@ -58,6 +66,13 @@ __all__ = [
     "RetrievalPath",
     "Scope",
     "TimingBreakdown",
+    # Conversations
+    "Conversation",
+    "ConversationContext",
+    "ConversationList",
+    "ConversationRecallResponse",
+    "Message",
+    "MessageList",
     # Knowledge
     "Citation",
     "Document",
memorer-0.5.0/memorer/types/conversations.py
@@ -0,0 +1,70 @@
+"""
+Conversation and Message types for conversation support.
+"""
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import Any, Literal
+
+from pydantic import BaseModel, Field
+
+from memorer.types.common import TimingBreakdown
+from memorer.types.knowledge import QueryResult
+
+
+class Message(BaseModel):
+    """A message in a conversation."""
+
+    id: str
+    conversation_id: str
+    role: Literal["user", "assistant", "system"]
+    content: str
+    metadata: dict[str, Any] | None = None
+    created_at: datetime
+    extracted_entity_ids: list[str] | None = None
+
+
+class MessageList(BaseModel):
+    """Paginated list of messages."""
+
+    count: int
+    messages: list[Message]
+
+
+class Conversation(BaseModel):
+    """A conversation session."""
+
+    id: str
+    owner_id: str
+    external_id: str | None = None
+    title: str | None = None
+    metadata: dict[str, Any] | None = None
+    message_count: int = 0
+    started_at: datetime
+    last_message_at: datetime | None = None
+    created_at: datetime
+
+
+class ConversationList(BaseModel):
+    """Paginated list of conversations."""
+
+    count: int
+    conversations: list[Conversation]
+
+
+class ConversationContext(BaseModel):
+    """Message in conversation context format."""
+
+    role: str
+    content: str
+    timestamp: str
+
+
+class ConversationRecallResponse(BaseModel):
+    """Response from recall with conversation context."""
+
+    conversation_context: list[ConversationContext] = Field(default_factory=list)
+    results: list[QueryResult] = Field(default_factory=list)
+    context: str = ""
+    timing: TimingBreakdown | None = None
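Since these are ordinary pydantic models, raw API payloads validate directly into typed objects. A small sketch with made-up field values (the payload shape simply mirrors the Message model above):

    from memorer.types import Message

    payload = {
        "id": "msg-1",
        "conversation_id": "conv-1",
        "role": "user",
        "content": "I just moved to Seattle",
        "created_at": "2024-01-01T00:00:00Z",
    }
    msg = Message(**payload)   # pydantic coerces the ISO timestamp to datetime
    print(msg.role, msg.created_at.year)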
{memorer-0.4.0 → memorer-0.5.0}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "memorer"
-version = "0.4.0"
+version = "0.5.0"
 description = "Semantic memory for conversational AI - remember, recall, forget. Sub-100ms retrieval with emotional ranking and graph reasoning."
 readme = "README.md"
 requires-python = ">=3.9"