langchain_xache-0.1.0-py3-none-any.whl
This diff shows the contents of a publicly available package version as released to a supported registry. It is provided for informational purposes only and reflects the package exactly as it appears in that public registry.
- langchain_xache-0.1.0.dist-info/METADATA +199 -0
- langchain_xache-0.1.0.dist-info/RECORD +12 -0
- langchain_xache-0.1.0.dist-info/WHEEL +5 -0
- langchain_xache-0.1.0.dist-info/top_level.txt +1 -0
- xache_langchain/__init__.py +59 -0
- xache_langchain/_async_utils.py +56 -0
- xache_langchain/chat_history.py +194 -0
- xache_langchain/collective.py +254 -0
- xache_langchain/extraction.py +229 -0
- xache_langchain/memory.py +181 -0
- xache_langchain/reputation.py +237 -0
- xache_langchain/retriever.py +221 -0
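
The distribution name is `langchain-xache`, but `top_level.txt` shows the importable package is `xache_langchain`. A minimal install-and-import sketch, assuming the wheel is published under the distribution name above and that `__init__.py` re-exports the classes referenced in the module docstrings below:

```python
# Assumption: the distribution is installable under its project name.
#   pip install langchain-xache
#
# The import path comes from top_level.txt, not from the distribution name.
from xache_langchain import (
    XacheMemory,                    # memory.py: drop-in ConversationBufferMemory replacement
    XacheExtractor,                 # extraction.py: LLM-powered memory extraction
    XacheCollectiveContributeTool,  # collective.py: contribute insights to the shared pool
    XacheCollectiveQueryTool,       # collective.py: query insights from the shared pool
)
```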

xache_langchain/collective.py
@@ -0,0 +1,254 @@
+"""
+Xache Collective Intelligence for LangChain
+Share and learn from collective knowledge pools
+"""
+
+from typing import List, Optional, Dict, Any
+from langchain.tools import BaseTool
+from pydantic import BaseModel, Field
+
+from xache import XacheClient
+
+
+class ContributeInput(BaseModel):
+    """Input for contribute tool"""
+    insight: str = Field(description="The insight or learning to contribute")
+    domain: str = Field(description="Domain/topic of the insight")
+    evidence: Optional[str] = Field(default=None, description="Supporting evidence")
+    tags: Optional[List[str]] = Field(default=None, description="Tags for categorization")
+
+
+class QueryInput(BaseModel):
+    """Input for query tool"""
+    query: str = Field(description="What to search for in the collective")
+    domain: Optional[str] = Field(default=None, description="Filter by domain")
+    limit: int = Field(default=5, description="Number of results")
+
+
+class XacheCollectiveContributeTool(BaseTool):
+    """
+    LangChain tool for contributing to Xache collective intelligence.
+
+    Use this tool when your agent learns something valuable that could
+    benefit other agents. Contributions earn reputation.
+
+    Example:
+        ```python
+        from xache_langchain import XacheCollectiveContributeTool
+
+        contribute_tool = XacheCollectiveContributeTool(
+            wallet_address="0x...",
+            private_key="0x..."
+        )
+
+        # Use in agent
+        tools = [contribute_tool, ...]
+        agent = initialize_agent(tools, llm)
+        ```
+    """
+
+    name: str = "xache_collective_contribute"
+    description: str = (
+        "Contribute an insight or learning to the collective intelligence pool. "
+        "Use this when you discover something valuable that could help other agents. "
+        "You'll earn reputation for quality contributions."
+    )
+    args_schema: type = ContributeInput
+
+    # Xache configuration
+    api_url: str = "https://api.xache.xyz"
+    wallet_address: str
+    private_key: str
+    chain: str = "base"
+
+    _client: Optional[XacheClient] = None
+
+    class Config:
+        arbitrary_types_allowed = True
+        underscore_attrs_are_private = True
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        chain_prefix = "sol" if self.chain == "solana" else "evm"
+        did = f"did:agent:{chain_prefix}:{self.wallet_address.lower()}"
+        self._client = XacheClient(
+            api_url=self.api_url,
+            did=did,
+            private_key=self.private_key,
+        )
+
+    def _run(
+        self,
+        insight: str,
+        domain: str,
+        evidence: Optional[str] = None,
+        tags: Optional[List[str]] = None,
+    ) -> str:
+        """Contribute to collective"""
+        import asyncio
+
+        async def _contribute():
+            async with self._client as client:
+                result = await client.collective.contribute(
+                    domain=domain,
+                    pattern=insight,
+                    evidence=evidence,
+                    tags=tags or [],
+                )
+                return result
+
+        try:
+            loop = asyncio.get_event_loop()
+            if loop.is_running():
+                import concurrent.futures
+                with concurrent.futures.ThreadPoolExecutor() as pool:
+                    result = pool.submit(asyncio.run, _contribute()).result()
+            else:
+                result = loop.run_until_complete(_contribute())
+        except RuntimeError:
+            result = asyncio.run(_contribute())
+
+        heuristic_id = result.get("heuristicId", "unknown")
+        receipt_id = result.get("receiptId", "unknown")
+        return f"Contributed insight to '{domain}'. Heuristic ID: {heuristic_id}, Receipt: {receipt_id}"
+
+    async def _arun(
+        self,
+        insight: str,
+        domain: str,
+        evidence: Optional[str] = None,
+        tags: Optional[List[str]] = None,
+    ) -> str:
+        """Async contribute to collective"""
+        async with self._client as client:
+            result = await client.collective.contribute(
+                domain=domain,
+                pattern=insight,
+                evidence=evidence,
+                tags=tags or [],
+            )
+
+        heuristic_id = result.get("heuristicId", "unknown")
+        receipt_id = result.get("receiptId", "unknown")
+        return f"Contributed insight to '{domain}'. Heuristic ID: {heuristic_id}, Receipt: {receipt_id}"
+
+
+class XacheCollectiveQueryTool(BaseTool):
+    """
+    LangChain tool for querying Xache collective intelligence.
+
+    Use this tool to learn from other agents' contributions.
+
+    Example:
+        ```python
+        from xache_langchain import XacheCollectiveQueryTool
+
+        query_tool = XacheCollectiveQueryTool(
+            wallet_address="0x...",
+            private_key="0x..."
+        )
+
+        # Use in agent
+        tools = [query_tool, ...]
+        ```
+    """
+
+    name: str = "xache_collective_query"
+    description: str = (
+        "Query the collective intelligence pool to learn from other agents. "
+        "Use this when you need insights or knowledge from the community. "
+        "Returns relevant contributions from other agents."
+    )
+    args_schema: type = QueryInput
+
+    # Xache configuration
+    api_url: str = "https://api.xache.xyz"
+    wallet_address: str
+    private_key: str
+    chain: str = "base"
+
+    _client: Optional[XacheClient] = None
+
+    class Config:
+        arbitrary_types_allowed = True
+        underscore_attrs_are_private = True
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        chain_prefix = "sol" if self.chain == "solana" else "evm"
+        did = f"did:agent:{chain_prefix}:{self.wallet_address.lower()}"
+        self._client = XacheClient(
+            api_url=self.api_url,
+            did=did,
+            private_key=self.private_key,
+        )
+
+    def _run(
+        self,
+        query: str,
+        domain: Optional[str] = None,
+        limit: int = 5,
+    ) -> str:
+        """Query collective"""
+        import asyncio
+
+        async def _query():
+            async with self._client as client:
+                result = await client.collective.query(
+                    query=query,
+                    domain=domain,
+                    limit=limit,
+                )
+                return result
+
+        try:
+            loop = asyncio.get_event_loop()
+            if loop.is_running():
+                import concurrent.futures
+                with concurrent.futures.ThreadPoolExecutor() as pool:
+                    result = pool.submit(asyncio.run, _query()).result()
+            else:
+                result = loop.run_until_complete(_query())
+        except RuntimeError:
+            result = asyncio.run(_query())
+
+        # Format results
+        results = result.get("results", [])
+        if not results:
+            return "No relevant insights found in the collective."
+
+        output = f"Found {len(results)} insights:\n"
+        for i, item in enumerate(results, 1):
+            output += f"\n{i}. {item.get('pattern', '')[:200]}"
+            if item.get("domain"):
+                output += f" [Domain: {item['domain']}]"
+            if item.get("relevance"):
+                output += f" (Relevance: {item['relevance']:.2f})"
+
+        return output
+
+    async def _arun(
+        self,
+        query: str,
+        domain: Optional[str] = None,
+        limit: int = 5,
+    ) -> str:
+        """Async query collective"""
+        async with self._client as client:
+            result = await client.collective.query(
+                query=query,
+                domain=domain,
+                limit=limit,
+            )
+
+        results = result.get("results", [])
+        if not results:
+            return "No relevant insights found in the collective."
+
+        output = f"Found {len(results)} insights:\n"
+        for i, item in enumerate(results, 1):
+            output += f"\n{i}. {item.get('pattern', '')[:200]}"
+            if item.get("domain"):
+                output += f" [Domain: {item['domain']}]"
+
+        return output
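
A usage sketch combining the two tools above, expanded from their class docstrings. The wallet and key values are placeholders, and the chat model plus the legacy `initialize_agent`/`AgentType` wiring (the constructor the docstrings themselves reference) are assumptions about the caller's LangChain setup:

```python
from langchain.agents import AgentType, initialize_agent
from langchain_openai import ChatOpenAI  # assumption: any LangChain chat model would do

from xache_langchain import XacheCollectiveContributeTool, XacheCollectiveQueryTool

# Both tools build a did:agent:<chain>:<address> identity from these values and
# sign their requests to api.xache.xyz with the private key.
contribute_tool = XacheCollectiveContributeTool(wallet_address="0x...", private_key="0x...")
query_tool = XacheCollectiveQueryTool(wallet_address="0x...", private_key="0x...")

agent = initialize_agent(
    tools=[query_tool, contribute_tool],
    llm=ChatOpenAI(model="gpt-4o-mini"),  # assumed model name
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
)

# The agent can now invoke the tools by their registered names,
# xache_collective_query and xache_collective_contribute.
agent.run("Check the collective for known pitfalls when scraping JavaScript-heavy sites.")
```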

xache_langchain/extraction.py
@@ -0,0 +1,229 @@
+"""
+Xache Extraction for LangChain
+Automatic memory extraction from conversations
+"""
+
+import os
+from typing import List, Optional, Dict, Any
+from langchain.schema import Document
+from pydantic import BaseModel, Field
+
+from xache import XacheClient
+from ._async_utils import run_sync
+
+
+class ExtractionResult(BaseModel):
+    """Result from memory extraction"""
+    memories: List[Dict[str, Any]] = Field(default_factory=list)
+    receipt_id: Optional[str] = None
+    transaction: Optional[str] = None
+
+
+class XacheExtractor:
+    """
+    Extract memories from text using Xache's LLM-powered extraction.
+
+    Supports three modes:
+    - api-key: Use your own API key (BYOK)
+    - endpoint: Use your own LLM endpoint
+    - xache-managed: Use Xache's hosted LLM
+
+    Example:
+        ```python
+        from xache_langchain import XacheExtractor
+
+        extractor = XacheExtractor(
+            wallet_address="0x...",
+            private_key="0x...",
+            mode="xache-managed"  # or "api-key" with your key
+        )
+
+        # Extract memories from a conversation
+        result = extractor.extract(
+            trace="User asked about quantum computing. "
+                  "I explained superposition and entanglement."
+        )
+
+        print(f"Extracted {len(result.memories)} memories")
+        for mem in result.memories:
+            print(f"  - {mem['content'][:50]}...")
+        ```
+
+    With auto-store (memories saved automatically):
+        ```python
+        result = extractor.extract(
+            trace="...",
+            auto_store=True  # Extracted memories are stored
+        )
+        ```
+    """
+
+    def __init__(
+        self,
+        wallet_address: str,
+        private_key: str,
+        api_url: Optional[str] = None,
+        chain: str = "base",
+        mode: str = "xache-managed",
+        llm_api_key: Optional[str] = None,
+        llm_endpoint: Optional[str] = None,
+        llm_model: str = "claude-3-haiku-20240307",
+    ):
+        """
+        Initialize Xache extractor.
+
+        Args:
+            wallet_address: Wallet address for authentication
+            private_key: Private key for signing
+            api_url: Xache API URL (defaults to XACHE_API_URL env var or https://api.xache.xyz)
+            chain: Blockchain (base, solana)
+            mode: Extraction mode (xache-managed, api-key, endpoint)
+            llm_api_key: Your LLM API key (required for api-key mode)
+            llm_endpoint: Your LLM endpoint (required for endpoint mode)
+            llm_model: LLM model to use
+        """
+        # Validate mode-specific requirements
+        if mode == "api-key" and not llm_api_key:
+            raise ValueError("llm_api_key is required when mode is 'api-key'")
+        if mode == "endpoint" and not llm_endpoint:
+            raise ValueError("llm_endpoint is required when mode is 'endpoint'")
+
+        self.wallet_address = wallet_address
+        self.private_key = private_key
+        self.api_url = api_url or os.environ.get("XACHE_API_URL", "https://api.xache.xyz")
+        self.chain = chain
+        self.mode = mode
+        self.llm_api_key = llm_api_key
+        self.llm_endpoint = llm_endpoint
+        self.llm_model = llm_model
+
+        # Build DID
+        chain_prefix = "sol" if chain == "solana" else "evm"
+        self.did = f"did:agent:{chain_prefix}:{wallet_address.lower()}"
+
+        self._client = XacheClient(
+            api_url=self.api_url,
+            did=self.did,
+            private_key=private_key,
+        )
+
+    def extract(
+        self,
+        trace: str,
+        auto_store: bool = False,
+        context: Optional[Dict[str, Any]] = None,
+    ) -> ExtractionResult:
+        """
+        Extract memories from text.
+
+        Args:
+            trace: The text to extract memories from
+            auto_store: Automatically store extracted memories
+            context: Optional context for extraction
+
+        Returns:
+            ExtractionResult with extracted memories
+        """
+
+        async def _extract():
+            async with self._client as client:
+                # Build LLM config based on mode
+                if self.mode == "api-key":
+                    llm_config = {
+                        "type": "api-key",
+                        "apiKey": self.llm_api_key,
+                        "model": self.llm_model,
+                    }
+                elif self.mode == "endpoint":
+                    llm_config = {
+                        "type": "endpoint",
+                        "endpoint": self.llm_endpoint,
+                        "model": self.llm_model,
+                    }
+                else:
+                    llm_config = {
+                        "type": "xache-managed",
+                        "model": self.llm_model,
+                    }
+
+                result = await client.extraction.extract(
+                    trace=trace,
+                    llm_config=llm_config,
+                    options={
+                        "autoStore": auto_store,
+                        "context": context,
+                    }
+                )
+
+                return ExtractionResult(
+                    memories=result.get("memories", []),
+                    receipt_id=result.get("receiptId"),
+                    transaction=result.get("transaction"),
+                )
+
+        return run_sync(_extract())
+
+    async def aextract(
+        self,
+        trace: str,
+        auto_store: bool = False,
+        context: Optional[Dict[str, Any]] = None,
+    ) -> ExtractionResult:
+        """Async extract memories from text"""
+        async with self._client as client:
+            if self.mode == "api-key":
+                llm_config = {
+                    "type": "api-key",
+                    "apiKey": self.llm_api_key,
+                    "model": self.llm_model,
+                }
+            elif self.mode == "endpoint":
+                llm_config = {
+                    "type": "endpoint",
+                    "endpoint": self.llm_endpoint,
+                    "model": self.llm_model,
+                }
+            else:
+                llm_config = {
+                    "type": "xache-managed",
+                    "model": self.llm_model,
+                }
+
+            result = await client.extraction.extract(
+                trace=trace,
+                llm_config=llm_config,
+                options={
+                    "autoStore": auto_store,
+                    "context": context,
+                }
+            )
+
+            return ExtractionResult(
+                memories=result.get("memories", []),
+                receipt_id=result.get("receiptId"),
+                transaction=result.get("transaction"),
+            )
+
+    def extract_from_messages(
+        self,
+        messages: List[Any],
+        auto_store: bool = False,
+    ) -> ExtractionResult:
+        """
+        Extract memories from LangChain messages.
+
+        Args:
+            messages: List of LangChain BaseMessage objects
+            auto_store: Automatically store extracted memories
+
+        Returns:
+            ExtractionResult with extracted memories
+        """
+        # Convert messages to trace format
+        trace_lines = []
+        for msg in messages:
+            role = msg.__class__.__name__.replace("Message", "")
+            trace_lines.append(f"{role}: {msg.content}")
+
+        trace = "\n".join(trace_lines)
+        return self.extract(trace, auto_store=auto_store)
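
A short sketch of the `extract_from_messages` path, which the class docstring does not cover: each LangChain message is flattened to a `Role: content` line (`Human: ...`, `AI: ...`) and the joined trace is passed to `extract()`. The message classes come from `langchain.schema`, matching the module's own imports; wallet values are placeholders:

```python
from langchain.schema import AIMessage, HumanMessage

from xache_langchain import XacheExtractor

extractor = XacheExtractor(
    wallet_address="0x...",
    private_key="0x...",
    mode="xache-managed",  # no LLM credentials needed; "api-key" would also require llm_api_key
)

# HumanMessage -> "Human: ...", AIMessage -> "AI: ..." in the extraction trace.
messages = [
    HumanMessage(content="Our deploys keep failing on the database migration step."),
    AIMessage(content="Run migrations as a separate job before rolling out new pods."),
]

result = extractor.extract_from_messages(messages, auto_store=True)
print(f"Stored {len(result.memories)} memories, receipt: {result.receipt_id}")
```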

xache_langchain/memory.py
@@ -0,0 +1,181 @@
+"""
+Xache Memory for LangChain
+Drop-in replacement for ConversationBufferMemory with verifiable receipts
+"""
+
+import os
+from typing import Any, Dict, List, Optional
+from langchain.memory.chat_memory import BaseChatMemory
+from langchain.schema import BaseMessage, HumanMessage, AIMessage
+from pydantic import Field
+
+from xache import XacheClient
+from ._async_utils import run_sync
+
+
+class XacheMemory(BaseChatMemory):
+    """
+    LangChain memory backed by Xache Protocol.
+
+    Provides persistent, verifiable memory with cryptographic receipts.
+    One-line replacement for ConversationBufferMemory.
+
+    Example:
+        ```python
+        # Before (standard LangChain)
+        from langchain.memory import ConversationBufferMemory
+        memory = ConversationBufferMemory()
+
+        # After (with Xache - one line change!)
+        from xache_langchain import XacheMemory
+        memory = XacheMemory(
+            wallet_address="0x...",
+            private_key="0x..."
+        )
+
+        # Everything else stays the same
+        agent = initialize_agent(tools, llm, memory=memory)
+        ```
+
+    Features:
+    - Persistent memory across sessions
+    - Cryptographic receipts for every operation
+    - Reputation tracking for quality
+    - x402 micropayments (auto-handled)
+    """
+
+    # Xache client configuration
+    api_url: str = Field(
+        default_factory=lambda: os.environ.get("XACHE_API_URL", "https://api.xache.xyz")
+    )
+    wallet_address: str = Field(...)
+    private_key: str = Field(...)
+    chain: str = Field(default="base")
+
+    # Memory configuration
+    memory_key: str = Field(default="history")
+    return_messages: bool = Field(default=True)
+    human_prefix: str = Field(default="Human")
+    ai_prefix: str = Field(default="AI")
+
+    # Internal state
+    _client: Optional[XacheClient] = None
+    _session_id: Optional[str] = None
+
+    class Config:
+        arbitrary_types_allowed = True
+        underscore_attrs_are_private = True
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self._init_client()
+
+    def _init_client(self):
+        """Initialize Xache client"""
+        chain_prefix = "sol" if self.chain == "solana" else "evm"
+        did = f"did:agent:{chain_prefix}:{self.wallet_address.lower()}"
+
+        self._client = XacheClient(
+            api_url=self.api_url,
+            did=did,
+            private_key=self.private_key,
+        )
+
+        # Generate session ID for this memory instance
+        import hashlib
+        import time
+        session_data = f"{did}:{time.time()}"
+        self._session_id = hashlib.sha256(session_data.encode()).hexdigest()[:16]
+
+    @property
+    def memory_variables(self) -> List[str]:
+        """Return memory variables"""
+        return [self.memory_key]
+
+    def load_memory_variables(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
+        """Load memory variables from Xache"""
+
+        async def _load():
+            async with self._client as client:
+                # Retrieve recent memories for this session (filtered by session context)
+                memories = await client.memory.list(
+                    limit=50,
+                    context=f"langchain:session:{self._session_id}"
+                )
+
+                messages = []
+                for mem in memories:
+                    # Parse stored message format
+                    content = mem.get("content", "")
+                    metadata = mem.get("metadata", {})
+
+                    if metadata.get("role") == "human":
+                        messages.append(HumanMessage(content=content))
+                    elif metadata.get("role") == "ai":
+                        messages.append(AIMessage(content=content))
+
+                return messages
+
+        messages = run_sync(_load())
+
+        if self.return_messages:
+            return {self.memory_key: messages}
+        else:
+            # Return as string buffer
+            buffer = ""
+            for msg in messages:
+                if isinstance(msg, HumanMessage):
+                    buffer += f"{self.human_prefix}: {msg.content}\n"
+                elif isinstance(msg, AIMessage):
+                    buffer += f"{self.ai_prefix}: {msg.content}\n"
+            return {self.memory_key: buffer.strip()}
+
+    def save_context(self, inputs: Dict[str, Any], outputs: Dict[str, str]) -> None:
+        """Save context to Xache memory"""
+
+        async def _save():
+            async with self._client as client:
+                # Extract input and output
+                input_key = list(inputs.keys())[0] if inputs else "input"
+                output_key = list(outputs.keys())[0] if outputs else "output"
+
+                human_input = inputs.get(input_key, "")
+                ai_output = outputs.get(output_key, "")
+
+                # Store human message with session context
+                if human_input:
+                    await client.memory.store(
+                        content=human_input,
+                        context=f"langchain:session:{self._session_id}",
+                        metadata={
+                            "role": "human",
+                            "session_id": self._session_id,
+                            "source": "langchain",
+                        }
+                    )
+
+                # Store AI message with session context
+                if ai_output:
+                    await client.memory.store(
+                        content=ai_output,
+                        context=f"langchain:session:{self._session_id}",
+                        metadata={
+                            "role": "ai",
+                            "session_id": self._session_id,
+                            "source": "langchain",
+                        }
+                    )
+
+        run_sync(_save())
+
+    def clear(self) -> None:
+        """Clear memory (marks as deleted in Xache)"""
+        # Note: Xache uses soft delete - memories are marked deleted but retained
+        # for receipt verification
+        self._session_id = None
+        self._init_client()  # Reset with new session
+
+
+class XacheConversationBufferMemory(XacheMemory):
+    """Alias for XacheMemory for familiar naming"""
+    pass