@ai.ntellect/core 0.1.3 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +0 -1
- package/dist/llm/evaluator/context.js +5 -1
- package/dist/llm/evaluator/index.js +14 -4
- package/dist/memory/persistent.d.ts +60 -0
- package/dist/memory/persistent.js +207 -0
- package/dist/test.js +2 -2
- package/index.ts +0 -1
- package/llm/evaluator/context.ts +5 -1
- package/llm/evaluator/index.ts +15 -4
- package/memory/persistent.ts +300 -0
- package/package.json +1 -1
- package/test.ts +2 -3
- package/memory/persistent.d.ts +0 -28
- package/memory/persistent.js +0 -198
package/dist/index.d.ts
CHANGED

package/dist/llm/evaluator/context.js
CHANGED
@@ -9,6 +9,7 @@ exports.evaluatorContext = {
         "Verify if all required actions were executed successfully",
         "Check if the results match the initial goal",
         "Identify any missing or incomplete information",
+        "Examples of relavant information: link symbol to token address, name to wallet, etc.",
     ],
     warnings: [
         "NEVER modify the results directly",
@@ -30,7 +31,10 @@ exports.evaluatorContext = {
     1. Success status with explanation (no action needed)
     2. Next actions needed (if any)
     3. Why you are doing the next actions or why you are not doing them
-    4. Extract relevant
+    4. Extract relevant information to remember. No need to remember specific numbers.
+    5. If there are no important results, let importantToRemembers be empty. No need to say something like "No relevant information found".
+    6. For each facts, generate a hypothetical query to search in the persistent memory.
+    7. For each facts, generate a memoryType (You have 3 memory types: episodic, semantic, procedural)
     `;
   },
 };
package/dist/llm/evaluator/index.js
CHANGED
@@ -25,7 +25,9 @@ class Evaluator {
             }),
         })),
         why: zod_1.z.string(),
+        isImportantToRemember: zod_1.z.boolean(),
         importantToRemembers: zod_1.z.array(zod_1.z.object({
+            memoryType: zod_1.z.string(),
             hypotheticalQuery: zod_1.z.string(),
             result: zod_1.z.string(),
         })),
@@ -40,10 +42,18 @@ class Evaluator {
                 parameters: action.parameters || {},
             })),
         };
-        if (validatedResponse.
+        if (validatedResponse.isImportantToRemember) {
            for (const item of validatedResponse.importantToRemembers) {
                // Check if the item is already in the memory
-                const memories = await this.memory.findBestMatches(item.hypotheticalQuery
+                const memories = await this.memory.findBestMatches(item.hypotheticalQuery, {
+                    similarityThreshold: 95,
+                });
+                if (memories.length > 0) {
+                    console.log("Similar memorie found, no need to remember", {
+                        memories,
+                    });
+                    continue;
+                }
                if (memories.length === 0) {
                    console.log("Adding to memory", {
                        query: item.hypotheticalQuery,
@@ -51,10 +61,10 @@ class Evaluator {
                });
                await this.memory.storeMemory({
                    id: crypto.randomUUID(),
-                    purpose:
+                    purpose: item.memoryType,
                    query: item.hypotheticalQuery,
                    data: item.result,
-                    scope: types_1.MemoryScope.
+                    scope: types_1.MemoryScope.GLOBAL,
                    createdAt: new Date(),
                });
            }
package/dist/memory/persistent.d.ts
ADDED
@@ -0,0 +1,60 @@
+import { Memory, MemoryScope } from "../types";
+interface SearchOptions {
+    scope?: MemoryScope;
+    userId?: string;
+    maxResults?: number;
+    similarityThreshold?: number;
+}
+interface ProcessedChunk {
+    content: string;
+    embedding: number[];
+}
+/**
+ * Handles persistent memory storage using Meilisearch API
+ */
+export declare class PersistentMemory {
+    private readonly host;
+    private readonly apiKey;
+    private readonly INDEX_PREFIX;
+    constructor(options: {
+        host: string;
+        apiKey: string;
+        indexPrefix?: string;
+    });
+    /**
+     * Initialize indexes
+     */
+    init(): Promise<void>;
+    /**
+     * Make API request to Meilisearch
+     */
+    private _makeRequest;
+    /**
+     * Get index name based on scope and userId
+     */
+    private _getIndexName;
+    /**
+     * Get or create an index with proper settings
+     */
+    private _getOrCreateIndex;
+    processContent(content: string): Promise<ProcessedChunk[]>;
+    /**
+     * Store a memory in the database
+     */
+    storeMemory(memory: Memory): Promise<unknown>;
+    /**
+     * Find best matching memories
+     */
+    findBestMatches(query: string, options?: SearchOptions): Promise<{
+        data: any;
+        purpose: string;
+        query: string;
+        chunk: string;
+        similarityPercentage: number;
+    }[]>;
+    /**
+     * Delete memories for a given scope and user
+     */
+    deleteMemories(scope: MemoryScope, userId?: string): Promise<unknown>;
+}
+export {};
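The declaration file above is the public surface of the rewritten memory layer. A minimal usage sketch of that API follows; the import paths, Meilisearch host, API key, and the stored values are illustrative assumptions, not part of the package:

import { PersistentMemory } from "./dist/memory/persistent"; // assumed path
import { MemoryScope } from "./dist/types";                   // assumed path

const memory = new PersistentMemory({
  host: "http://localhost:7700", // placeholder local Meilisearch instance
  apiKey: "aSampleMasterKey",    // placeholder key, matching the test files
});

await memory.init();

await memory.storeMemory({
  id: crypto.randomUUID(),
  purpose: "semantic",                                // one of the three memory types
  query: "Which address does the ETH symbol map to?", // invented example fact
  data: "ETH is the chain's native asset (illustrative).",
  scope: MemoryScope.GLOBAL,
  createdAt: new Date(),
});

const matches = await memory.findBestMatches("ethereum token address", {
  similarityThreshold: 70, // same value the class defaults to when omitted
});
console.log(matches);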
package/dist/memory/persistent.js
ADDED
@@ -0,0 +1,207 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PersistentMemory = void 0;
+const openai_1 = require("@ai-sdk/openai");
+const ai_1 = require("ai");
+const text_splitter_1 = require("langchain/text_splitter");
+const types_1 = require("../types");
+/**
+ * Handles persistent memory storage using Meilisearch API
+ */
+class PersistentMemory {
+    constructor(options) {
+        this.host = options.host;
+        this.apiKey = options.apiKey;
+        this.INDEX_PREFIX = options.indexPrefix || "memory_";
+    }
+    /**
+     * Initialize indexes
+     */
+    async init() {
+        // Create global index
+        await this._getOrCreateIndex(this._getIndexName(types_1.MemoryScope.GLOBAL));
+        // Create user index
+        await this._getOrCreateIndex(this._getIndexName(types_1.MemoryScope.USER));
+    }
+    /**
+     * Make API request to Meilisearch
+     */
+    async _makeRequest(path, options = {}) {
+        const url = `${this.host}${path}`;
+        console.log("Making request to:", url);
+        const response = await fetch(url, {
+            ...options,
+            headers: {
+                "Content-Type": "application/json",
+                Authorization: `Bearer ${this.apiKey}`,
+                ...options.headers,
+            },
+        });
+        if (!response.ok) {
+            console.log({ response });
+            throw new Error(`Meilisearch API error: ${response.statusText}`);
+        }
+        return response.json();
+    }
+    /**
+     * Get index name based on scope and userId
+     */
+    _getIndexName(scope, userId) {
+        if (scope === "global") {
+            return `${this.INDEX_PREFIX}global`;
+        }
+        return `${this.INDEX_PREFIX}user_${userId}`;
+    }
+    /**
+     * Get or create an index with proper settings
+     */
+    async _getOrCreateIndex(indexName) {
+        try {
+            // Try to create index
+            await this._makeRequest("/indexes", {
+                method: "POST",
+                body: JSON.stringify({
+                    uid: indexName,
+                    primaryKey: "id",
+                }),
+            });
+            // Update index settings
+            const settings = {
+                searchableAttributes: ["query", "purpose", "chunks.content"],
+                sortableAttributes: ["createdAt"],
+            };
+            await this._makeRequest(`/indexes/${indexName}/settings`, {
+                method: "PATCH",
+                body: JSON.stringify(settings),
+            });
+        }
+        catch (error) {
+            // Index might already exist, which is fine
+            if (!error.message.includes("already exists")) {
+                throw error;
+            }
+        }
+    }
+    async processContent(content) {
+        // Split content into chunks
+        const textSplitter = new text_splitter_1.RecursiveCharacterTextSplitter({
+            chunkSize: 1000,
+        });
+        const chunks = await textSplitter.createDocuments([content]);
+        // Generate embeddings for all chunks
+        const { embeddings } = await (0, ai_1.embedMany)({
+            model: openai_1.openai.embedding("text-embedding-3-small"),
+            values: chunks.map((chunk) => chunk.pageContent),
+        });
+        // Create processed chunks with embeddings
+        return chunks.map((chunk, i) => ({
+            content: chunk.pageContent,
+            embedding: embeddings[i],
+        }));
+    }
+    /**
+     * Store a memory in the database
+     */
+    async storeMemory(memory) {
+        const indexName = this._getIndexName(memory.scope, memory.userId);
+        await this._getOrCreateIndex(indexName);
+        const chunks = await this.processContent(memory.query);
+        const document = {
+            ...memory,
+            chunks,
+            createdAt: memory.createdAt.toISOString(),
+        };
+        const response = await this._makeRequest(`/indexes/${indexName}/documents`, {
+            method: "POST",
+            body: JSON.stringify([document]),
+        });
+        console.log("Stored memory response:", response);
+        return response;
+    }
+    /**
+     * Find best matching memories
+     */
+    async findBestMatches(query, options = {}) {
+        console.log("\n🔍 Searching in persistent memory:", query);
+        // Generate embedding for the query
+        const { embedding: queryEmbedding } = await (0, ai_1.embed)({
+            model: openai_1.openai.embedding("text-embedding-3-small"),
+            value: query,
+        });
+        const searchResults = [];
+        // Requête Meilisearch
+        const searchBody = {
+            q: query,
+        };
+        // Search in global memories
+        if (!options.scope || options.scope === "global") {
+            const globalIndex = this._getIndexName(types_1.MemoryScope.GLOBAL);
+            console.log("Searching in global index:", globalIndex);
+            try {
+                const globalResults = await this._makeRequest(`/indexes/${globalIndex}/search`, {
+                    method: "POST",
+                    body: JSON.stringify(searchBody),
+                });
+                if (globalResults?.hits) {
+                    searchResults.push(...globalResults.hits);
+                }
+            }
+            catch (error) {
+                console.error("Error searching global index:", error);
+            }
+        }
+        // Search in user memories
+        if (options.userId &&
+            (!options.scope || options.scope === types_1.MemoryScope.USER)) {
+            const userIndex = this._getIndexName(types_1.MemoryScope.USER, options.userId);
+            const userResults = await this._makeRequest(`/indexes/${userIndex}/search`, {
+                method: "POST",
+                body: JSON.stringify(searchBody),
+            });
+            if (userResults.hits) {
+                searchResults.push(...userResults.hits);
+            }
+        }
+        console.log("Found in persistent memory:", searchResults);
+        // Process and filter results using cosine similarity
+        const results = searchResults
+            .flatMap((hit) => {
+            // Calculate similarities for each chunk
+            const chunkSimilarities = hit.chunks.map((chunk) => ({
+                data: hit.data,
+                purpose: hit.purpose,
+                query: hit.query,
+                chunk: chunk.content,
+                similarityPercentage: ((0, ai_1.cosineSimilarity)(queryEmbedding, chunk.embedding) + 1) * 50,
+            }));
+            // Return the chunk with highest similarity
+            return chunkSimilarities.reduce((best, current) => current.similarityPercentage > best.similarityPercentage
+                ? current
+                : best, chunkSimilarities[0]);
+        })
+            .filter((match) => match.similarityPercentage >= (options.similarityThreshold || 70))
+            .sort((a, b) => b.similarityPercentage - a.similarityPercentage);
+        // Log results
+        if (results.length > 0) {
+            console.log("\n✨ Best matches found:");
+            results.forEach((match) => {
+                console.log(`- ${match.query} : ${match.similarityPercentage.toFixed(2)}% (${match.purpose})`);
+                console.log(`  Matching content: "${match.chunk}"`);
+            });
+        }
+        else {
+            console.log("No matches found");
+        }
+        return results;
+    }
+    /**
+     * Delete memories for a given scope and user
+     */
+    async deleteMemories(scope, userId) {
+        const indexName = this._getIndexName(scope, userId);
+        return this._makeRequest(`/indexes/${indexName}`, {
+            method: "DELETE",
+        });
+    }
+}
+exports.PersistentMemory = PersistentMemory;
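Note that the compiled class above talks to Meilisearch through plain fetch calls (POST /indexes, PATCH /indexes/:uid/settings, POST /indexes/:uid/documents, POST /indexes/:uid/search, DELETE /indexes/:uid) instead of the meilisearch client used in 0.1.3. A rough sketch of the same request pattern, with placeholder host, key, and index name:

const host = "http://localhost:7700"; // placeholder
const apiKey = "aSampleMasterKey";    // placeholder
const headers = {
  "Content-Type": "application/json",
  Authorization: `Bearer ${apiKey}`,
};

// Create an index, as _getOrCreateIndex does
await fetch(`${host}/indexes`, {
  method: "POST",
  headers,
  body: JSON.stringify({ uid: "memory_global", primaryKey: "id" }),
});

// Keyword search, as findBestMatches does before re-ranking by embedding similarity
const res = await fetch(`${host}/indexes/memory_global/search`, {
  method: "POST",
  headers,
  body: JSON.stringify({ q: "ethereum" }),
});
const { hits } = await res.json();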
package/dist/test.js
CHANGED
@@ -107,7 +107,7 @@ exports.getRssNews = {
         host: "http://localhost:7700",
         apiKey: "aSampleMasterKey",
     });
-    const orchestrator = new orchestrator_1.Orchestrator([
+    const orchestrator = new orchestrator_1.Orchestrator([], memory);
     const agent = new agent_1.Agent({
         user: {
             id: "1",
@@ -117,7 +117,7 @@ exports.getRssNews = {
         stream: false,
         maxEvaluatorIteration: 1,
     });
-    const prompt = "
+    const prompt = "fais moi une analyse ethereum";
     const context = prompt;
     const result = await agent.process(prompt, context, {
         onMessage: (message) => {
package/index.ts
CHANGED
package/llm/evaluator/context.ts
CHANGED
@@ -8,6 +8,7 @@ export const evaluatorContext = {
       "Verify if all required actions were executed successfully",
       "Check if the results match the initial goal",
       "Identify any missing or incomplete information",
+      "Examples of relavant information: link symbol to token address, name to wallet, etc.",
     ],
     warnings: [
       "NEVER modify the results directly",
@@ -29,7 +30,10 @@ export const evaluatorContext = {
     1. Success status with explanation (no action needed)
     2. Next actions needed (if any)
     3. Why you are doing the next actions or why you are not doing them
-    4. Extract relevant
+    4. Extract relevant information to remember. No need to remember specific numbers.
+    5. If there are no important results, let importantToRemembers be empty. No need to say something like "No relevant information found".
+    6. For each facts, generate a hypothetical query to search in the persistent memory.
+    7. For each facts, generate a memoryType (You have 3 memory types: episodic, semantic, procedural)
     `;
   },
 };
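These new prompt rules pair with the schema changes in llm/evaluator/index.ts below: the model is asked to flag memorable facts, tag each one with a memory type, and attach a hypothetical retrieval query. A hypothetical response fragment shaped by those rules might look like the following (the field names come from the schema in the diff; the values and the omission of the other fields are illustrative only):

const exampleEvaluation = {
  // ...other evaluator fields omitted...
  why: "The goal was reached; no further actions are needed.",
  isImportantToRemember: true,
  importantToRemembers: [
    {
      memoryType: "semantic",
      hypotheticalQuery: "Which token address does the ETH symbol map to?",
      result: "ETH is the chain's native asset rather than an ERC-20 token.",
    },
  ],
};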
package/llm/evaluator/index.ts
CHANGED
@@ -30,8 +30,10 @@ export class Evaluator {
         })
       ),
       why: z.string(),
+      isImportantToRemember: z.boolean(),
       importantToRemembers: z.array(
         z.object({
+          memoryType: z.string(),
           hypotheticalQuery: z.string(),
           result: z.string(),
         })
@@ -49,12 +51,21 @@ export class Evaluator {
       })),
     };

-    if (validatedResponse.
+    if (validatedResponse.isImportantToRemember) {
       for (const item of validatedResponse.importantToRemembers) {
         // Check if the item is already in the memory
         const memories = await this.memory.findBestMatches(
-          item.hypotheticalQuery
+          item.hypotheticalQuery,
+          {
+            similarityThreshold: 95,
+          }
         );
+        if (memories.length > 0) {
+          console.log("Similar memorie found, no need to remember", {
+            memories,
+          });
+          continue;
+        }
         if (memories.length === 0) {
           console.log("Adding to memory", {
             query: item.hypotheticalQuery,
@@ -62,10 +73,10 @@ export class Evaluator {
           });
           await this.memory.storeMemory({
             id: crypto.randomUUID(),
-            purpose:
+            purpose: item.memoryType,
             query: item.hypotheticalQuery,
             data: item.result,
-            scope: MemoryScope.
+            scope: MemoryScope.GLOBAL,
             createdAt: new Date(),
           });
         }
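Condensed, the evaluator's new store-if-new flow is: for each extracted fact, search persistent memory with a 95% similarity threshold, skip facts that already have a near-duplicate, and store the rest globally with the fact's memory type as its purpose. A standalone sketch of that flow (not package code; the import paths are assumptions):

import { PersistentMemory } from "./memory/persistent"; // assumed path
import { MemoryScope } from "./types";                   // assumed path

async function rememberIfNew(
  memory: PersistentMemory,
  item: { memoryType: string; hypotheticalQuery: string; result: string }
) {
  // A hit at >= 95% similarity means the fact is effectively already stored.
  const duplicates = await memory.findBestMatches(item.hypotheticalQuery, {
    similarityThreshold: 95,
  });
  if (duplicates.length > 0) return;

  await memory.storeMemory({
    id: crypto.randomUUID(),
    purpose: item.memoryType, // "episodic", "semantic" or "procedural"
    query: item.hypotheticalQuery,
    data: item.result,
    scope: MemoryScope.GLOBAL, // the evaluator always stores globally here
    createdAt: new Date(),
  });
}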
package/memory/persistent.ts
ADDED
@@ -0,0 +1,300 @@
+import { openai } from "@ai-sdk/openai";
+import { cosineSimilarity, embed, embedMany } from "ai";
+import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
+import { Memory, MemoryScope } from "../types";
+
+interface SearchOptions {
+  scope?: MemoryScope;
+  userId?: string;
+  maxResults?: number;
+  similarityThreshold?: number;
+}
+
+interface MeilisearchSettings {
+  searchableAttributes?: string[];
+  sortableAttributes?: string[];
+}
+
+interface MeilisearchResponse {
+  hits: Array<{
+    query: string;
+    purpose: string;
+    data?: any;
+    chunks: Array<{
+      content: string;
+      embedding: number[];
+    }>;
+  }>;
+}
+
+interface SearchParams {
+  q?: string;
+  offset?: number;
+  limit?: number;
+  filter?: string | string[];
+  facets?: string[];
+  attributesToRetrieve?: string[];
+  attributesToSearchOn?: string[];
+  sort?: string[];
+  matchingStrategy?: "last" | "all" | "frequency";
+}
+
+interface ProcessedChunk {
+  content: string;
+  embedding: number[];
+}
+
+/**
+ * Handles persistent memory storage using Meilisearch API
+ */
+export class PersistentMemory {
+  private readonly host: string;
+  private readonly apiKey: string;
+  private readonly INDEX_PREFIX: string;
+
+  constructor(options: { host: string; apiKey: string; indexPrefix?: string }) {
+    this.host = options.host;
+    this.apiKey = options.apiKey;
+    this.INDEX_PREFIX = options.indexPrefix || "memory_";
+  }
+
+  /**
+   * Initialize indexes
+   */
+  async init() {
+    // Create global index
+    await this._getOrCreateIndex(this._getIndexName(MemoryScope.GLOBAL));
+
+    // Create user index
+    await this._getOrCreateIndex(this._getIndexName(MemoryScope.USER));
+  }
+
+  /**
+   * Make API request to Meilisearch
+   */
+  private async _makeRequest<T = unknown>(
+    path: string,
+    options: RequestInit = {}
+  ): Promise<T> {
+    const url = `${this.host}${path}`;
+    console.log("Making request to:", url);
+    const response = await fetch(url, {
+      ...options,
+      headers: {
+        "Content-Type": "application/json",
+        Authorization: `Bearer ${this.apiKey}`,
+        ...options.headers,
+      },
+    });
+
+    if (!response.ok) {
+      console.log({ response });
+      throw new Error(`Meilisearch API error: ${response.statusText}`);
+    }
+
+    return response.json() as Promise<T>;
+  }
+
+  /**
+   * Get index name based on scope and userId
+   */
+  private _getIndexName(scope: MemoryScope, userId?: string): string {
+    if (scope === "global") {
+      return `${this.INDEX_PREFIX}global`;
+    }
+    return `${this.INDEX_PREFIX}user_${userId}`;
+  }
+
+  /**
+   * Get or create an index with proper settings
+   */
+  private async _getOrCreateIndex(indexName: string) {
+    try {
+      // Try to create index
+      await this._makeRequest("/indexes", {
+        method: "POST",
+        body: JSON.stringify({
+          uid: indexName,
+          primaryKey: "id",
+        }),
+      });
+
+      // Update index settings
+      const settings: MeilisearchSettings = {
+        searchableAttributes: ["query", "purpose", "chunks.content"],
+        sortableAttributes: ["createdAt"],
+      };
+
+      await this._makeRequest(`/indexes/${indexName}/settings`, {
+        method: "PATCH",
+        body: JSON.stringify(settings),
+      });
+    } catch (error: any) {
+      // Index might already exist, which is fine
+      if (!error.message.includes("already exists")) {
+        throw error;
+      }
+    }
+  }
+
+  async processContent(content: string): Promise<ProcessedChunk[]> {
+    // Split content into chunks
+    const textSplitter = new RecursiveCharacterTextSplitter({
+      chunkSize: 1000,
+    });
+    const chunks = await textSplitter.createDocuments([content]);
+
+    // Generate embeddings for all chunks
+    const { embeddings } = await embedMany({
+      model: openai.embedding("text-embedding-3-small"),
+      values: chunks.map((chunk) => chunk.pageContent),
+    });
+
+    // Create processed chunks with embeddings
+    return chunks.map((chunk, i) => ({
+      content: chunk.pageContent,
+      embedding: embeddings[i],
+    }));
+  }
+
+  /**
+   * Store a memory in the database
+   */
+  async storeMemory(memory: Memory) {
+    const indexName = this._getIndexName(memory.scope, memory.userId);
+    await this._getOrCreateIndex(indexName);
+
+    const chunks = await this.processContent(memory.query);
+
+    const document = {
+      ...memory,
+      chunks,
+      createdAt: memory.createdAt.toISOString(),
+    };
+
+    const response = await this._makeRequest(
+      `/indexes/${indexName}/documents`,
+      {
+        method: "POST",
+        body: JSON.stringify([document]),
+      }
+    );
+    console.log("Stored memory response:", response);
+    return response;
+  }
+
+  /**
+   * Find best matching memories
+   */
+  async findBestMatches(query: string, options: SearchOptions = {}) {
+    console.log("\n🔍 Searching in persistent memory:", query);
+
+    // Generate embedding for the query
+    const { embedding: queryEmbedding } = await embed({
+      model: openai.embedding("text-embedding-3-small"),
+      value: query,
+    });
+
+    const searchResults = [];
+
+    // Requête Meilisearch
+    const searchBody = {
+      q: query,
+    };
+
+    // Search in global memories
+    if (!options.scope || options.scope === "global") {
+      const globalIndex = this._getIndexName(MemoryScope.GLOBAL);
+      console.log("Searching in global index:", globalIndex);
+      try {
+        const globalResults = await this._makeRequest<MeilisearchResponse>(
+          `/indexes/${globalIndex}/search`,
+          {
+            method: "POST",
+            body: JSON.stringify(searchBody),
+          }
+        );
+        if (globalResults?.hits) {
+          searchResults.push(...globalResults.hits);
+        }
+      } catch (error) {
+        console.error("Error searching global index:", error);
+      }
+    }
+
+    // Search in user memories
+    if (
+      options.userId &&
+      (!options.scope || options.scope === MemoryScope.USER)
+    ) {
+      const userIndex = this._getIndexName(MemoryScope.USER, options.userId);
+      const userResults = await this._makeRequest<MeilisearchResponse>(
+        `/indexes/${userIndex}/search`,
+        {
+          method: "POST",
+          body: JSON.stringify(searchBody),
+        }
+      );
+      if (userResults.hits) {
+        searchResults.push(...userResults.hits);
+      }
+    }
+
+    console.log("Found in persistent memory:", searchResults);
+
+    // Process and filter results using cosine similarity
+    const results = searchResults
+      .flatMap((hit) => {
+        // Calculate similarities for each chunk
+        const chunkSimilarities = hit.chunks.map((chunk) => ({
+          data: hit.data,
+          purpose: hit.purpose,
+          query: hit.query,
+          chunk: chunk.content,
+          similarityPercentage:
+            (cosineSimilarity(queryEmbedding, chunk.embedding) + 1) * 50,
+        }));
+
+        // Return the chunk with highest similarity
+        return chunkSimilarities.reduce(
+          (best, current) =>
+            current.similarityPercentage > best.similarityPercentage
+              ? current
+              : best,
+          chunkSimilarities[0]
+        );
+      })
+      .filter(
+        (match) =>
+          match.similarityPercentage >= (options.similarityThreshold || 70)
+      )
+      .sort((a, b) => b.similarityPercentage - a.similarityPercentage);
+
+    // Log results
+    if (results.length > 0) {
+      console.log("\n✨ Best matches found:");
+      results.forEach((match) => {
+        console.log(
+          `- ${match.query} : ${match.similarityPercentage.toFixed(2)}% (${
+            match.purpose
+          })`
+        );
+        console.log(`  Matching content: "${match.chunk}"`);
+      });
+    } else {
+      console.log("No matches found");
+    }
+
+    return results;
+  }
+
+  /**
+   * Delete memories for a given scope and user
+   */
+  async deleteMemories(scope: MemoryScope, userId?: string) {
+    const indexName = this._getIndexName(scope, userId);
+    return this._makeRequest(`/indexes/${indexName}`, {
+      method: "DELETE",
+    });
+  }
+}
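The percentage used for filtering above is a direct rescaling of cosine similarity: cosineSimilarity returns a value in [-1, 1], and (x + 1) * 50 maps that range onto [0, 100]. So the default similarityThreshold of 70 corresponds to a raw cosine of 0.4, and the 95 used by the evaluator corresponds to 0.9:

// Rescaling used in findBestMatches: cosine in [-1, 1] -> percentage in [0, 100]
const toPercentage = (cosine: number): number => (cosine + 1) * 50;

toPercentage(1.0);  // 100 — identical direction
toPercentage(0.9);  // 95  — the evaluator's deduplication threshold
toPercentage(0.4);  // 70  — the default similarityThreshold in findBestMatches
toPercentage(-1.0); // 0   — opposite direction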
package/package.json
CHANGED
package/test.ts
CHANGED
@@ -127,7 +127,7 @@ export const getRssNews = {
     host: "http://localhost:7700",
     apiKey: "aSampleMasterKey",
   });
-  const orchestrator = new Orchestrator([
+  const orchestrator = new Orchestrator([], memory);
   const agent = new Agent({
     user: {
       id: "1",
@@ -138,9 +138,8 @@ export const getRssNews = {
     maxEvaluatorIteration: 1,
   });

-  const prompt = "
+  const prompt = "fais moi une analyse ethereum";
   const context = prompt;
-
   const result = await agent.process(prompt, context, {
     onMessage: (message) => {
       console.log({ message });
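The test change above shows the new wiring: a PersistentMemory instance is created first and then passed to the Orchestrator as a second constructor argument (in 0.1.3 the call began with only an action array; the old line is truncated in the diff). A minimal sketch of that setup, with the same placeholder credentials as test.ts and assumed import paths:

import { Orchestrator } from "./llm/orchestrator";      // assumed path
import { PersistentMemory } from "./memory/persistent";

const memory = new PersistentMemory({
  host: "http://localhost:7700", // local Meilisearch, as in test.ts
  apiKey: "aSampleMasterKey",
});

// No actions registered in this sketch; the memory is now the second argument.
const orchestrator = new Orchestrator([], memory);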
package/memory/persistent.d.ts
DELETED
@@ -1,28 +0,0 @@
-import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
-import { Index, MeiliSearch } from "meilisearch";
-import {
-  MatchOptions,
-  MatchResult,
-  Memory,
-  MemoryChunk,
-  MemoryScopeType,
-} from "../types";
-
-export class PersistentMemory {
-  private client: MeiliSearch;
-  private readonly INDEX_PREFIX: string;
-  private textSplitter: RecursiveCharacterTextSplitter;
-
-  constructor(options: PersistentMemoryOptions);
-
-  private _getIndexName(scope: MemoryScopeType, userId?: string): string;
-  private _getOrCreateIndex(indexName: string): Promise<Index>;
-  private _processContent(content: string): Promise<MemoryChunk[]>;
-
-  storeMemory(memory: Memory): Promise<void>;
-  findBestMatches(
-    query: string,
-    options?: MatchOptions
-  ): Promise<MatchResult[]>;
-  deleteMemories(scope: MemoryScopeType, userId?: string): Promise<void>;
-}
package/memory/persistent.js
DELETED
@@ -1,198 +0,0 @@
-import { openai } from "@ai-sdk/openai";
-import { cosineSimilarity, embed, embedMany } from "ai";
-import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
-import { MeiliSearch } from "meilisearch";
-
-const MemoryScope = {
-  GLOBAL: "global",
-  USER: "user",
-};
-
-export class PersistentMemory {
-  /**
-   * @param {Object} options
-   * @param {string} options.host - MeiliSearch host URL
-   * @param {string} options.apiKey - MeiliSearch API key
-   * @param {string} [options.indexPrefix="memory_"] - Prefix for index names
-   */
-  constructor(options) {
-    this.client = new MeiliSearch({
-      host: options.host,
-      apiKey: options.apiKey,
-    });
-    this.INDEX_PREFIX = options.indexPrefix || "memory_";
-    this.textSplitter = new RecursiveCharacterTextSplitter({
-      chunkSize: 1000,
-    });
-  }
-
-  /**
-   * Get the index name based on scope and userId
-   * @private
-   */
-  _getIndexName(scope, userId) {
-    if (scope === MemoryScope.GLOBAL) {
-      return `${this.INDEX_PREFIX}global`;
-    }
-    return `${this.INDEX_PREFIX}user_${userId}`;
-  }
-
-  /**
-   * Get or create an index with proper settings
-   * @private
-   */
-  async _getOrCreateIndex(indexName) {
-    const index = this.client.index(indexName);
-
-    try {
-      await this.client.createIndex(indexName, { primaryKey: "id" });
-      await index.updateSettings({
-        searchableAttributes: ["query", "purpose", "chunks.content"],
-        sortableAttributes: ["createdAt"],
-      });
-    } catch (error) {
-      // Index might already exist, which is fine
-      if (!error.message.includes("already exists")) {
-        throw error;
-      }
-    }
-
-    return index;
-  }
-
-  /**
-   * Process content into chunks with embeddings
-   * @private
-   */
-  async _processContent(content) {
-    // Split content into chunks
-    const chunks = await this.textSplitter.createDocuments([content]);
-
-    // Generate embeddings for all chunks
-    const { embeddings } = await embedMany({
-      model: openai.embedding("text-embedding-3-small"),
-      values: chunks.map((chunk) => chunk.pageContent),
-    });
-
-    // Create processed chunks with embeddings
-    return chunks.map((chunk, i) => ({
-      content: chunk.pageContent,
-      embedding: embeddings[i],
-    }));
-  }
-
-  /**
-   * Store a memory in the database
-   * @param {Object} memory - Memory object to store
-   */
-  async storeMemory(memory) {
-    const indexName = this._getIndexName(memory.scope, memory.userId);
-    const index = await this._getOrCreateIndex(indexName);
-
-    // Process the query into chunks with embeddings
-    const chunks = await this._processContent(memory.query);
-
-    const result = await index.addDocuments([
-      {
-        ...memory,
-        chunks,
-        createdAt: memory.createdAt.toISOString(),
-      },
-    ]);
-    return result;
-  }
-
-  /**
-   * Find best matching memories using cosine similarity
-   * @param {string} query - Search query
-   * @param {Object} options - Search options
-   */
-  async findBestMatches(query, options = {}) {
-    console.log("\n🔍 Searching in persistent memory:", query);
-
-    // Generate embedding for the query
-    const { embedding: queryEmbedding } = await embed({
-      model: openai.embedding("text-embedding-3-small"),
-      value: query,
-    });
-    const searchResults = [];
-
-    // Search in global memories
-    if (!options.scope || options.scope === MemoryScope.GLOBAL) {
-      const globalIndex = await this._getOrCreateIndex(
-        this._getIndexName(MemoryScope.GLOBAL)
-      );
-      const globalResults = await globalIndex.search(query, {
-        limit: options.maxResults || 10,
-      });
-      searchResults.push(...globalResults.hits);
-    }
-
-    // Search in user memories
-    if (
-      options.userId &&
-      (!options.scope || options.scope === MemoryScope.USER)
-    ) {
-      const userIndex = await this._getOrCreateIndex(
-        this._getIndexName(MemoryScope.USER, options.userId)
-      );
-      const userResults = await userIndex.search(query, {
-        limit: options.maxResults || 10,
-      });
-      searchResults.push(...userResults.hits);
-    }
-
-    // Process and filter results using cosine similarity
-    const results = searchResults
-      .flatMap((hit) => {
-        // Calculate similarities for each chunk
-        console.log(hit);
-        const chunkSimilarities = hit.chunks.map((chunk) => ({
-          data: hit.data,
-          purpose: hit.purpose,
-          chunk: chunk.content,
-          similarityPercentage:
-            (cosineSimilarity(queryEmbedding, chunk.embedding) + 1) * 50,
-        }));
-        console.log({ chunkSimilarities });
-        // Return the chunk with highest similarity
-        return chunkSimilarities.reduce(
-          (best, current) =>
-            current.similarityPercentage > best.similarityPercentage
-              ? current
-              : best,
-          chunkSimilarities[0]
-        );
-      })
-      .filter(
-        (match) =>
-          match.similarityPercentage >= (options.similarityThreshold || 70)
-      )
-      .sort((a, b) => b.similarityPercentage - a.similarityPercentage);
-
-    // Log results
-    if (results.length > 0) {
-      console.log("\n✨ Best matches found:");
-      results.forEach((match) => {
-        console.log(
-          `- ${match.purpose} (${match.similarityPercentage.toFixed(2)}%)`
-        );
-        console.log(`  Matching chunk: "${match.chunk}"`);
-      });
-    } else {
-      console.log("No matches found");
-    }
-
-    return results;
-  }
-
-  /**
-   * Delete memories for a given scope and user
-   * @param {string} scope - Memory scope
-   * @param {string} [userId] - User ID for user-specific memories
-   */
-  async deleteMemories(scope, userId) {
-    const indexName = this._getIndexName(scope, userId);
-    await this.client.deleteIndex(indexName);
-  }
-}