@ai.ntellect/core 0.0.36 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/agent/index.ts +18 -9
- package/dist/agent/index.d.ts +8 -5
- package/dist/agent/index.js +7 -5
- package/dist/index.d.ts +2 -1
- package/dist/index.js +2 -1
- package/dist/llm/evaluator/context.js +1 -0
- package/dist/llm/evaluator/index.d.ts +3 -1
- package/dist/llm/evaluator/index.js +52 -1
- package/dist/llm/orchestrator/context.js +1 -4
- package/dist/llm/orchestrator/index.d.ts +3 -1
- package/dist/llm/orchestrator/index.js +16 -2
- package/dist/memory/{index.d.ts → cache.d.ts} +3 -3
- package/dist/memory/{index.js → cache.js} +4 -4
- package/dist/test.js +8 -2
- package/dist/types.d.ts +22 -2
- package/index.ts +2 -1
- package/llm/evaluator/context.ts +1 -0
- package/llm/evaluator/index.ts +62 -2
- package/llm/orchestrator/context.ts +1 -4
- package/llm/orchestrator/index.ts +19 -2
- package/memory/{index.ts → cache.ts} +14 -12
- package/memory/persistent.d.ts +28 -0
- package/memory/persistent.js +198 -0
- package/package.json +1 -1
- package/test.ts +8 -2
- package/types.ts +26 -2
package/agent/index.ts
CHANGED
@@ -1,7 +1,8 @@
|
|
1
1
|
import { Evaluator } from "../llm/evaluator";
|
2
2
|
import { Orchestrator } from "../llm/orchestrator";
|
3
3
|
import { Synthesizer } from "../llm/synthesizer";
|
4
|
-
import {
|
4
|
+
import { CacheMemory } from "../memory/cache";
|
5
|
+
import { PersistentMemory } from "../memory/persistent";
|
5
6
|
import { ActionSchema, AgentEvent, MemoryScope, User } from "../types";
|
6
7
|
import { QueueItemTransformer } from "../utils/queue-item-transformer";
|
7
8
|
import { ActionHandler } from "./handlers/ActionHandler";
|
@@ -12,27 +13,31 @@ export class Agent {
|
|
12
13
|
private readonly actionHandler: ActionHandler;
|
13
14
|
private readonly user: User;
|
14
15
|
private readonly orchestrator: Orchestrator;
|
15
|
-
private readonly
|
16
|
+
private readonly persistentMemory: PersistentMemory;
|
17
|
+
private readonly cacheMemory: CacheMemory | undefined;
|
16
18
|
private readonly stream: boolean;
|
17
19
|
private readonly maxEvaluatorIteration: number;
|
18
|
-
private
|
20
|
+
private evaluatorIteration = 0;
|
19
21
|
|
20
22
|
constructor({
|
21
23
|
user,
|
22
24
|
orchestrator,
|
23
|
-
|
25
|
+
persistentMemory,
|
26
|
+
cacheMemory,
|
24
27
|
stream,
|
25
28
|
maxEvaluatorIteration = 1,
|
26
29
|
}: {
|
27
30
|
user: User;
|
28
31
|
orchestrator: Orchestrator;
|
29
|
-
|
32
|
+
persistentMemory: PersistentMemory;
|
33
|
+
cacheMemory?: CacheMemory;
|
30
34
|
stream: boolean;
|
31
35
|
maxEvaluatorIteration: number;
|
32
36
|
}) {
|
33
37
|
this.user = user;
|
34
38
|
this.orchestrator = orchestrator;
|
35
|
-
this.
|
39
|
+
this.cacheMemory = cacheMemory;
|
40
|
+
this.persistentMemory = persistentMemory;
|
36
41
|
this.stream = stream;
|
37
42
|
this.maxEvaluatorIteration = maxEvaluatorIteration;
|
38
43
|
this.actionHandler = new ActionHandler();
|
@@ -90,7 +95,10 @@ export class Agent {
|
|
90
95
|
return this.handleActionResults({ ...actionsResult, initialPrompt });
|
91
96
|
}
|
92
97
|
|
93
|
-
const evaluator = new Evaluator(
|
98
|
+
const evaluator = new Evaluator(
|
99
|
+
this.orchestrator.tools,
|
100
|
+
this.persistentMemory
|
101
|
+
);
|
94
102
|
const evaluation = await evaluator.process(
|
95
103
|
initialPrompt,
|
96
104
|
contextualizedPrompt,
|
@@ -100,6 +108,7 @@ export class Agent {
|
|
100
108
|
events.onMessage?.(evaluation);
|
101
109
|
|
102
110
|
if (evaluation.nextActions.length > 0) {
|
111
|
+
this.evaluatorIteration++;
|
103
112
|
return this.handleActions(
|
104
113
|
{
|
105
114
|
initialPrompt: contextualizedPrompt,
|
@@ -136,11 +145,11 @@ export class Agent {
|
|
136
145
|
}
|
137
146
|
|
138
147
|
private async findSimilarActions(prompt: string) {
|
139
|
-
if (!this.
|
148
|
+
if (!this.cacheMemory) {
|
140
149
|
return [];
|
141
150
|
}
|
142
151
|
|
143
|
-
return this.
|
152
|
+
return this.cacheMemory.findBestMatches(prompt, {
|
144
153
|
similarityThreshold: this.SIMILARITY_THRESHOLD,
|
145
154
|
maxResults: this.MAX_RESULTS,
|
146
155
|
userId: this.user.id,
|
package/dist/agent/index.d.ts
CHANGED
@@ -1,5 +1,6 @@
|
|
1
1
|
import { Orchestrator } from "../llm/orchestrator";
|
2
|
-
import {
|
2
|
+
import { CacheMemory } from "../memory/cache";
|
3
|
+
import { PersistentMemory } from "../memory/persistent";
|
3
4
|
import { AgentEvent, User } from "../types";
|
4
5
|
export declare class Agent {
|
5
6
|
private readonly SIMILARITY_THRESHOLD;
|
@@ -7,14 +8,16 @@ export declare class Agent {
|
|
7
8
|
private readonly actionHandler;
|
8
9
|
private readonly user;
|
9
10
|
private readonly orchestrator;
|
10
|
-
private readonly
|
11
|
+
private readonly persistentMemory;
|
12
|
+
private readonly cacheMemory;
|
11
13
|
private readonly stream;
|
12
14
|
private readonly maxEvaluatorIteration;
|
13
|
-
private
|
14
|
-
constructor({ user, orchestrator,
|
15
|
+
private evaluatorIteration;
|
16
|
+
constructor({ user, orchestrator, persistentMemory, cacheMemory, stream, maxEvaluatorIteration, }: {
|
15
17
|
user: User;
|
16
18
|
orchestrator: Orchestrator;
|
17
|
-
|
19
|
+
persistentMemory: PersistentMemory;
|
20
|
+
cacheMemory?: CacheMemory;
|
18
21
|
stream: boolean;
|
19
22
|
maxEvaluatorIteration: number;
|
20
23
|
});
|
package/dist/agent/index.js
CHANGED
@@ -7,13 +7,14 @@ const types_1 = require("../types");
|
|
7
7
|
const queue_item_transformer_1 = require("../utils/queue-item-transformer");
|
8
8
|
const ActionHandler_1 = require("./handlers/ActionHandler");
|
9
9
|
class Agent {
|
10
|
-
constructor({ user, orchestrator,
|
10
|
+
constructor({ user, orchestrator, persistentMemory, cacheMemory, stream, maxEvaluatorIteration = 1, }) {
|
11
11
|
this.SIMILARITY_THRESHOLD = 95;
|
12
12
|
this.MAX_RESULTS = 1;
|
13
13
|
this.evaluatorIteration = 0;
|
14
14
|
this.user = user;
|
15
15
|
this.orchestrator = orchestrator;
|
16
|
-
this.
|
16
|
+
this.cacheMemory = cacheMemory;
|
17
|
+
this.persistentMemory = persistentMemory;
|
17
18
|
this.stream = stream;
|
18
19
|
this.maxEvaluatorIteration = maxEvaluatorIteration;
|
19
20
|
this.actionHandler = new ActionHandler_1.ActionHandler();
|
@@ -42,10 +43,11 @@ class Agent {
|
|
42
43
|
if (this.evaluatorIteration >= this.maxEvaluatorIteration) {
|
43
44
|
return this.handleActionResults({ ...actionsResult, initialPrompt });
|
44
45
|
}
|
45
|
-
const evaluator = new evaluator_1.Evaluator(this.orchestrator.tools);
|
46
|
+
const evaluator = new evaluator_1.Evaluator(this.orchestrator.tools, this.persistentMemory);
|
46
47
|
const evaluation = await evaluator.process(initialPrompt, contextualizedPrompt, JSON.stringify(actionsResult.data));
|
47
48
|
events.onMessage?.(evaluation);
|
48
49
|
if (evaluation.nextActions.length > 0) {
|
50
|
+
this.evaluatorIteration++;
|
49
51
|
return this.handleActions({
|
50
52
|
initialPrompt: contextualizedPrompt,
|
51
53
|
contextualizedPrompt: initialPrompt,
|
@@ -71,10 +73,10 @@ class Agent {
|
|
71
73
|
: await synthesizer.process(summaryData);
|
72
74
|
}
|
73
75
|
async findSimilarActions(prompt) {
|
74
|
-
if (!this.
|
76
|
+
if (!this.cacheMemory) {
|
75
77
|
return [];
|
76
78
|
}
|
77
|
-
return this.
|
79
|
+
return this.cacheMemory.findBestMatches(prompt, {
|
78
80
|
similarityThreshold: this.SIMILARITY_THRESHOLD,
|
79
81
|
maxResults: this.MAX_RESULTS,
|
80
82
|
userId: this.user.id,
|
package/dist/index.d.ts
CHANGED
package/dist/index.js
CHANGED
@@ -19,4 +19,5 @@ __exportStar(require("./llm/orchestrator"), exports);
|
|
19
19
|
__exportStar(require("./llm/synthesizer"), exports);
|
20
20
|
__exportStar(require("./services/queue"), exports);
|
21
21
|
__exportStar(require("./types"), exports);
|
22
|
-
__exportStar(require("./memory"), exports);
|
22
|
+
__exportStar(require("./memory/cache"), exports);
|
23
|
+
__exportStar(require("./memory/persistent"), exports);
|
@@ -30,6 +30,7 @@ exports.evaluatorContext = {
|
|
30
30
|
1. Success status with explanation (no action needed)
|
31
31
|
2. Next actions needed (if any)
|
32
32
|
3. Why you are doing the next actions or why you are not doing them
|
33
|
+
4. Extract relevant semantic information to remember (eg. token symbols, token addresses, etc.). No need to remember the actions or the news.
|
33
34
|
`;
|
34
35
|
},
|
35
36
|
};
|
@@ -1,7 +1,9 @@
|
|
1
|
+
import { PersistentMemory } from "../../memory/persistent";
|
1
2
|
import { ActionSchema } from "../../types";
|
2
3
|
export declare class Evaluator {
|
3
4
|
private readonly model;
|
4
5
|
tools: ActionSchema[];
|
5
|
-
|
6
|
+
private memory;
|
7
|
+
constructor(tools: ActionSchema[], memory: PersistentMemory);
|
6
8
|
process(prompt: string, goal: string, results: string): Promise<any>;
|
7
9
|
}
|
@@ -4,11 +4,13 @@ exports.Evaluator = void 0;
|
|
4
4
|
const openai_1 = require("@ai-sdk/openai");
|
5
5
|
const ai_1 = require("ai");
|
6
6
|
const zod_1 = require("zod");
|
7
|
+
const types_1 = require("../../types");
|
7
8
|
const context_1 = require("./context");
|
8
9
|
class Evaluator {
|
9
|
-
constructor(tools) {
|
10
|
+
constructor(tools, memory) {
|
10
11
|
this.model = (0, openai_1.openai)("gpt-4o");
|
11
12
|
this.tools = tools;
|
13
|
+
this.memory = memory;
|
12
14
|
}
|
13
15
|
async process(prompt, goal, results) {
|
14
16
|
try {
|
@@ -23,6 +25,10 @@ class Evaluator {
|
|
23
25
|
}),
|
24
26
|
})),
|
25
27
|
why: zod_1.z.string(),
|
28
|
+
importantToRemembers: zod_1.z.array(zod_1.z.object({
|
29
|
+
hypotheticalQuery: zod_1.z.string(),
|
30
|
+
result: zod_1.z.string(),
|
31
|
+
})),
|
26
32
|
}),
|
27
33
|
prompt: prompt,
|
28
34
|
system: context_1.evaluatorContext.compose(goal, results, this.tools),
|
@@ -34,10 +40,55 @@ class Evaluator {
|
|
34
40
|
parameters: action.parameters || {},
|
35
41
|
})),
|
36
42
|
};
|
43
|
+
if (validatedResponse.importantToRemembers.length > 0) {
|
44
|
+
for (const item of validatedResponse.importantToRemembers) {
|
45
|
+
// Check if the item is already in the memory
|
46
|
+
const memories = await this.memory.findBestMatches(item.hypotheticalQuery);
|
47
|
+
if (memories.length === 0) {
|
48
|
+
console.log("Adding to memory", {
|
49
|
+
query: item.hypotheticalQuery,
|
50
|
+
data: item.result,
|
51
|
+
});
|
52
|
+
await this.memory.storeMemory({
|
53
|
+
id: crypto.randomUUID(),
|
54
|
+
purpose: "importantToRemember",
|
55
|
+
query: item.hypotheticalQuery,
|
56
|
+
data: item.result,
|
57
|
+
scope: types_1.MemoryScope.USER,
|
58
|
+
createdAt: new Date(),
|
59
|
+
});
|
60
|
+
}
|
61
|
+
}
|
62
|
+
}
|
63
|
+
console.log("Evaluator response");
|
64
|
+
console.dir(validatedResponse, { depth: null });
|
37
65
|
return validatedResponse;
|
38
66
|
}
|
39
67
|
catch (error) {
|
40
68
|
if (error) {
|
69
|
+
console.log("Evaluator error");
|
70
|
+
console.dir(error.value, { depth: null });
|
71
|
+
console.error(error.message);
|
72
|
+
if (error.value.importantToRemembers.length > 0) {
|
73
|
+
for (const item of error.value.importantToRemembers) {
|
74
|
+
// Check if the item is already in the memory
|
75
|
+
const memories = await this.memory.findBestMatches(item.hypotheticalQuery);
|
76
|
+
if (memories.length === 0) {
|
77
|
+
console.log("Adding to memory", {
|
78
|
+
query: item.hypotheticalQuery,
|
79
|
+
data: item.result,
|
80
|
+
});
|
81
|
+
await this.memory.storeMemory({
|
82
|
+
id: crypto.randomUUID(),
|
83
|
+
purpose: "importantToRemember",
|
84
|
+
query: item.hypotheticalQuery,
|
85
|
+
data: item.result,
|
86
|
+
scope: types_1.MemoryScope.USER,
|
87
|
+
createdAt: new Date(),
|
88
|
+
});
|
89
|
+
}
|
90
|
+
}
|
91
|
+
}
|
41
92
|
return {
|
42
93
|
...error.value,
|
43
94
|
};
|
@@ -10,10 +10,7 @@ exports.orchestratorContext = {
|
|
10
10
|
"If some parameters are not clear or missing, YOU MUST ask the user for them.",
|
11
11
|
"ALWAYS use the same language as user request. (If it's English, use English, if it's French, use French, etc.)",
|
12
12
|
],
|
13
|
-
warnings: [
|
14
|
-
"NEVER repeat the same action twice if the user doesn't ask for it.",
|
15
|
-
"NEVER repeat the same action if its not necessary.",
|
16
|
-
],
|
13
|
+
warnings: ["NEVER repeat same actions if the user doesn't ask for it."],
|
17
14
|
},
|
18
15
|
compose: (tools) => {
|
19
16
|
return `
|
@@ -1,7 +1,9 @@
|
|
1
|
+
import { PersistentMemory } from "../../memory/persistent";
|
1
2
|
import { ActionSchema, BaseLLM } from "../../types";
|
2
3
|
export declare class Orchestrator implements BaseLLM {
|
3
4
|
private readonly model;
|
4
5
|
tools: ActionSchema[];
|
5
|
-
|
6
|
+
private memory;
|
7
|
+
constructor(tools: ActionSchema[], memory: PersistentMemory);
|
6
8
|
process(prompt: string): Promise<any>;
|
7
9
|
}
|
@@ -6,9 +6,23 @@ const ai_1 = require("ai");
|
|
6
6
|
const zod_1 = require("zod");
|
7
7
|
const context_1 = require("./context");
|
8
8
|
class Orchestrator {
|
9
|
-
constructor(tools) {
|
9
|
+
constructor(tools, memory) {
|
10
10
|
this.model = (0, openai_1.openai)("gpt-4o");
|
11
|
-
this.
|
11
|
+
this.memory = memory;
|
12
|
+
this.tools = [
|
13
|
+
...tools,
|
14
|
+
{
|
15
|
+
name: "search_memory",
|
16
|
+
description: "Search for relevant information in the internal knowledge base",
|
17
|
+
parameters: zod_1.z.object({
|
18
|
+
query: zod_1.z.string(),
|
19
|
+
}),
|
20
|
+
execute: async (params) => {
|
21
|
+
const memories = await this.memory.findBestMatches(params.value);
|
22
|
+
return memories;
|
23
|
+
},
|
24
|
+
},
|
25
|
+
];
|
12
26
|
}
|
13
27
|
async process(prompt) {
|
14
28
|
try {
|
@@ -1,9 +1,9 @@
|
|
1
|
-
import { CreateMemoryInput, MatchOptions,
|
2
|
-
export declare class
|
1
|
+
import { CacheMemoryOptions, CreateMemoryInput, MatchOptions, MemoryScope } from "../types";
|
2
|
+
export declare class CacheMemory {
|
3
3
|
private redis;
|
4
4
|
private readonly CACHE_PREFIX;
|
5
5
|
private readonly CACHE_TTL;
|
6
|
-
constructor(options?:
|
6
|
+
constructor(options?: CacheMemoryOptions);
|
7
7
|
private initRedis;
|
8
8
|
private getMemoryKey;
|
9
9
|
private storeMemory;
|
@@ -1,12 +1,12 @@
|
|
1
1
|
"use strict";
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
3
|
-
exports.
|
3
|
+
exports.CacheMemory = void 0;
|
4
4
|
const openai_1 = require("@ai-sdk/openai");
|
5
5
|
const ai_1 = require("ai");
|
6
6
|
const redis_1 = require("redis");
|
7
7
|
const zod_1 = require("zod");
|
8
8
|
const types_1 = require("../types");
|
9
|
-
class
|
9
|
+
class CacheMemory {
|
10
10
|
constructor(options = {}) {
|
11
11
|
const ttlInHours = options.cacheTTL ?? 1;
|
12
12
|
this.CACHE_TTL = ttlInHours * 60 * 60;
|
@@ -48,7 +48,7 @@ class MemoryCache {
|
|
48
48
|
});
|
49
49
|
}
|
50
50
|
async findBestMatches(query, options = {}) {
|
51
|
-
console.log("\n🔍 Searching for query:", query);
|
51
|
+
console.log("\n🔍 Searching in cache for query:", query);
|
52
52
|
const { embedding } = await (0, ai_1.embed)({
|
53
53
|
model: openai_1.openai.embedding("text-embedding-3-small"),
|
54
54
|
value: query,
|
@@ -186,4 +186,4 @@ class MemoryCache {
|
|
186
186
|
return memory;
|
187
187
|
}
|
188
188
|
}
|
189
|
-
exports.
|
189
|
+
exports.CacheMemory = CacheMemory;
|
package/dist/test.js
CHANGED
@@ -8,6 +8,7 @@ const rss_parser_1 = __importDefault(require("rss-parser"));
|
|
8
8
|
const zod_1 = require("zod");
|
9
9
|
const agent_1 = require("./agent");
|
10
10
|
const orchestrator_1 = require("./llm/orchestrator");
|
11
|
+
const persistent_1 = require("./memory/persistent");
|
11
12
|
exports.getChainsTVL = {
|
12
13
|
name: "get_chains_tvl",
|
13
14
|
description: "Get current TVL (Total Value Locked) of all chains from DeFiLlama",
|
@@ -102,16 +103,21 @@ exports.getRssNews = {
|
|
102
103
|
},
|
103
104
|
};
|
104
105
|
(async () => {
|
105
|
-
const
|
106
|
+
const memory = new persistent_1.PersistentMemory({
|
107
|
+
host: "http://localhost:7700",
|
108
|
+
apiKey: "aSampleMasterKey",
|
109
|
+
});
|
110
|
+
const orchestrator = new orchestrator_1.Orchestrator([exports.getRssNews, exports.getChainsTVL], memory);
|
106
111
|
const agent = new agent_1.Agent({
|
107
112
|
user: {
|
108
113
|
id: "1",
|
109
114
|
},
|
110
115
|
orchestrator,
|
116
|
+
persistentMemory: memory,
|
111
117
|
stream: false,
|
112
118
|
maxEvaluatorIteration: 1,
|
113
119
|
});
|
114
|
-
const prompt = "
|
120
|
+
const prompt = "tu connais avax";
|
115
121
|
const context = prompt;
|
116
122
|
const result = await agent.process(prompt, context, {
|
117
123
|
onMessage: (message) => {
|
package/dist/types.d.ts
CHANGED
@@ -93,7 +93,7 @@ export interface SummarizerAgent {
|
|
93
93
|
} | StreamTextResult<Record<string, any>>>;
|
94
94
|
streamProcess: (results: object, onFinish?: (event: any) => void) => Promise<StreamTextResult<Record<string, any>>>;
|
95
95
|
}
|
96
|
-
export interface
|
96
|
+
export interface CacheMemoryOptions {
|
97
97
|
cacheTTL?: number;
|
98
98
|
redisUrl?: string;
|
99
99
|
cachePrefix?: string;
|
@@ -105,7 +105,7 @@ export interface CreateMemoryInput {
|
|
105
105
|
userId?: string;
|
106
106
|
scope?: MemoryScope;
|
107
107
|
}
|
108
|
-
export interface
|
108
|
+
export interface CacheMemoryType {
|
109
109
|
id: string;
|
110
110
|
type: MemoryType;
|
111
111
|
data: any;
|
@@ -116,6 +116,26 @@ export interface Memory {
|
|
116
116
|
scope: MemoryScope;
|
117
117
|
createdAt: Date;
|
118
118
|
}
|
119
|
+
export interface PersistentMemoryOptions {
|
120
|
+
host: string;
|
121
|
+
apiKey: string;
|
122
|
+
indexPrefix?: string;
|
123
|
+
}
|
124
|
+
export interface MemoryChunk {
|
125
|
+
content: string;
|
126
|
+
embedding: number[];
|
127
|
+
}
|
128
|
+
export type MemoryScopeType = (typeof MemoryScope)[keyof typeof MemoryScope];
|
129
|
+
export interface Memory {
|
130
|
+
id: string;
|
131
|
+
query: string;
|
132
|
+
purpose: string;
|
133
|
+
data: any;
|
134
|
+
scope: MemoryScopeType;
|
135
|
+
userId?: string;
|
136
|
+
createdAt: Date;
|
137
|
+
chunks?: MemoryChunk[];
|
138
|
+
}
|
119
139
|
export declare enum MemoryType {
|
120
140
|
ACTION = "action",
|
121
141
|
CONVERSATION = "conversation",
|
package/index.ts
CHANGED
package/llm/evaluator/context.ts
CHANGED
@@ -29,6 +29,7 @@ export const evaluatorContext = {
|
|
29
29
|
1. Success status with explanation (no action needed)
|
30
30
|
2. Next actions needed (if any)
|
31
31
|
3. Why you are doing the next actions or why you are not doing them
|
32
|
+
4. Extract relevant semantic information to remember (eg. token symbols, token addresses, etc.). No need to remember the actions or the news.
|
32
33
|
`;
|
33
34
|
},
|
34
35
|
};
|
package/llm/evaluator/index.ts
CHANGED
@@ -1,15 +1,18 @@
|
|
1
1
|
import { openai } from "@ai-sdk/openai";
|
2
2
|
import { generateObject } from "ai";
|
3
3
|
import { z } from "zod";
|
4
|
-
import {
|
4
|
+
import { PersistentMemory } from "../../memory/persistent";
|
5
|
+
import { ActionSchema, MemoryScope } from "../../types";
|
5
6
|
import { evaluatorContext } from "./context";
|
6
7
|
|
7
8
|
export class Evaluator {
|
8
9
|
private readonly model = openai("gpt-4o");
|
9
10
|
public tools: ActionSchema[];
|
11
|
+
private memory: PersistentMemory;
|
10
12
|
|
11
|
-
constructor(tools: ActionSchema[]) {
|
13
|
+
constructor(tools: ActionSchema[], memory: PersistentMemory) {
|
12
14
|
this.tools = tools;
|
15
|
+
this.memory = memory;
|
13
16
|
}
|
14
17
|
|
15
18
|
async process(prompt: string, goal: string, results: string): Promise<any> {
|
@@ -27,6 +30,12 @@ export class Evaluator {
|
|
27
30
|
})
|
28
31
|
),
|
29
32
|
why: z.string(),
|
33
|
+
importantToRemembers: z.array(
|
34
|
+
z.object({
|
35
|
+
hypotheticalQuery: z.string(),
|
36
|
+
result: z.string(),
|
37
|
+
})
|
38
|
+
),
|
30
39
|
}),
|
31
40
|
prompt: prompt,
|
32
41
|
system: evaluatorContext.compose(goal, results, this.tools),
|
@@ -40,9 +49,60 @@ export class Evaluator {
|
|
40
49
|
})),
|
41
50
|
};
|
42
51
|
|
52
|
+
if (validatedResponse.importantToRemembers.length > 0) {
|
53
|
+
for (const item of validatedResponse.importantToRemembers) {
|
54
|
+
// Check if the item is already in the memory
|
55
|
+
const memories = await this.memory.findBestMatches(
|
56
|
+
item.hypotheticalQuery
|
57
|
+
);
|
58
|
+
if (memories.length === 0) {
|
59
|
+
console.log("Adding to memory", {
|
60
|
+
query: item.hypotheticalQuery,
|
61
|
+
data: item.result,
|
62
|
+
});
|
63
|
+
await this.memory.storeMemory({
|
64
|
+
id: crypto.randomUUID(),
|
65
|
+
purpose: "importantToRemember",
|
66
|
+
query: item.hypotheticalQuery,
|
67
|
+
data: item.result,
|
68
|
+
scope: MemoryScope.USER,
|
69
|
+
createdAt: new Date(),
|
70
|
+
});
|
71
|
+
}
|
72
|
+
}
|
73
|
+
}
|
74
|
+
|
75
|
+
console.log("Evaluator response");
|
76
|
+
console.dir(validatedResponse, { depth: null });
|
43
77
|
return validatedResponse;
|
44
78
|
} catch (error: any) {
|
45
79
|
if (error) {
|
80
|
+
console.log("Evaluator error");
|
81
|
+
console.dir(error.value, { depth: null });
|
82
|
+
console.error(error.message);
|
83
|
+
if (error.value.importantToRemembers.length > 0) {
|
84
|
+
for (const item of error.value.importantToRemembers) {
|
85
|
+
// Check if the item is already in the memory
|
86
|
+
const memories = await this.memory.findBestMatches(
|
87
|
+
item.hypotheticalQuery
|
88
|
+
);
|
89
|
+
if (memories.length === 0) {
|
90
|
+
console.log("Adding to memory", {
|
91
|
+
query: item.hypotheticalQuery,
|
92
|
+
data: item.result,
|
93
|
+
});
|
94
|
+
await this.memory.storeMemory({
|
95
|
+
id: crypto.randomUUID(),
|
96
|
+
purpose: "importantToRemember",
|
97
|
+
query: item.hypotheticalQuery,
|
98
|
+
data: item.result,
|
99
|
+
scope: MemoryScope.USER,
|
100
|
+
createdAt: new Date(),
|
101
|
+
});
|
102
|
+
}
|
103
|
+
}
|
104
|
+
}
|
105
|
+
|
46
106
|
return {
|
47
107
|
...error.value,
|
48
108
|
};
|
@@ -9,10 +9,7 @@ export const orchestratorContext = {
|
|
9
9
|
"If some parameters are not clear or missing, YOU MUST ask the user for them.",
|
10
10
|
"ALWAYS use the same language as user request. (If it's English, use English, if it's French, use French, etc.)",
|
11
11
|
],
|
12
|
-
warnings: [
|
13
|
-
"NEVER repeat the same action twice if the user doesn't ask for it.",
|
14
|
-
"NEVER repeat the same action if its not necessary.",
|
15
|
-
],
|
12
|
+
warnings: ["NEVER repeat same actions if the user doesn't ask for it."],
|
16
13
|
},
|
17
14
|
compose: (tools: ActionSchema[]) => {
|
18
15
|
return `
|
@@ -1,15 +1,32 @@
|
|
1
1
|
import { openai } from "@ai-sdk/openai";
|
2
2
|
import { generateObject } from "ai";
|
3
3
|
import { z } from "zod";
|
4
|
+
import { PersistentMemory } from "../../memory/persistent";
|
4
5
|
import { ActionSchema, BaseLLM } from "../../types";
|
5
6
|
import { orchestratorContext } from "./context";
|
6
7
|
|
7
8
|
export class Orchestrator implements BaseLLM {
|
8
9
|
private readonly model = openai("gpt-4o");
|
9
10
|
public tools: ActionSchema[];
|
11
|
+
private memory: PersistentMemory;
|
10
12
|
|
11
|
-
constructor(tools: ActionSchema[]) {
|
12
|
-
this.
|
13
|
+
constructor(tools: ActionSchema[], memory: PersistentMemory) {
|
14
|
+
this.memory = memory;
|
15
|
+
this.tools = [
|
16
|
+
...tools,
|
17
|
+
{
|
18
|
+
name: "search_memory",
|
19
|
+
description:
|
20
|
+
"Search for relevant information in the internal knowledge base",
|
21
|
+
parameters: z.object({
|
22
|
+
query: z.string(),
|
23
|
+
}),
|
24
|
+
execute: async (params) => {
|
25
|
+
const memories = await this.memory.findBestMatches(params.value);
|
26
|
+
return memories;
|
27
|
+
},
|
28
|
+
},
|
29
|
+
];
|
13
30
|
}
|
14
31
|
|
15
32
|
async process(prompt: string): Promise<any> {
|
@@ -3,20 +3,20 @@ import { cosineSimilarity, embed, generateObject } from "ai";
|
|
3
3
|
import { createClient } from "redis";
|
4
4
|
import { z } from "zod";
|
5
5
|
import {
|
6
|
+
CacheMemoryOptions,
|
7
|
+
CacheMemoryType,
|
6
8
|
CreateMemoryInput,
|
7
9
|
MatchOptions,
|
8
|
-
Memory,
|
9
|
-
MemoryCacheOptions,
|
10
10
|
MemoryScope,
|
11
11
|
MemoryType,
|
12
12
|
} from "../types";
|
13
13
|
|
14
|
-
export class
|
14
|
+
export class CacheMemory {
|
15
15
|
private redis;
|
16
16
|
private readonly CACHE_PREFIX: string;
|
17
17
|
private readonly CACHE_TTL: number;
|
18
18
|
|
19
|
-
constructor(options:
|
19
|
+
constructor(options: CacheMemoryOptions = {}) {
|
20
20
|
const ttlInHours = options.cacheTTL ?? 1;
|
21
21
|
this.CACHE_TTL = ttlInHours * 60 * 60;
|
22
22
|
this.CACHE_PREFIX = options.cachePrefix ?? "memory:";
|
@@ -53,7 +53,7 @@ export class MemoryCache {
|
|
53
53
|
return `${this.CACHE_PREFIX}user:${userId}:`;
|
54
54
|
}
|
55
55
|
|
56
|
-
private async storeMemory(memory:
|
56
|
+
private async storeMemory(memory: CacheMemoryType) {
|
57
57
|
const prefix = this.getMemoryKey(memory.scope, memory.userId);
|
58
58
|
const key = `${prefix}${memory.id}`;
|
59
59
|
await this.redis.set(key, JSON.stringify(memory), {
|
@@ -71,7 +71,7 @@ export class MemoryCache {
|
|
71
71
|
purpose: string;
|
72
72
|
}[]
|
73
73
|
> {
|
74
|
-
console.log("\n🔍 Searching for query:", query);
|
74
|
+
console.log("\n🔍 Searching in cache for query:", query);
|
75
75
|
|
76
76
|
const { embedding } = await embed({
|
77
77
|
model: openai.embedding("text-embedding-3-small"),
|
@@ -126,8 +126,8 @@ export class MemoryCache {
|
|
126
126
|
private async getAllMemories(
|
127
127
|
scope?: MemoryScope,
|
128
128
|
userId?: string
|
129
|
-
): Promise<
|
130
|
-
let patterns:
|
129
|
+
): Promise<CacheMemoryType[]> {
|
130
|
+
let patterns: CacheMemoryType[] = [];
|
131
131
|
|
132
132
|
if (!scope || scope === MemoryScope.GLOBAL) {
|
133
133
|
const globalPrefix = this.getMemoryKey(MemoryScope.GLOBAL);
|
@@ -146,8 +146,10 @@ export class MemoryCache {
|
|
146
146
|
return patterns;
|
147
147
|
}
|
148
148
|
|
149
|
-
private async getMemoriesFromKeys(
|
150
|
-
|
149
|
+
private async getMemoriesFromKeys(
|
150
|
+
keys: string[]
|
151
|
+
): Promise<CacheMemoryType[]> {
|
152
|
+
const memories: CacheMemoryType[] = [];
|
151
153
|
for (const key of keys) {
|
152
154
|
const data = await this.redis.get(key);
|
153
155
|
if (data) {
|
@@ -230,13 +232,13 @@ export class MemoryCache {
|
|
230
232
|
purpose: string;
|
231
233
|
userId?: string;
|
232
234
|
scope?: MemoryScope;
|
233
|
-
}): Promise<
|
235
|
+
}): Promise<CacheMemoryType> {
|
234
236
|
const { embedding } = await embed({
|
235
237
|
model: openai.embedding("text-embedding-3-small"),
|
236
238
|
value: params.content,
|
237
239
|
});
|
238
240
|
|
239
|
-
const memory:
|
241
|
+
const memory: CacheMemoryType = {
|
240
242
|
id: params.id,
|
241
243
|
type: params.type,
|
242
244
|
data: params.data,
|
@@ -0,0 +1,28 @@
|
|
1
|
+
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
|
2
|
+
import { Index, MeiliSearch } from "meilisearch";
|
3
|
+
import {
|
4
|
+
MatchOptions,
|
5
|
+
MatchResult,
|
6
|
+
Memory,
|
7
|
+
MemoryChunk,
|
8
|
+
MemoryScopeType,
|
9
|
+
} from "../types";
|
10
|
+
|
11
|
+
export class PersistentMemory {
|
12
|
+
private client: MeiliSearch;
|
13
|
+
private readonly INDEX_PREFIX: string;
|
14
|
+
private textSplitter: RecursiveCharacterTextSplitter;
|
15
|
+
|
16
|
+
constructor(options: PersistentMemoryOptions);
|
17
|
+
|
18
|
+
private _getIndexName(scope: MemoryScopeType, userId?: string): string;
|
19
|
+
private _getOrCreateIndex(indexName: string): Promise<Index>;
|
20
|
+
private _processContent(content: string): Promise<MemoryChunk[]>;
|
21
|
+
|
22
|
+
storeMemory(memory: Memory): Promise<void>;
|
23
|
+
findBestMatches(
|
24
|
+
query: string,
|
25
|
+
options?: MatchOptions
|
26
|
+
): Promise<MatchResult[]>;
|
27
|
+
deleteMemories(scope: MemoryScopeType, userId?: string): Promise<void>;
|
28
|
+
}
|
@@ -0,0 +1,198 @@
|
|
1
|
+
import { openai } from "@ai-sdk/openai";
|
2
|
+
import { cosineSimilarity, embed, embedMany } from "ai";
|
3
|
+
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
|
4
|
+
import { MeiliSearch } from "meilisearch";
|
5
|
+
|
6
|
+
export const MemoryScope = {
|
7
|
+
GLOBAL: "global",
|
8
|
+
USER: "user",
|
9
|
+
};
|
10
|
+
|
11
|
+
export class PersistentMemory {
|
12
|
+
/**
|
13
|
+
* @param {Object} options
|
14
|
+
* @param {string} options.host - MeiliSearch host URL
|
15
|
+
* @param {string} options.apiKey - MeiliSearch API key
|
16
|
+
* @param {string} [options.indexPrefix="memory_"] - Prefix for index names
|
17
|
+
*/
|
18
|
+
constructor(options) {
|
19
|
+
this.client = new MeiliSearch({
|
20
|
+
host: options.host,
|
21
|
+
apiKey: options.apiKey,
|
22
|
+
});
|
23
|
+
this.INDEX_PREFIX = options.indexPrefix || "memory_";
|
24
|
+
this.textSplitter = new RecursiveCharacterTextSplitter({
|
25
|
+
chunkSize: 1000,
|
26
|
+
});
|
27
|
+
}
|
28
|
+
|
29
|
+
/**
|
30
|
+
* Get the index name based on scope and userId
|
31
|
+
* @private
|
32
|
+
*/
|
33
|
+
_getIndexName(scope, userId) {
|
34
|
+
if (scope === MemoryScope.GLOBAL) {
|
35
|
+
return `${this.INDEX_PREFIX}global`;
|
36
|
+
}
|
37
|
+
return `${this.INDEX_PREFIX}user_${userId}`;
|
38
|
+
}
|
39
|
+
|
40
|
+
/**
|
41
|
+
* Get or create an index with proper settings
|
42
|
+
* @private
|
43
|
+
*/
|
44
|
+
async _getOrCreateIndex(indexName) {
|
45
|
+
const index = this.client.index(indexName);
|
46
|
+
|
47
|
+
try {
|
48
|
+
await this.client.createIndex(indexName, { primaryKey: "id" });
|
49
|
+
await index.updateSettings({
|
50
|
+
searchableAttributes: ["query", "purpose", "chunks.content"],
|
51
|
+
sortableAttributes: ["createdAt"],
|
52
|
+
});
|
53
|
+
} catch (error) {
|
54
|
+
// Index might already exist, which is fine
|
55
|
+
if (!error.message.includes("already exists")) {
|
56
|
+
throw error;
|
57
|
+
}
|
58
|
+
}
|
59
|
+
|
60
|
+
return index;
|
61
|
+
}
|
62
|
+
|
63
|
+
/**
|
64
|
+
* Process content into chunks with embeddings
|
65
|
+
* @private
|
66
|
+
*/
|
67
|
+
async _processContent(content) {
|
68
|
+
// Split content into chunks
|
69
|
+
const chunks = await this.textSplitter.createDocuments([content]);
|
70
|
+
|
71
|
+
// Generate embeddings for all chunks
|
72
|
+
const { embeddings } = await embedMany({
|
73
|
+
model: openai.embedding("text-embedding-3-small"),
|
74
|
+
values: chunks.map((chunk) => chunk.pageContent),
|
75
|
+
});
|
76
|
+
|
77
|
+
// Create processed chunks with embeddings
|
78
|
+
return chunks.map((chunk, i) => ({
|
79
|
+
content: chunk.pageContent,
|
80
|
+
embedding: embeddings[i],
|
81
|
+
}));
|
82
|
+
}
|
83
|
+
|
84
|
+
/**
|
85
|
+
* Store a memory in the database
|
86
|
+
* @param {Object} memory - Memory object to store
|
87
|
+
*/
|
88
|
+
async storeMemory(memory) {
|
89
|
+
const indexName = this._getIndexName(memory.scope, memory.userId);
|
90
|
+
const index = await this._getOrCreateIndex(indexName);
|
91
|
+
|
92
|
+
// Process the query into chunks with embeddings
|
93
|
+
const chunks = await this._processContent(memory.query);
|
94
|
+
|
95
|
+
const result = await index.addDocuments([
|
96
|
+
{
|
97
|
+
...memory,
|
98
|
+
chunks,
|
99
|
+
createdAt: memory.createdAt.toISOString(),
|
100
|
+
},
|
101
|
+
]);
|
102
|
+
return result;
|
103
|
+
}
|
104
|
+
|
105
|
+
/**
|
106
|
+
* Find best matching memories using cosine similarity
|
107
|
+
* @param {string} query - Search query
|
108
|
+
* @param {Object} options - Search options
|
109
|
+
*/
|
110
|
+
async findBestMatches(query, options = {}) {
|
111
|
+
console.log("\n🔍 Searching in persistent memory:", query);
|
112
|
+
|
113
|
+
// Generate embedding for the query
|
114
|
+
const { embedding: queryEmbedding } = await embed({
|
115
|
+
model: openai.embedding("text-embedding-3-small"),
|
116
|
+
value: query,
|
117
|
+
});
|
118
|
+
const searchResults = [];
|
119
|
+
|
120
|
+
// Search in global memories
|
121
|
+
if (!options.scope || options.scope === MemoryScope.GLOBAL) {
|
122
|
+
const globalIndex = await this._getOrCreateIndex(
|
123
|
+
this._getIndexName(MemoryScope.GLOBAL)
|
124
|
+
);
|
125
|
+
const globalResults = await globalIndex.search(query, {
|
126
|
+
limit: options.maxResults || 10,
|
127
|
+
});
|
128
|
+
searchResults.push(...globalResults.hits);
|
129
|
+
}
|
130
|
+
|
131
|
+
// Search in user memories
|
132
|
+
if (
|
133
|
+
options.userId &&
|
134
|
+
(!options.scope || options.scope === MemoryScope.USER)
|
135
|
+
) {
|
136
|
+
const userIndex = await this._getOrCreateIndex(
|
137
|
+
this._getIndexName(MemoryScope.USER, options.userId)
|
138
|
+
);
|
139
|
+
const userResults = await userIndex.search(query, {
|
140
|
+
limit: options.maxResults || 10,
|
141
|
+
});
|
142
|
+
searchResults.push(...userResults.hits);
|
143
|
+
}
|
144
|
+
|
145
|
+
// Process and filter results using cosine similarity
|
146
|
+
const results = searchResults
|
147
|
+
.flatMap((hit) => {
|
148
|
+
// Calculate similarities for each chunk
|
149
|
+
console.log(hit);
|
150
|
+
const chunkSimilarities = hit.chunks.map((chunk) => ({
|
151
|
+
data: hit.data,
|
152
|
+
purpose: hit.purpose,
|
153
|
+
chunk: chunk.content,
|
154
|
+
similarityPercentage:
|
155
|
+
(cosineSimilarity(queryEmbedding, chunk.embedding) + 1) * 50,
|
156
|
+
}));
|
157
|
+
console.log({ chunkSimilarities });
|
158
|
+
// Return the chunk with highest similarity
|
159
|
+
return chunkSimilarities.reduce(
|
160
|
+
(best, current) =>
|
161
|
+
current.similarityPercentage > best.similarityPercentage
|
162
|
+
? current
|
163
|
+
: best,
|
164
|
+
chunkSimilarities[0]
|
165
|
+
);
|
166
|
+
})
|
167
|
+
.filter(
|
168
|
+
(match) =>
|
169
|
+
match.similarityPercentage >= (options.similarityThreshold || 70)
|
170
|
+
)
|
171
|
+
.sort((a, b) => b.similarityPercentage - a.similarityPercentage);
|
172
|
+
|
173
|
+
// Log results
|
174
|
+
if (results.length > 0) {
|
175
|
+
console.log("\n✨ Best matches found:");
|
176
|
+
results.forEach((match) => {
|
177
|
+
console.log(
|
178
|
+
`- ${match.purpose} (${match.similarityPercentage.toFixed(2)}%)`
|
179
|
+
);
|
180
|
+
console.log(` Matching chunk: "${match.chunk}"`);
|
181
|
+
});
|
182
|
+
} else {
|
183
|
+
console.log("No matches found");
|
184
|
+
}
|
185
|
+
|
186
|
+
return results;
|
187
|
+
}
|
188
|
+
|
189
|
+
/**
|
190
|
+
* Delete memories for a given scope and user
|
191
|
+
* @param {string} scope - Memory scope
|
192
|
+
* @param {string} [userId] - User ID for user-specific memories
|
193
|
+
*/
|
194
|
+
async deleteMemories(scope, userId) {
|
195
|
+
const indexName = this._getIndexName(scope, userId);
|
196
|
+
await this.client.deleteIndex(indexName);
|
197
|
+
}
|
198
|
+
}
|
package/package.json
CHANGED
package/test.ts
CHANGED
@@ -2,6 +2,7 @@ import Parser from "rss-parser";
|
|
2
2
|
import { z } from "zod";
|
3
3
|
import { Agent } from "./agent";
|
4
4
|
import { Orchestrator } from "./llm/orchestrator";
|
5
|
+
import { PersistentMemory } from "./memory/persistent";
|
5
6
|
|
6
7
|
interface ChainTVL {
|
7
8
|
name: string;
|
@@ -122,17 +123,22 @@ export const getRssNews = {
|
|
122
123
|
};
|
123
124
|
|
124
125
|
(async () => {
|
125
|
-
const
|
126
|
+
const memory = new PersistentMemory({
|
127
|
+
host: "http://localhost:7700",
|
128
|
+
apiKey: "aSampleMasterKey",
|
129
|
+
});
|
130
|
+
const orchestrator = new Orchestrator([getRssNews, getChainsTVL], memory);
|
126
131
|
const agent = new Agent({
|
127
132
|
user: {
|
128
133
|
id: "1",
|
129
134
|
},
|
130
135
|
orchestrator,
|
136
|
+
persistentMemory: memory,
|
131
137
|
stream: false,
|
132
138
|
maxEvaluatorIteration: 1,
|
133
139
|
});
|
134
140
|
|
135
|
-
const prompt = "
|
141
|
+
const prompt = "tu connais avax";
|
136
142
|
const context = prompt;
|
137
143
|
|
138
144
|
const result = await agent.process(prompt, context, {
|
package/types.ts
CHANGED
@@ -119,7 +119,7 @@ export interface SummarizerAgent {
|
|
119
119
|
) => Promise<StreamTextResult<Record<string, any>>>;
|
120
120
|
}
|
121
121
|
|
122
|
-
export interface
|
122
|
+
export interface CacheMemoryOptions {
|
123
123
|
cacheTTL?: number;
|
124
124
|
redisUrl?: string;
|
125
125
|
cachePrefix?: string;
|
@@ -133,7 +133,7 @@ export interface CreateMemoryInput {
|
|
133
133
|
scope?: MemoryScope;
|
134
134
|
}
|
135
135
|
|
136
|
-
export interface
|
136
|
+
export interface CacheMemoryType {
|
137
137
|
id: string;
|
138
138
|
type: MemoryType;
|
139
139
|
data: any;
|
@@ -145,6 +145,30 @@ export interface Memory {
|
|
145
145
|
createdAt: Date;
|
146
146
|
}
|
147
147
|
|
148
|
+
/** Connection settings for the persistent (search-server-backed) memory store. */
export interface PersistentMemoryOptions {
  /** Base URL of the search server, e.g. "http://localhost:7700". */
  host: string;
  /** API key used to authenticate against the server. */
  apiKey: string;
  /** Optional prefix for index names — presumably namespaces indexes; TODO confirm against PersistentMemory. */
  indexPrefix?: string;
}
|
153
|
+
|
154
|
+
/** One piece of split memory content together with its embedding vector. */
export interface MemoryChunk {
  /** Text of the chunk. */
  content: string;
  /** Embedding vector computed for the chunk text. */
  embedding: number[];
}
|
158
|
+
|
159
|
+
/** Union of the values of MemoryScope (e.g. its global/user members). */
export type MemoryScopeType = (typeof MemoryScope)[keyof typeof MemoryScope];
|
160
|
+
|
161
|
+
/** A stored memory record as persisted by the persistent memory store. */
export interface Memory {
  /** Unique identifier of the memory. */
  id: string;
  /** Original query text this memory was created from (chunked at store time). */
  query: string;
  /** Human-readable purpose/label for the memory. */
  purpose: string;
  /** Arbitrary payload attached to the memory. */
  data: any;
  /** Scope the memory belongs to (global or user-level). */
  scope: MemoryScopeType;
  /** Owning user — presumably required when scope is user-level; TODO confirm. */
  userId?: string;
  /** Creation timestamp (serialized to ISO string when stored). */
  createdAt: Date;
  /** Chunked + embedded representation, populated when the memory is stored. */
  chunks?: MemoryChunk[];
}
|
171
|
+
|
148
172
|
export enum MemoryType {
|
149
173
|
ACTION = "action",
|
150
174
|
CONVERSATION = "conversation",
|