@ai.ntellect/core 0.3.3 → 0.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.nvmrc +1 -0
- package/README.FR.md +242 -247
- package/README.md +249 -246
- package/agent/index.ts +199 -215
- package/agent/tools/get-rss.ts +64 -0
- package/bull.ts +5 -0
- package/dist/agent/index.d.ts +29 -26
- package/dist/agent/index.js +123 -112
- package/dist/agent/tools/get-rss.d.ts +16 -0
- package/dist/agent/tools/get-rss.js +62 -0
- package/dist/bull.d.ts +1 -0
- package/dist/bull.js +9 -0
- package/dist/examples/index.d.ts +2 -0
- package/dist/examples/index.js +89 -0
- package/dist/llm/interpreter/context.d.ts +5 -22
- package/dist/llm/interpreter/context.js +8 -9
- package/dist/llm/interpreter/index.d.ts +9 -5
- package/dist/llm/interpreter/index.js +55 -48
- package/dist/llm/memory-manager/context.d.ts +2 -0
- package/dist/llm/memory-manager/context.js +22 -0
- package/dist/llm/memory-manager/index.d.ts +17 -0
- package/dist/llm/memory-manager/index.js +107 -0
- package/dist/llm/orchestrator/context.d.ts +2 -10
- package/dist/llm/orchestrator/context.js +19 -14
- package/dist/llm/orchestrator/index.d.ts +36 -21
- package/dist/llm/orchestrator/index.js +122 -88
- package/dist/llm/orchestrator/types.d.ts +12 -0
- package/dist/llm/orchestrator/types.js +2 -0
- package/dist/memory/cache.d.ts +6 -5
- package/dist/memory/cache.js +31 -21
- package/dist/memory/persistent.d.ts +5 -3
- package/dist/memory/persistent.js +89 -73
- package/dist/services/redis-cache.d.ts +37 -0
- package/dist/services/redis-cache.js +93 -0
- package/dist/services/scheduler.d.ts +39 -16
- package/dist/services/scheduler.js +81 -103
- package/dist/services/telegram-monitor.d.ts +0 -15
- package/dist/services/telegram-monitor.js +117 -101
- package/dist/test.js +106 -172
- package/dist/types.d.ts +38 -7
- package/dist/utils/generate-object.d.ts +12 -0
- package/dist/utils/generate-object.js +90 -0
- package/dist/utils/header-builder.d.ts +11 -0
- package/dist/utils/header-builder.js +34 -0
- package/dist/utils/inject-actions.js +2 -2
- package/dist/utils/queue-item-transformer.d.ts +2 -2
- package/dist/utils/schema-generator.d.ts +16 -0
- package/dist/utils/schema-generator.js +46 -0
- package/examples/index.ts +103 -0
- package/llm/interpreter/context.ts +20 -8
- package/llm/interpreter/index.ts +81 -54
- package/llm/memory-manager/context.ts +21 -0
- package/llm/memory-manager/index.ts +163 -0
- package/llm/orchestrator/context.ts +20 -13
- package/llm/orchestrator/index.ts +210 -130
- package/llm/orchestrator/types.ts +14 -0
- package/memory/cache.ts +37 -31
- package/memory/persistent.ts +121 -99
- package/package.json +11 -2
- package/services/redis-cache.ts +128 -0
- package/services/scheduler.ts +102 -141
- package/services/telegram-monitor.ts +138 -138
- package/t.py +79 -0
- package/t.spec +38 -0
- package/types.ts +40 -7
- package/utils/generate-object.ts +105 -0
- package/utils/header-builder.ts +40 -0
- package/utils/inject-actions.ts +4 -6
- package/utils/queue-item-transformer.ts +2 -1
- package/utils/schema-generator.ts +73 -0
- package/agent/handlers/ActionHandler.ts +0 -48
- package/agent/handlers/ConfirmationHandler.ts +0 -37
- package/agent/handlers/EventHandler.ts +0 -35
- package/dist/agent/handlers/ActionHandler.d.ts +0 -8
- package/dist/agent/handlers/ActionHandler.js +0 -36
- package/dist/agent/handlers/ConfirmationHandler.d.ts +0 -7
- package/dist/agent/handlers/ConfirmationHandler.js +0 -31
- package/dist/agent/handlers/EventHandler.d.ts +0 -10
- package/dist/agent/handlers/EventHandler.js +0 -34
- package/dist/llm/evaluator/context.d.ts +0 -10
- package/dist/llm/evaluator/context.js +0 -24
- package/dist/llm/evaluator/index.d.ts +0 -16
- package/dist/llm/evaluator/index.js +0 -150
- package/llm/evaluator/context.ts +0 -21
- package/llm/evaluator/index.ts +0 -193
@@ -1,152 +1,232 @@
-import {
-import { generateObject } from "ai";
+import { LanguageModelV1 } from "ai";
 import { z } from "zod";
 import { CacheMemory } from "../../memory/cache";
 import { PersistentMemory } from "../../memory/persistent";
-import {
+import { ActionQueueManager } from "../../services/queue";
+import { CacheConfig, RedisCache } from "../../services/redis-cache";
+import { TaskScheduler } from "../../services/scheduler";
+import {
+  ActionSchema,
+  GenerateObjectResponse,
+  MemoryScope,
+  QueueCallbacks,
+} from "../../types";
+import { generateObject } from "../../utils/generate-object";
+import { LLMHeaderBuilder } from "../../utils/header-builder";
 import { injectActions } from "../../utils/inject-actions";
-import {
-
-
-
-
-  private
-
-
+import { Interpreter } from "../interpreter";
+import { orchestratorInstructions } from "./context";
+import { State } from "./types";
+
+export class AgentRuntime {
+  private readonly model: LanguageModelV1;
+  private readonly tools: ActionSchema[];
+  private readonly interpreters: Interpreter[];
+  private readonly queueManager: ActionQueueManager;
+  private readonly scheduler: TaskScheduler;
+  private readonly cache: RedisCache;
+  private memory?: {
+    persistent?: PersistentMemory;
+    cache?: CacheMemory;
   };
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    this.
+
+  constructor(
+    model: LanguageModelV1,
+    tools: ActionSchema[],
+    interpreters: Interpreter[],
+    redisConfig: CacheConfig,
+    memory?: {
+      persistent?: PersistentMemory;
+      cache?: CacheMemory;
+    },
+    callbacks?: QueueCallbacks
+  ) {
+    this.model = model;
+    this.tools = tools;
+    this.interpreters = interpreters;
+    this.queueManager = new ActionQueueManager(tools, callbacks);
     this.memory = memory;
-    this.
-
-      {
-        name: "search_internal_knowledge_base",
-        description:
-          "Search for relevant information in the internal knowledge base",
-        parameters: z.object({
-          query: z.string(),
-        }),
-        execute: async ({ query }: { query: string }) => {
-          const persistentMemories =
-            await this.memory.persistent.findRelevantDocuments(query, {
-              similarityThreshold: 70,
-            });
-          return `# LONG_TERM_MEMORY: ${JSON.stringify(persistentMemories)}`;
-        },
-      },
-    ];
+    this.cache = new RedisCache(redisConfig);
+    this.scheduler = new TaskScheduler(this, this.cache);
   }

-
-
-    const
-
-
-
-
-
-
-
-
-
-
-
-
+  private async buildContext(state: State): Promise<string> {
+    console.log("Building context with RAG and CAG...");
+    const context = LLMHeaderBuilder.create();
+
+    // Add orchestrator instructions
+    context.addHeader("ROLE", orchestratorInstructions.role);
+    context.addHeader("LANGUAGE", orchestratorInstructions.language);
+    context.addHeader(
+      "IMPORTANT",
+      orchestratorInstructions.guidelines.important
+    );
+    context.addHeader("WARNINGS", orchestratorInstructions.guidelines.warnings);
+    // Add tools to context
+    context.addHeader("TOOLS", injectActions(this.tools));
+
+    // Add previous actions if any
+    if (state.previousActions?.length) {
+      context.addHeader(
+        "PREVIOUS_ACTIONS",
+        JSON.stringify(state.previousActions)
+      );
+    }
+
+    // Get recent similar actions (CAG)
+    if (this.memory?.cache) {
+      const cacheMemories = await this.memory.cache.findSimilarActions(
+        state.currentContext,
+        {
+          similarityThreshold: 80,
+          maxResults: 3,
+          scope: MemoryScope.GLOBAL,
+        }
+      );
+
+      if (cacheMemories.length > 0) {
+        context.addHeader("RECENT_ACTIONS", JSON.stringify(cacheMemories));
+      }
+    }
+
+    // Get relevant knowledge (RAG)
+    if (this.memory?.persistent) {
+      const persistentMemory =
+        await this.memory.persistent.findRelevantDocuments(
+          state.currentContext,
+          {
+            similarityThreshold: 80,
+          }
+        );
+
+      if (persistentMemory.length > 0) {
+        context.addHeader(
+          "RELEVANT_KNOWLEDGE",
+          JSON.stringify(persistentMemory)
+        );
+      }
+    }
+
+    // Add available interpreters
+    context.addHeader(
+      "AVAILABLE_INTERPRETERS",
+      JSON.stringify(this.interpreters.map((i) => i.name))
+    );
+    console.log("Context built with memories", context.toString());
+    return context.toString();
   }

-  async process(
-
-
-  ): Promise<{
-    actions: {
+  async process(state: State): Promise<{
+    shouldContinue: boolean;
+    actions: Array<{
       name: string;
-
-      parameters: {
+      parameters: Array<{
         name: string;
         value: any;
-      }
-
-
-
-
-
-      results: results,
-    });
-    try {
-      console.log("\nOrchestrator processing");
-      console.log("Prompt:", prompt);
-
-      const response = await generateObject({
-        model: this.model,
-        schema: z.object({
-          actions: z.array(
-            z.object({
-              name: z.string(),
-              type: z.enum(["on-chain", "off-chain", "question", "analysis"]),
-              parameters: z.array(
-                z.object({
-                  name: z.string(),
-                  value: z.any(),
-                })
-              ),
-            })
-          ),
-          answer: z.string(),
-        }),
-        prompt: prompt,
-        system: state,
-        temperature: 0,
-      });
-
-      const validatedResponse = {
-        ...response.object,
-        actions: response.object.actions.map((action) => ({
-          ...action,
-          parameters: Array.isArray(action.parameters)
-            ? action.parameters.map((param) => ({
-                name: param.name,
-                value: param.value ?? null,
-              }))
-            : Object.entries(action.parameters || {}).map(([name, value]) => ({
-                name,
-                value: value ?? null,
-              })),
-        })),
+      }>;
+      scheduler?: {
+        isScheduled: boolean;
+        scheduledAtInC?: string;
+        interval?: string;
+        reason?: string;
       };
-
-
-
+    }>;
+    socialResponse?: {
+      shouldRespond: boolean;
+      response?: string;
+      isPartialResponse?: boolean;
+    };
+    interpreter?: string;
+    results?: string;
+  }> {
+    console.log("Processing state:");
+    console.dir(state, { depth: null });
+    if (state.previousActions?.length) {
       console.log(
-        "
-
-
-
+        "Previous actions:",
+        state.previousActions
+          .map((a) => (typeof a === "string" ? a : a.name))
+          .join(", ")
       );
-
-
+    }
+
+    const context = await this.buildContext(state);
+
+    console.log("\nGenerating response from LLM...");
+    const response = await generateObject<GenerateObjectResponse>({
+      model: this.model,
+      schema: z.object({
+        shouldContinue: z.boolean(),
+        actions: z.array(
+          z.object({
+            name: z.string(),
+            parameters: z.array(
+              z.object({
+                name: z.string(),
+                value: z.any(),
+              })
+            ),
+            scheduler: z
+              .object({
+                isScheduled: z.boolean(),
+                cronExpression: z.string().optional(),
+                reason: z.string().optional(),
+              })
+              .optional(),
+          })
+        ),
+        socialResponse: z
+          .object({
+            shouldRespond: z.boolean(),
+            response: z.string().optional(),
+            isPartialResponse: z.boolean().optional(),
+          })
+          .optional(),
+        interpreter: z.string().optional(),
+      }),
+      prompt: state.currentContext,
+      system: context.toString(),
+      temperature: 0,
+    });
+    console.log("Orchestrator response:");
+    console.dir(response.object, { depth: null });
+
+    // Force shouldContinue to false if no actions are planned
+    if (response.object.actions.length === 0) {
+      response.object.shouldContinue = false;
+      console.log("No actions planned, forcing shouldContinue to false");
+    }
+
+    // Handle social interactions and actions in a single block
+    if (response.object.socialResponse?.shouldRespond) {
+      console.log("\nProcessing social response");
+      if (response.object.socialResponse.response) {
+        console.log("Response:", response.object.socialResponse.response);
+        // Ensure all parameters have a value property
       }
+    }

-
-
-
-
-
-
+    // Handle scheduled actions
+    for (const action of response.object.actions) {
+      if (action.scheduler?.isScheduled) {
+        await this.scheduler.scheduleRequest({
+          originalRequest: state.currentContext,
+          cronExpression: action.scheduler.cronExpression,
+        });
       }
-      throw error;
     }
+
+    // Store actions in Redis cache
+    if (response.object.actions.length > 0) {
+      const requestId = crypto.randomUUID();
+      await this.cache.storePreviousActions(requestId, response.object.actions);
+    }
+
+    // Store message in recent messages
+    await this.cache.storeRecentMessage(state.currentContext, {
+      socialResponse: response.object.socialResponse,
+    });
+
+    return response.object;
   }
 }
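Read together with the file list, the hunk above corresponds to package/llm/orchestrator/index.ts: the old prompt-based process() flow is replaced by an AgentRuntime that owns the action queue, the task scheduler and a Redis cache, and that assembles its system prompt from RAG/CAG memory lookups. A minimal instantiation sketch follows; the import paths, the CacheConfig field names and the empty tool/interpreter arrays are illustrative assumptions, not part of this diff.

import { openai } from "@ai-sdk/openai";
// Hypothetical entry point; the diff does not show how AgentRuntime is exported.
import { AgentRuntime } from "@ai.ntellect/core";

const runtime = new AgentRuntime(
  openai("gpt-4o"),                 // LanguageModelV1 from the Vercel AI SDK
  [],                               // ActionSchema[]: tools exposed to the orchestrator
  [],                               // Interpreter[]: interpreters for final results
  { host: "localhost", port: 6379 } as any, // CacheConfig: field names assumed
  { cache: undefined, persistent: undefined } // optional memory back ends
);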
@@ -0,0 +1,14 @@
+import { QueueResult } from "../../types";
+
+export interface State {
+  currentContext: string;
+  previousActions: (string | QueueResult)[];
+  reward?: number;
+  userRequest?: string;
+  results?: string;
+}
+
+export interface Action {
+  name: string;
+  parameters: Record<string, any>;
+}
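The 14 added lines above match package/llm/orchestrator/types.ts from the file list: State is now the single argument to AgentRuntime.process(). A hedged sketch of a call, reusing the runtime from the previous example and assuming State is exported alongside AgentRuntime (top-level await for brevity):

const state: State = {
  currentContext: "Summarise the latest items from the project RSS feed",
  previousActions: [],
};

const result = await runtime.process(state);
if (!result.shouldContinue) {
  // socialResponse, interpreter and results are optional fields of the return type above
  console.log(result.socialResponse?.response);
}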
package/memory/cache.ts
CHANGED
@@ -1,5 +1,5 @@
 import { openai } from "@ai-sdk/openai";
-import { cosineSimilarity, embed } from "ai";
+import { cosineSimilarity, embed, EmbeddingModel } from "ai";
 import { createClient } from "redis";
 import {
   CacheMemoryOptions,
@@ -7,16 +7,16 @@ import {
   CreateMemoryInput,
   MatchOptions,
   MemoryScope,
-  MemoryType,
-  QueueResult,
 } from "../types";

 export class CacheMemory {
   private redis;
   private readonly CACHE_PREFIX: string;
   private readonly CACHE_TTL: number;
+  private readonly embeddingModel: EmbeddingModel<string>;

-  constructor(options: CacheMemoryOptions
+  constructor(options: CacheMemoryOptions) {
+    this.embeddingModel = options.embeddingModel;
     const ttlInHours = options.cacheTTL ?? 1;
     this.CACHE_TTL = ttlInHours * 60 * 60;
     this.CACHE_PREFIX = options.cachePrefix ?? "memory:";
@@ -44,11 +44,11 @@ export class CacheMemory {
     }
   }

-  private async storeMemory(memory: CacheMemoryType) {
+  private async storeMemory(memory: CacheMemoryType, ttl?: number) {
     const prefix = this.CACHE_PREFIX;
     const key = `${prefix}${memory.id}`;
     const result = await this.redis.set(key, JSON.stringify(memory), {
-      EX: this.CACHE_TTL,
+      EX: ttl || this.CACHE_TTL,
     });
     console.log("Cache memory created:", result);
   }
@@ -58,9 +58,9 @@ export class CacheMemory {
     options: MatchOptions & { userId?: string; scope?: MemoryScope } = {}
   ): Promise<
     {
-
-      similarityPercentage: number;
+      data: any;
       query: string;
+      createdAt: Date;
     }[]
   > {
     console.log("\nSearching in cache");
@@ -72,7 +72,7 @@ export class CacheMemory {
       value: query,
     });

-    const memories = await this.getAllMemories(
+    const memories = await this.getAllMemories();
     console.log(`\nFound ${memories.length} cached queries to compare`);

     const matches = memories
@@ -80,7 +80,7 @@ export class CacheMemory {
         const similarity = cosineSimilarity(embedding, memory.embedding);
         const similarityPercentage = (similarity + 1) * 50;
         return {
-
+          data: memory.data,
           query: memory.query,
           similarityPercentage,
           createdAt: memory.createdAt,
@@ -103,6 +103,7 @@ export class CacheMemory {
     results.forEach((match, index) => {
       console.log(`\n${index + 1}. Match Details:`);
       console.log(`   Query: ${match.query}`);
+      console.log(`   Data: ${JSON.stringify(match.data)}`);
       console.log(`   Similarity: ${match.similarityPercentage.toFixed(2)}%`);
       console.log("-".repeat(50));
     });
@@ -110,13 +111,16 @@ export class CacheMemory {
       console.log("\nNo similar queries found in cache");
     }

-    return results
+    return results.map((match) => {
+      return {
+        data: match.data,
+        query: match.query,
+        createdAt: match.createdAt,
+      };
+    });
   }

-  async getAllMemories(
-    scope?: MemoryScope,
-    userId?: string
-  ): Promise<CacheMemoryType[]> {
+  async getAllMemories(): Promise<CacheMemoryType[]> {
     const keys = await this.redis.keys(`${this.CACHE_PREFIX}*`);
     const memories = await this.getMemoriesFromKeys(keys);

@@ -140,11 +144,10 @@ export class CacheMemory {
     input: CreateMemoryInput
   ): Promise<CacheMemoryType | undefined> {
     console.log("\nProcessing new memory creation");
-    console.log("Content:", input.
-    console.log("
-    console.log("Scope:", input.scope);
+    console.log("Content:", input.query);
+    console.log("TTL:", input.ttl ? `${input.ttl} seconds` : "default");

-    const existingPattern = await this.findSimilarActions(input.
+    const existingPattern = await this.findSimilarActions(input.query, {
       similarityThreshold: 95,
       userId: input.userId,
       scope: input.scope,
@@ -156,7 +159,8 @@ export class CacheMemory {
       existingPattern.forEach((match, index) => {
         console.log(`\n${index + 1}. Existing Match:`);
         console.log(`   Query: ${match.query}`);
-        console.log(`
+        console.log(`   Data: ${JSON.stringify(match.data)}`);
+        console.log(`   Created At: ${match.createdAt}`);
       });
       console.log("\nSkipping creation of new memory");
       return;
@@ -166,11 +170,11 @@ export class CacheMemory {

     const memory = await this.createSingleMemory({
       id: crypto.randomUUID(),
-
-      type: input.type,
+      query: input.query,
       data: input.data,
       userId: input.userId,
       scope: input.scope,
+      ttl: input.ttl,
     });

     return memory;
@@ -178,28 +182,27 @@ export class CacheMemory {

   private async createSingleMemory(params: {
     id: string;
-
-    type: MemoryType;
+    query: string;
     data: any;
     userId?: string;
     scope?: MemoryScope;
+    ttl?: number;
   }): Promise<CacheMemoryType> {
     console.log("\nCreating new cache memory");
     console.log("ID:", params.id);
-    console.log("Content:", params.
+    console.log("Content:", params.query);

     console.log("\nGenerating embedding...");
     const { embedding } = await embed({
-      model:
-      value: params.
+      model: this.embeddingModel,
+      value: params.query,
     });
     console.log("Embedding generated successfully");

     const memory: CacheMemoryType = {
       id: params.id,
-      type: params.type,
       data: params.data,
-      query: params.
+      query: params.query,
       embedding,
       userId: params.userId,
       scope:
@@ -207,8 +210,11 @@ export class CacheMemory {
       createdAt: new Date(),
     };

-    await this.storeMemory(memory);
-    console.log("
+    await this.storeMemory(memory, params.ttl);
+    console.log("Short-term memory created and stored successfully", {
+      ...memory,
+      ttl: params.ttl || this.CACHE_TTL,
+    });

     return memory;
   }
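In memory/cache.ts the hard-coded MemoryType and embedding model give way to an injected EmbeddingModel, the stored field becomes query, and both storeMemory and createMemory accept a per-entry TTL in seconds. A rough usage sketch, assuming CacheMemoryOptions exposes the fields the constructor reads (embeddingModel, cacheTTL, cachePrefix) plus whatever Redis connection settings the class already required:

import { openai } from "@ai-sdk/openai";
// Hypothetical import path for the class shown in this diff.
import { CacheMemory } from "@ai.ntellect/core/memory/cache";

const cache = new CacheMemory({
  embeddingModel: openai.embedding("text-embedding-3-small"),
  cacheTTL: 1,            // hours; converted to seconds by the constructor
  cachePrefix: "memory:",
} as any);                // remaining CacheMemoryOptions fields are not visible in these hunks

await cache.createMemory({
  query: "fetch the latest RSS items",
  data: { items: 12 },
  ttl: 600,               // seconds; overrides the default TTL for this entry only
});

const similar = await cache.findSimilarActions("latest RSS items", {
  similarityThreshold: 80,
  maxResults: 3,
});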