langchain 0.0.87 → 0.0.89
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agents/chat_convo/prompt.cjs +2 -2
- package/dist/agents/chat_convo/prompt.d.ts +2 -2
- package/dist/agents/chat_convo/prompt.js +2 -2
- package/dist/agents/structured_chat/outputParser.cjs +2 -2
- package/dist/agents/structured_chat/outputParser.d.ts +2 -1
- package/dist/agents/structured_chat/outputParser.js +2 -2
- package/dist/agents/structured_chat/prompt.cjs +1 -1
- package/dist/agents/structured_chat/prompt.d.ts +1 -1
- package/dist/agents/structured_chat/prompt.js +1 -1
- package/dist/chains/query_constructor/index.cjs +3 -3
- package/dist/chains/query_constructor/index.d.ts +1 -1
- package/dist/chains/query_constructor/index.js +1 -1
- package/dist/chat_models/openai.cjs +111 -1
- package/dist/chat_models/openai.d.ts +11 -0
- package/dist/chat_models/openai.js +109 -0
- package/dist/document_loaders/fs/unstructured.cjs +6 -0
- package/dist/document_loaders/fs/unstructured.js +6 -0
- package/dist/experimental/generative_agents/generative_agent.cjs +276 -0
- package/dist/experimental/generative_agents/generative_agent.d.ts +43 -0
- package/dist/experimental/generative_agents/generative_agent.js +272 -0
- package/dist/experimental/generative_agents/generative_agent_memory.cjs +326 -0
- package/dist/experimental/generative_agents/generative_agent_memory.d.ts +51 -0
- package/dist/experimental/generative_agents/generative_agent_memory.js +322 -0
- package/dist/experimental/generative_agents/index.cjs +7 -0
- package/dist/experimental/generative_agents/index.d.ts +2 -0
- package/dist/experimental/generative_agents/index.js +2 -0
- package/dist/llms/openai-chat.cjs +29 -20
- package/dist/llms/openai-chat.d.ts +4 -0
- package/dist/llms/openai-chat.js +29 -20
- package/dist/llms/openai.cjs +31 -20
- package/dist/llms/openai.d.ts +3 -0
- package/dist/llms/openai.js +31 -20
- package/dist/output_parsers/expression.cjs +24 -6
- package/dist/output_parsers/expression.d.ts +9 -0
- package/dist/output_parsers/expression.js +24 -6
- package/dist/output_parsers/expression_type_handlers/array_literal_expression_handler.d.ts +2 -3
- package/dist/output_parsers/expression_type_handlers/base.cjs +11 -13
- package/dist/output_parsers/expression_type_handlers/base.d.ts +15 -17
- package/dist/output_parsers/expression_type_handlers/base.js +11 -13
- package/dist/output_parsers/expression_type_handlers/boolean_literal_handler.d.ts +2 -3
- package/dist/output_parsers/expression_type_handlers/call_expression_handler.d.ts +2 -3
- package/dist/output_parsers/expression_type_handlers/factory.d.ts +2 -3
- package/dist/output_parsers/expression_type_handlers/grammar/parser_grammar.cjs +296 -0
- package/dist/output_parsers/expression_type_handlers/grammar/parser_grammar.d.ts +13 -0
- package/dist/output_parsers/expression_type_handlers/grammar/parser_grammar.js +293 -0
- package/dist/output_parsers/expression_type_handlers/identifier_handler.d.ts +2 -3
- package/dist/output_parsers/expression_type_handlers/member_expression_handler.cjs +2 -2
- package/dist/output_parsers/expression_type_handlers/member_expression_handler.d.ts +2 -3
- package/dist/output_parsers/expression_type_handlers/member_expression_handler.js +2 -2
- package/dist/output_parsers/expression_type_handlers/numeric_literal_handler.d.ts +2 -3
- package/dist/output_parsers/expression_type_handlers/object_literal_expression_handler.cjs +1 -1
- package/dist/output_parsers/expression_type_handlers/object_literal_expression_handler.d.ts +2 -3
- package/dist/output_parsers/expression_type_handlers/object_literal_expression_handler.js +1 -1
- package/dist/output_parsers/expression_type_handlers/property_assignment_handler.cjs +1 -1
- package/dist/output_parsers/expression_type_handlers/property_assignment_handler.d.ts +2 -3
- package/dist/output_parsers/expression_type_handlers/property_assignment_handler.js +1 -1
- package/dist/output_parsers/expression_type_handlers/string_literal_handler.d.ts +2 -3
- package/dist/retrievers/self_query/index.cjs +1 -1
- package/dist/retrievers/self_query/index.js +2 -2
- package/dist/retrievers/time_weighted.cjs +6 -0
- package/dist/retrievers/time_weighted.d.ts +1 -0
- package/dist/retrievers/time_weighted.js +6 -0
- package/dist/retrievers/vespa.cjs +38 -0
- package/dist/retrievers/vespa.d.ts +19 -0
- package/dist/retrievers/vespa.js +34 -0
- package/dist/tools/brave_search.cjs +57 -0
- package/dist/tools/brave_search.d.ts +12 -0
- package/dist/tools/brave_search.js +53 -0
- package/dist/tools/index.cjs +3 -1
- package/dist/tools/index.d.ts +1 -0
- package/dist/tools/index.js +1 -0
- package/dist/util/prompt-layer.cjs +28 -0
- package/dist/util/prompt-layer.d.ts +3 -0
- package/dist/util/prompt-layer.js +24 -0
- package/dist/vectorstores/milvus.cjs +2 -0
- package/dist/vectorstores/milvus.d.ts +2 -0
- package/dist/vectorstores/milvus.js +2 -0
- package/dist/vectorstores/singlestore.cjs +109 -0
- package/dist/vectorstores/singlestore.d.ts +25 -0
- package/dist/vectorstores/singlestore.js +105 -0
- package/dist/vectorstores/supabase.cjs +15 -4
- package/dist/vectorstores/supabase.d.ts +6 -3
- package/dist/vectorstores/supabase.js +15 -4
- package/experimental/generative_agents.cjs +1 -0
- package/experimental/generative_agents.d.ts +1 -0
- package/experimental/generative_agents.js +1 -0
- package/package.json +39 -5
- package/retrievers/vespa.cjs +1 -0
- package/retrievers/vespa.d.ts +1 -0
- package/retrievers/vespa.js +1 -0
- package/vectorstores/singlestore.cjs +1 -0
- package/vectorstores/singlestore.d.ts +1 -0
- package/vectorstores/singlestore.js +1 -0
|
@@ -0,0 +1,326 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.GenerativeAgentMemory = void 0;
|
|
4
|
+
const llm_chain_js_1 = require("../../chains/llm_chain.cjs");
|
|
5
|
+
const index_js_1 = require("../../prompts/index.cjs");
|
|
6
|
+
const document_js_1 = require("../../document.cjs");
|
|
7
|
+
const base_js_1 = require("../../memory/base.cjs");
|
|
8
|
+
class GenerativeAgentMemory extends base_js_1.BaseMemory {
|
|
9
|
+
constructor(llm, memoryRetriever, config) {
|
|
10
|
+
super();
|
|
11
|
+
Object.defineProperty(this, "llm", {
|
|
12
|
+
enumerable: true,
|
|
13
|
+
configurable: true,
|
|
14
|
+
writable: true,
|
|
15
|
+
value: void 0
|
|
16
|
+
});
|
|
17
|
+
Object.defineProperty(this, "memoryRetriever", {
|
|
18
|
+
enumerable: true,
|
|
19
|
+
configurable: true,
|
|
20
|
+
writable: true,
|
|
21
|
+
value: void 0
|
|
22
|
+
});
|
|
23
|
+
Object.defineProperty(this, "verbose", {
|
|
24
|
+
enumerable: true,
|
|
25
|
+
configurable: true,
|
|
26
|
+
writable: true,
|
|
27
|
+
value: void 0
|
|
28
|
+
});
|
|
29
|
+
Object.defineProperty(this, "reflectionThreshold", {
|
|
30
|
+
enumerable: true,
|
|
31
|
+
configurable: true,
|
|
32
|
+
writable: true,
|
|
33
|
+
value: void 0
|
|
34
|
+
});
|
|
35
|
+
Object.defineProperty(this, "currentPlan", {
|
|
36
|
+
enumerable: true,
|
|
37
|
+
configurable: true,
|
|
38
|
+
writable: true,
|
|
39
|
+
value: []
|
|
40
|
+
});
|
|
41
|
+
Object.defineProperty(this, "importanceWeight", {
|
|
42
|
+
enumerable: true,
|
|
43
|
+
configurable: true,
|
|
44
|
+
writable: true,
|
|
45
|
+
value: 0.15
|
|
46
|
+
});
|
|
47
|
+
Object.defineProperty(this, "aggregateImportance", {
|
|
48
|
+
enumerable: true,
|
|
49
|
+
configurable: true,
|
|
50
|
+
writable: true,
|
|
51
|
+
value: 0.0
|
|
52
|
+
});
|
|
53
|
+
Object.defineProperty(this, "maxTokensLimit", {
|
|
54
|
+
enumerable: true,
|
|
55
|
+
configurable: true,
|
|
56
|
+
writable: true,
|
|
57
|
+
value: 1200
|
|
58
|
+
});
|
|
59
|
+
Object.defineProperty(this, "queriesKey", {
|
|
60
|
+
enumerable: true,
|
|
61
|
+
configurable: true,
|
|
62
|
+
writable: true,
|
|
63
|
+
value: "queries"
|
|
64
|
+
});
|
|
65
|
+
Object.defineProperty(this, "mostRecentMemoriesTokenKey", {
|
|
66
|
+
enumerable: true,
|
|
67
|
+
configurable: true,
|
|
68
|
+
writable: true,
|
|
69
|
+
value: "recent_memories_token"
|
|
70
|
+
});
|
|
71
|
+
Object.defineProperty(this, "addMemoryKey", {
|
|
72
|
+
enumerable: true,
|
|
73
|
+
configurable: true,
|
|
74
|
+
writable: true,
|
|
75
|
+
value: "addMemory"
|
|
76
|
+
});
|
|
77
|
+
Object.defineProperty(this, "relevantMemoriesKey", {
|
|
78
|
+
enumerable: true,
|
|
79
|
+
configurable: true,
|
|
80
|
+
writable: true,
|
|
81
|
+
value: "relevant_memories"
|
|
82
|
+
});
|
|
83
|
+
Object.defineProperty(this, "relevantMemoriesSimpleKey", {
|
|
84
|
+
enumerable: true,
|
|
85
|
+
configurable: true,
|
|
86
|
+
writable: true,
|
|
87
|
+
value: "relevant_memories_simple"
|
|
88
|
+
});
|
|
89
|
+
Object.defineProperty(this, "mostRecentMemoriesKey", {
|
|
90
|
+
enumerable: true,
|
|
91
|
+
configurable: true,
|
|
92
|
+
writable: true,
|
|
93
|
+
value: "most_recent_memories"
|
|
94
|
+
});
|
|
95
|
+
Object.defineProperty(this, "nowKey", {
|
|
96
|
+
enumerable: true,
|
|
97
|
+
configurable: true,
|
|
98
|
+
writable: true,
|
|
99
|
+
value: "now"
|
|
100
|
+
});
|
|
101
|
+
Object.defineProperty(this, "reflecting", {
|
|
102
|
+
enumerable: true,
|
|
103
|
+
configurable: true,
|
|
104
|
+
writable: true,
|
|
105
|
+
value: false
|
|
106
|
+
});
|
|
107
|
+
this.llm = llm;
|
|
108
|
+
this.memoryRetriever = memoryRetriever;
|
|
109
|
+
this.verbose = config?.verbose ?? this.verbose;
|
|
110
|
+
this.reflectionThreshold =
|
|
111
|
+
config?.reflectionThreshold ?? this.reflectionThreshold;
|
|
112
|
+
this.importanceWeight = config?.importanceWeight ?? this.importanceWeight;
|
|
113
|
+
this.maxTokensLimit = config?.maxTokensLimit ?? this.maxTokensLimit;
|
|
114
|
+
}
|
|
115
|
+
getRelevantMemoriesKey() {
|
|
116
|
+
return this.relevantMemoriesKey;
|
|
117
|
+
}
|
|
118
|
+
getMostRecentMemoriesTokenKey() {
|
|
119
|
+
return this.mostRecentMemoriesTokenKey;
|
|
120
|
+
}
|
|
121
|
+
getAddMemoryKey() {
|
|
122
|
+
return this.addMemoryKey;
|
|
123
|
+
}
|
|
124
|
+
getCurrentTimeKey() {
|
|
125
|
+
return this.nowKey;
|
|
126
|
+
}
|
|
127
|
+
get memoryKeys() {
|
|
128
|
+
// Return an array of memory keys
|
|
129
|
+
return [this.relevantMemoriesKey, this.mostRecentMemoriesKey];
|
|
130
|
+
}
|
|
131
|
+
chain(prompt) {
|
|
132
|
+
const chain = new llm_chain_js_1.LLMChain({
|
|
133
|
+
llm: this.llm,
|
|
134
|
+
prompt,
|
|
135
|
+
verbose: this.verbose,
|
|
136
|
+
outputKey: "output",
|
|
137
|
+
});
|
|
138
|
+
return chain;
|
|
139
|
+
}
|
|
140
|
+
static parseList(text) {
|
|
141
|
+
// parse a newine seperates string into a list of strings
|
|
142
|
+
return text.split("\n").map((s) => s.trim());
|
|
143
|
+
}
|
|
144
|
+
async getTopicsOfReflection(lastK = 50) {
|
|
145
|
+
const prompt = index_js_1.PromptTemplate.fromTemplate("{observations}\n\n" +
|
|
146
|
+
"Given only the information above, what are the 3 most salient" +
|
|
147
|
+
" high-level questions we can answer about the subjects in" +
|
|
148
|
+
" the statements? Provide each question on a new line.\n\n");
|
|
149
|
+
const observations = this.memoryRetriever.getMemoryStream().slice(-lastK);
|
|
150
|
+
const observationStr = observations
|
|
151
|
+
.map((o) => o.pageContent)
|
|
152
|
+
.join("\n");
|
|
153
|
+
const result = await this.chain(prompt).run(observationStr);
|
|
154
|
+
return GenerativeAgentMemory.parseList(result);
|
|
155
|
+
}
|
|
156
|
+
async getInsightsOnTopic(topic, now) {
|
|
157
|
+
// generate insights on a topic of reflection, based on pertinent memories
|
|
158
|
+
const prompt = index_js_1.PromptTemplate.fromTemplate("Statements about {topic}\n" +
|
|
159
|
+
"{related_statements}\n\n" +
|
|
160
|
+
"What 5 high-level insights can you infer from the above statements?" +
|
|
161
|
+
" (example format: insight (because of 1, 5, 3))");
|
|
162
|
+
const relatedMemories = await this.fetchMemories(topic, now);
|
|
163
|
+
const relatedStatements = relatedMemories
|
|
164
|
+
.map((memory, index) => `${index + 1}. ${memory.pageContent}`)
|
|
165
|
+
.join("\n");
|
|
166
|
+
const result = await this.chain(prompt).call({
|
|
167
|
+
topic,
|
|
168
|
+
relatedStatements,
|
|
169
|
+
});
|
|
170
|
+
return GenerativeAgentMemory.parseList(result.output); // added output
|
|
171
|
+
}
|
|
172
|
+
async pauseToReflect(now) {
|
|
173
|
+
if (this.verbose) {
|
|
174
|
+
console.log("Pausing to reflect...");
|
|
175
|
+
}
|
|
176
|
+
const newInsights = [];
|
|
177
|
+
const topics = await this.getTopicsOfReflection();
|
|
178
|
+
for (const topic of topics) {
|
|
179
|
+
const insights = await this.getInsightsOnTopic(topic, now);
|
|
180
|
+
for (const insight of insights) {
|
|
181
|
+
// add memory
|
|
182
|
+
await this.addMemory(insight, now);
|
|
183
|
+
}
|
|
184
|
+
newInsights.push(...insights);
|
|
185
|
+
}
|
|
186
|
+
return newInsights;
|
|
187
|
+
}
|
|
188
|
+
async scoreMemoryImportance(memoryContent) {
|
|
189
|
+
// score the absolute importance of a given memory
|
|
190
|
+
const prompt = index_js_1.PromptTemplate.fromTemplate("On the scale of 1 to 10, where 1 is purely mundane" +
|
|
191
|
+
" (e.g., brushing teeth, making bed) and 10 is" +
|
|
192
|
+
" extremely poignant (e.g., a break up, college" +
|
|
193
|
+
" acceptance), rate the likely poignancy of the" +
|
|
194
|
+
" following piece of memory. Respond with a single integer." +
|
|
195
|
+
"\nMemory: {memory_content}" +
|
|
196
|
+
"\nRating: ");
|
|
197
|
+
const score = await this.chain(prompt).run({
|
|
198
|
+
memoryContent,
|
|
199
|
+
});
|
|
200
|
+
const strippedScore = score.trim();
|
|
201
|
+
if (this.verbose) {
|
|
202
|
+
console.log("Importance score:", strippedScore);
|
|
203
|
+
}
|
|
204
|
+
const match = strippedScore.match(/^\D*(\d+)/);
|
|
205
|
+
if (match) {
|
|
206
|
+
const capturedNumber = parseFloat(match[1]);
|
|
207
|
+
const result = (capturedNumber / 10) * this.importanceWeight;
|
|
208
|
+
return result;
|
|
209
|
+
}
|
|
210
|
+
else {
|
|
211
|
+
return 0.0;
|
|
212
|
+
}
|
|
213
|
+
}
|
|
214
|
+
async addMemory(memoryContent, now) {
|
|
215
|
+
// add an observation or memory to the agent's memory
|
|
216
|
+
const importanceScore = await this.scoreMemoryImportance(memoryContent);
|
|
217
|
+
this.aggregateImportance += importanceScore;
|
|
218
|
+
const document = new document_js_1.Document({
|
|
219
|
+
pageContent: memoryContent,
|
|
220
|
+
metadata: {
|
|
221
|
+
importance: importanceScore,
|
|
222
|
+
},
|
|
223
|
+
});
|
|
224
|
+
await this.memoryRetriever.addDocuments([document]);
|
|
225
|
+
// after an agent has processed a certain amoung of memories (as measured by aggregate importance),
|
|
226
|
+
// it is time to pause and reflect on recent events to add more synthesized memories to the agent's
|
|
227
|
+
// memory stream.
|
|
228
|
+
if (this.reflectionThreshold !== undefined &&
|
|
229
|
+
this.aggregateImportance > this.reflectionThreshold &&
|
|
230
|
+
!this.reflecting) {
|
|
231
|
+
this.reflecting = true;
|
|
232
|
+
await this.pauseToReflect(now);
|
|
233
|
+
this.aggregateImportance = 0.0;
|
|
234
|
+
this.reflecting = false;
|
|
235
|
+
}
|
|
236
|
+
}
|
|
237
|
+
// TODO: Mock "now" to simulate different times
|
|
238
|
+
async fetchMemories(observation, _now) {
|
|
239
|
+
return this.memoryRetriever.getRelevantDocuments(observation);
|
|
240
|
+
}
|
|
241
|
+
formatMemoriesDetail(relevantMemories) {
|
|
242
|
+
if (!relevantMemories.length) {
|
|
243
|
+
return "No relevant information.";
|
|
244
|
+
}
|
|
245
|
+
const contentStrings = new Set();
|
|
246
|
+
const content = [];
|
|
247
|
+
for (const memory of relevantMemories) {
|
|
248
|
+
if (memory.pageContent in contentStrings) {
|
|
249
|
+
continue;
|
|
250
|
+
}
|
|
251
|
+
contentStrings.add(memory.pageContent);
|
|
252
|
+
const createdTime = memory.metadata.created_at.toLocaleString("en-US", {
|
|
253
|
+
month: "long",
|
|
254
|
+
day: "numeric",
|
|
255
|
+
year: "numeric",
|
|
256
|
+
hour: "numeric",
|
|
257
|
+
minute: "numeric",
|
|
258
|
+
hour12: true,
|
|
259
|
+
});
|
|
260
|
+
content.push(`${createdTime}: ${memory.pageContent.trim()}`);
|
|
261
|
+
}
|
|
262
|
+
const joinedContent = content.map((mem) => `${mem}`).join("\n");
|
|
263
|
+
return joinedContent;
|
|
264
|
+
}
|
|
265
|
+
formatMemoriesSimple(relevantMemories) {
|
|
266
|
+
const joinedContent = relevantMemories
|
|
267
|
+
.map((mem) => `${mem.pageContent}`)
|
|
268
|
+
.join("; ");
|
|
269
|
+
return joinedContent;
|
|
270
|
+
}
|
|
271
|
+
async getMemoriesUntilLimit(consumedTokens) {
|
|
272
|
+
// reduce the number of tokens in the documents
|
|
273
|
+
const result = [];
|
|
274
|
+
for (const doc of this.memoryRetriever
|
|
275
|
+
.getMemoryStream()
|
|
276
|
+
.slice()
|
|
277
|
+
.reverse()) {
|
|
278
|
+
if (consumedTokens >= this.maxTokensLimit) {
|
|
279
|
+
if (this.verbose) {
|
|
280
|
+
console.log("Exceeding max tokens for LLM, filtering memories");
|
|
281
|
+
}
|
|
282
|
+
break;
|
|
283
|
+
}
|
|
284
|
+
// eslint-disable-next-line no-param-reassign
|
|
285
|
+
consumedTokens += await this.llm.getNumTokens(doc.pageContent);
|
|
286
|
+
if (consumedTokens < this.maxTokensLimit) {
|
|
287
|
+
result.push(doc);
|
|
288
|
+
}
|
|
289
|
+
}
|
|
290
|
+
return this.formatMemoriesSimple(result);
|
|
291
|
+
}
|
|
292
|
+
get memoryVariables() {
|
|
293
|
+
// input keys this memory class will load dynamically
|
|
294
|
+
return [];
|
|
295
|
+
}
|
|
296
|
+
async loadMemoryVariables(inputs) {
|
|
297
|
+
const queries = inputs[this.queriesKey];
|
|
298
|
+
const now = inputs[this.nowKey];
|
|
299
|
+
if (queries !== undefined) {
|
|
300
|
+
const relevantMemories = (await Promise.all(queries.map((query) => this.fetchMemories(query, now)))).flat();
|
|
301
|
+
return {
|
|
302
|
+
[this.relevantMemoriesKey]: this.formatMemoriesDetail(relevantMemories),
|
|
303
|
+
[this.relevantMemoriesSimpleKey]: this.formatMemoriesSimple(relevantMemories),
|
|
304
|
+
};
|
|
305
|
+
}
|
|
306
|
+
const mostRecentMemoriesToken = inputs[this.mostRecentMemoriesTokenKey];
|
|
307
|
+
if (mostRecentMemoriesToken !== undefined) {
|
|
308
|
+
return {
|
|
309
|
+
[this.mostRecentMemoriesKey]: await this.getMemoriesUntilLimit(mostRecentMemoriesToken),
|
|
310
|
+
};
|
|
311
|
+
}
|
|
312
|
+
return {};
|
|
313
|
+
}
|
|
314
|
+
async saveContext(_inputs, outputs) {
|
|
315
|
+
// save the context of this model run to memory
|
|
316
|
+
const mem = outputs[this.addMemoryKey];
|
|
317
|
+
const now = outputs[this.nowKey];
|
|
318
|
+
if (mem) {
|
|
319
|
+
await this.addMemory(mem, now);
|
|
320
|
+
}
|
|
321
|
+
}
|
|
322
|
+
clear() {
|
|
323
|
+
// TODO: clear memory contents
|
|
324
|
+
}
|
|
325
|
+
}
|
|
326
|
+
exports.GenerativeAgentMemory = GenerativeAgentMemory;
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import { LLMChain } from "../../chains/llm_chain.js";
import { PromptTemplate } from "../../prompts/index.js";
import { BaseLLM } from "../../llms/base.js";
import { Document } from "../../document.js";
import { TimeWeightedVectorStoreRetriever } from "../../retrievers/time_weighted.js";
import { BaseMemory, InputValues, OutputValues } from "../../memory/base.js";
/** Configuration options for {@link GenerativeAgentMemory}. */
export type GenerativeAgentMemoryConfig = {
    /** Aggregate importance above which the agent pauses to reflect. */
    reflectionThreshold?: number;
    /** Weight applied to the normalized 1-10 importance score. */
    importanceWeight?: number;
    /** When true, chain runs and scoring are logged to the console. */
    verbose?: boolean;
    /** Token budget used by `getMemoriesUntilLimit`. */
    maxTokensLimit?: number;
};
/**
 * Memory for a generative agent: observations live in a time-weighted
 * vector-store retriever, each new memory is importance-scored by the LLM,
 * and accumulated importance triggers a "reflection" pass that writes
 * synthesized insights back into the memory stream.
 */
export declare class GenerativeAgentMemory extends BaseMemory {
    /** LLM used for importance scoring and reflection. */
    llm: BaseLLM;
    /** Retriever backing the agent's memory stream. */
    memoryRetriever: TimeWeightedVectorStoreRetriever;
    verbose: boolean;
    /** Reflection trigger; reflection is disabled while undefined. */
    reflectionThreshold?: number;
    currentPlan: string[];
    importanceWeight: number;
    private aggregateImportance;
    private maxTokensLimit;
    /** Input key holding the list of queries for relevant-memory lookup. */
    queriesKey: string;
    /** Input key holding the token budget for recent-memory loading. */
    mostRecentMemoriesTokenKey: string;
    /** Output key whose value is stored as a new memory in `saveContext`. */
    addMemoryKey: string;
    relevantMemoriesKey: string;
    relevantMemoriesSimpleKey: string;
    mostRecentMemoriesKey: string;
    /** Input/output key carrying the current time (a Date). */
    nowKey: string;
    /** Re-entrancy guard: true while a reflection pass is in progress. */
    reflecting: boolean;
    constructor(llm: BaseLLM, memoryRetriever: TimeWeightedVectorStoreRetriever, config?: GenerativeAgentMemoryConfig);
    getRelevantMemoriesKey(): string;
    getMostRecentMemoriesTokenKey(): string;
    getAddMemoryKey(): string;
    getCurrentTimeKey(): string;
    get memoryKeys(): string[];
    /** Build an LLMChain over the prompt using this memory's LLM. */
    chain(prompt: PromptTemplate): LLMChain;
    /** Split a newline-separated string into trimmed lines. */
    static parseList(text: string): string[];
    /** Ask the LLM for salient questions about the `lastK` recent memories. */
    getTopicsOfReflection(lastK?: number): Promise<string[]>;
    /** Generate insights about `topic` from pertinent memories. */
    getInsightsOnTopic(topic: string, now?: Date): Promise<string[]>;
    /** Reflect on recent memories; returns newly synthesized insights. */
    pauseToReflect(now?: Date): Promise<string[]>;
    /** Score a memory's importance (normalized, weighted). */
    scoreMemoryImportance(memoryContent: string): Promise<number>;
    /** Store a new memory, possibly triggering a reflection pass. */
    addMemory(memoryContent: string, now?: Date): Promise<void>;
    /** Retrieve memories relevant to `observation`. */
    fetchMemories(observation: string, _now?: Date): Promise<Document[]>;
    /** Format memories as timestamped lines, de-duplicated by content. */
    formatMemoriesDetail(relevantMemories: Document[]): string;
    /** Join memory contents with "; " separators. */
    formatMemoriesSimple(relevantMemories: Document[]): string;
    /** Collect newest-first memories until the token budget is spent. */
    getMemoriesUntilLimit(consumedTokens: number): Promise<string>;
    get memoryVariables(): string[];
    loadMemoryVariables(inputs: InputValues): Promise<Record<string, string>>;
    saveContext(_inputs: InputValues, outputs: OutputValues): Promise<void>;
    clear(): void;
}
|
|
@@ -0,0 +1,322 @@
|
|
|
1
|
+
import { LLMChain } from "../../chains/llm_chain.js";
import { PromptTemplate } from "../../prompts/index.js";
import { Document } from "../../document.js";
import { BaseMemory } from "../../memory/base.js";
/**
 * Memory for a generative agent.
 *
 * Observations are stored in a time-weighted vector-store retriever. Each new
 * memory is scored for importance by the LLM; once the running importance sum
 * crosses `reflectionThreshold`, the agent pauses to "reflect" and writes
 * synthesized, higher-level insights back into the memory stream.
 */
export class GenerativeAgentMemory extends BaseMemory {
    /**
     * @param llm LLM used for importance scoring and reflection.
     * @param memoryRetriever Time-weighted retriever backing the memory stream.
     * @param config Optional overrides: reflectionThreshold, importanceWeight,
     *               verbose, maxTokensLimit.
     */
    constructor(llm, memoryRetriever, config) {
        super();
        // Defaults; overridden from `config` below. Plain assignments create the
        // same enumerable/writable own properties the compiled output declared.
        this.verbose = false;
        this.reflectionThreshold = undefined;
        this.currentPlan = [];
        // Weight applied to the raw 1-10 poignancy score (after /10 scaling).
        this.importanceWeight = 0.15;
        // Running sum of importance scores since the last reflection.
        this.aggregateImportance = 0.0;
        this.maxTokensLimit = 1200;
        // Keys read from / written to chain inputs and outputs.
        this.queriesKey = "queries";
        this.mostRecentMemoriesTokenKey = "recent_memories_token";
        this.addMemoryKey = "addMemory";
        this.relevantMemoriesKey = "relevant_memories";
        this.relevantMemoriesSimpleKey = "relevant_memories_simple";
        this.mostRecentMemoriesKey = "most_recent_memories";
        this.nowKey = "now";
        // Re-entrancy guard: reflection adds memories, which would otherwise
        // trigger reflection again.
        this.reflecting = false;
        this.llm = llm;
        this.memoryRetriever = memoryRetriever;
        this.verbose = config?.verbose ?? this.verbose;
        this.reflectionThreshold =
            config?.reflectionThreshold ?? this.reflectionThreshold;
        this.importanceWeight = config?.importanceWeight ?? this.importanceWeight;
        this.maxTokensLimit = config?.maxTokensLimit ?? this.maxTokensLimit;
    }
    getRelevantMemoriesKey() {
        return this.relevantMemoriesKey;
    }
    getMostRecentMemoriesTokenKey() {
        return this.mostRecentMemoriesTokenKey;
    }
    getAddMemoryKey() {
        return this.addMemoryKey;
    }
    getCurrentTimeKey() {
        return this.nowKey;
    }
    get memoryKeys() {
        // Keys this memory contributes to chain inputs.
        return [this.relevantMemoriesKey, this.mostRecentMemoriesKey];
    }
    /** Build an LLMChain over `prompt` using this memory's LLM. */
    chain(prompt) {
        return new LLMChain({
            llm: this.llm,
            prompt,
            verbose: this.verbose,
            outputKey: "output",
        });
    }
    /** Parse a newline-separated string into a list of trimmed strings. */
    static parseList(text) {
        return text.split("\n").map((s) => s.trim());
    }
    /**
     * Ask the LLM for the 3 most salient high-level questions about the
     * `lastK` most recent memories.
     */
    async getTopicsOfReflection(lastK = 50) {
        const prompt = PromptTemplate.fromTemplate("{observations}\n\n" +
            "Given only the information above, what are the 3 most salient" +
            " high-level questions we can answer about the subjects in" +
            " the statements? Provide each question on a new line.\n\n");
        const observations = this.memoryRetriever.getMemoryStream().slice(-lastK);
        const observationStr = observations
            .map((o) => o.pageContent)
            .join("\n");
        const result = await this.chain(prompt).run(observationStr);
        return GenerativeAgentMemory.parseList(result);
    }
    /**
     * Generate insights on a topic of reflection, based on pertinent memories.
     */
    async getInsightsOnTopic(topic, now) {
        const prompt = PromptTemplate.fromTemplate("Statements about {topic}\n" +
            "{related_statements}\n\n" +
            "What 5 high-level insights can you infer from the above statements?" +
            " (example format: insight (because of 1, 5, 3))");
        const relatedMemories = await this.fetchMemories(topic, now);
        const relatedStatements = relatedMemories
            .map((memory, index) => `${index + 1}. ${memory.pageContent}`)
            .join("\n");
        // FIX: the template variable is `related_statements`; the previous
        // shorthand key `relatedStatements` never matched it, so the chain
        // was called with a missing input.
        const result = await this.chain(prompt).call({
            topic,
            related_statements: relatedStatements,
        });
        return GenerativeAgentMemory.parseList(result.output);
    }
    /**
     * Reflect on recent observations and add synthesized insights to memory.
     * Returns the newly generated insights.
     */
    async pauseToReflect(now) {
        if (this.verbose) {
            console.log("Pausing to reflect...");
        }
        const newInsights = [];
        const topics = await this.getTopicsOfReflection();
        for (const topic of topics) {
            const insights = await this.getInsightsOnTopic(topic, now);
            for (const insight of insights) {
                await this.addMemory(insight, now);
            }
            newInsights.push(...insights);
        }
        return newInsights;
    }
    /**
     * Score the absolute importance of a memory. The LLM rates poignancy on a
     * 1-10 scale; the result is normalized to 0-1 and multiplied by
     * `importanceWeight`.
     */
    async scoreMemoryImportance(memoryContent) {
        const prompt = PromptTemplate.fromTemplate("On the scale of 1 to 10, where 1 is purely mundane" +
            " (e.g., brushing teeth, making bed) and 10 is" +
            " extremely poignant (e.g., a break up, college" +
            " acceptance), rate the likely poignancy of the" +
            " following piece of memory. Respond with a single integer." +
            "\nMemory: {memory_content}" +
            "\nRating: ");
        // FIX: `run` takes the raw value for the chain's single input variable
        // (`memory_content`); the old code passed an object, which was
        // stringified as "[object Object]" in the prompt.
        const score = await this.chain(prompt).run(memoryContent);
        const strippedScore = score.trim();
        if (this.verbose) {
            console.log("Importance score:", strippedScore);
        }
        // Capture the first run of digits, skipping any leading non-digits.
        const match = strippedScore.match(/^\D*(\d+)/);
        if (match) {
            const capturedNumber = parseFloat(match[1]);
            return (capturedNumber / 10) * this.importanceWeight;
        }
        return 0.0;
    }
    /**
     * Add an observation or memory to the agent's memory stream. After a
     * certain amount of accumulated importance, pause to reflect and add
     * synthesized memories.
     */
    async addMemory(memoryContent, now) {
        const importanceScore = await this.scoreMemoryImportance(memoryContent);
        this.aggregateImportance += importanceScore;
        const document = new Document({
            pageContent: memoryContent,
            metadata: {
                importance: importanceScore,
            },
        });
        await this.memoryRetriever.addDocuments([document]);
        if (this.reflectionThreshold !== undefined &&
            this.aggregateImportance > this.reflectionThreshold &&
            !this.reflecting) {
            this.reflecting = true;
            await this.pauseToReflect(now);
            this.aggregateImportance = 0.0;
            this.reflecting = false;
        }
    }
    // TODO: Mock "now" to simulate different times
    async fetchMemories(observation, _now) {
        return this.memoryRetriever.getRelevantDocuments(observation);
    }
    /**
     * Format memories as "<created time>: <content>" lines, de-duplicated by
     * page content.
     */
    formatMemoriesDetail(relevantMemories) {
        if (!relevantMemories.length) {
            return "No relevant information.";
        }
        const contentStrings = new Set();
        const content = [];
        for (const memory of relevantMemories) {
            // FIX: use Set.has() for membership; the `in` operator checks
            // object property keys and never matched stored page contents,
            // so duplicates were never skipped.
            if (contentStrings.has(memory.pageContent)) {
                continue;
            }
            contentStrings.add(memory.pageContent);
            const createdTime = memory.metadata.created_at.toLocaleString("en-US", {
                month: "long",
                day: "numeric",
                year: "numeric",
                hour: "numeric",
                minute: "numeric",
                hour12: true,
            });
            content.push(`${createdTime}: ${memory.pageContent.trim()}`);
        }
        return content.join("\n");
    }
    /** Join memory page contents with "; " separators. */
    formatMemoriesSimple(relevantMemories) {
        return relevantMemories
            .map((mem) => `${mem.pageContent}`)
            .join("; ");
    }
    /**
     * Walk the memory stream from newest to oldest, collecting documents until
     * the token budget (`maxTokensLimit`) is exhausted.
     */
    async getMemoriesUntilLimit(consumedTokens) {
        const result = [];
        for (const doc of this.memoryRetriever
            .getMemoryStream()
            .slice()
            .reverse()) {
            if (consumedTokens >= this.maxTokensLimit) {
                if (this.verbose) {
                    console.log("Exceeding max tokens for LLM, filtering memories");
                }
                break;
            }
            // eslint-disable-next-line no-param-reassign
            consumedTokens += await this.llm.getNumTokens(doc.pageContent);
            if (consumedTokens < this.maxTokensLimit) {
                result.push(doc);
            }
        }
        return this.formatMemoriesSimple(result);
    }
    get memoryVariables() {
        // Input keys this memory class will load dynamically.
        return [];
    }
    /**
     * Load memory variables: either relevant memories for the given queries,
     * or the most recent memories within a token budget.
     */
    async loadMemoryVariables(inputs) {
        const queries = inputs[this.queriesKey];
        const now = inputs[this.nowKey];
        if (queries !== undefined) {
            const relevantMemories = (await Promise.all(queries.map((query) => this.fetchMemories(query, now)))).flat();
            return {
                [this.relevantMemoriesKey]: this.formatMemoriesDetail(relevantMemories),
                [this.relevantMemoriesSimpleKey]: this.formatMemoriesSimple(relevantMemories),
            };
        }
        const mostRecentMemoriesToken = inputs[this.mostRecentMemoriesTokenKey];
        if (mostRecentMemoriesToken !== undefined) {
            return {
                [this.mostRecentMemoriesKey]: await this.getMemoriesUntilLimit(mostRecentMemoriesToken),
            };
        }
        return {};
    }
    /** Save the context of this model run to memory. */
    async saveContext(_inputs, outputs) {
        const mem = outputs[this.addMemoryKey];
        const now = outputs[this.nowKey];
        if (mem) {
            await this.addMemory(mem, now);
        }
    }
    clear() {
        // TODO: clear memory contents
    }
}
|