@mastra/memory 0.3.0-alpha.7 → 0.3.0-alpha.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +13 -13
- package/CHANGELOG.md +22 -0
- package/dist/_tsup-dts-rollup.d.cts +1 -1
- package/dist/_tsup-dts-rollup.d.ts +1 -1
- package/dist/index.cjs +28 -13
- package/dist/index.js +28 -13
- package/dist/processors/index.cjs +3 -1
- package/dist/processors/index.js +3 -1
- package/package.json +2 -2
- package/src/index.ts +31 -16
- package/src/processors/index.test.ts +2 -2
- package/src/processors/token-limiter.ts +6 -3
package/.turbo/turbo-build.log
CHANGED
@@ -1,29 +1,29 @@
 
-> @mastra/memory@0.3.0-alpha.
+> @mastra/memory@0.3.0-alpha.9 build /home/runner/work/mastra/mastra/packages/memory
 > pnpm run check && tsup src/index.ts src/processors/index.ts --format esm,cjs --experimental-dts --clean --treeshake=smallest --splitting
 
 
-> @mastra/memory@0.3.0-alpha.
+> @mastra/memory@0.3.0-alpha.9 check /home/runner/work/mastra/mastra/packages/memory
 > tsc --noEmit
 
 CLI Building entry: src/index.ts, src/processors/index.ts
 CLI Using tsconfig: tsconfig.json
 CLI tsup v8.4.0
 TSC Build start
-TSC ⚡️ Build success in
+TSC ⚡️ Build success in 9622ms
 DTS Build start
 CLI Target: es2022
-CLI Cleaning output folder
-ESM Build start
-CJS Build start
 Analysis will use the bundled TypeScript version 5.8.2
 Writing package typings: /home/runner/work/mastra/mastra/packages/memory/dist/_tsup-dts-rollup.d.ts
 Analysis will use the bundled TypeScript version 5.8.2
 Writing package typings: /home/runner/work/mastra/mastra/packages/memory/dist/_tsup-dts-rollup.d.cts
-DTS ⚡️ Build success in
-
-
-
-
-
-
+DTS ⚡️ Build success in 10412ms
+CLI Cleaning output folder
+ESM Build start
+CJS Build start
+CJS dist/index.cjs 18.02 KB
+CJS dist/processors/index.cjs 5.59 KB
+CJS ⚡️ Build success in 1142ms
+ESM dist/index.js 17.83 KB
+ESM dist/processors/index.js 5.38 KB
+ESM ⚡️ Build success in 1145ms
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,27 @@
 # @mastra/memory
 
+## 0.3.0-alpha.9
+
+### Patch Changes
+
+- 000a6d4: Fixed an issue where the TokenLimiter message processor was adding new messages into the remembered messages array
+- Updated dependencies [000a6d4]
+- Updated dependencies [ed2f549]
+- Updated dependencies [c0f22b4]
+- Updated dependencies [0a033fa]
+- Updated dependencies [9c26508]
+- Updated dependencies [0f4eae3]
+- Updated dependencies [16a8648]
+  - @mastra/core@0.9.0-alpha.8
+
+## 0.3.0-alpha.8
+
+### Patch Changes
+
+- 71d9444: updated savemessage to not use mutation when hiding working memory
+- Updated dependencies [71d9444]
+  - @mastra/core@0.9.0-alpha.7
+
 ## 0.3.0-alpha.7
 
 ### Patch Changes
package/dist/_tsup-dts-rollup.d.cts
CHANGED
@@ -60,7 +60,7 @@ export declare class Memory extends MastraMemory {
         messages: MessageType[];
         memoryConfig?: MemoryConfig;
     }): Promise<MessageType[]>;
-    protected
+    protected updateMessagesToHideWorkingMemory(messages: MessageType[]): MessageType[];
     protected parseWorkingMemory(text: string): string | null;
     getWorkingMemory({ threadId }: {
         threadId: string;
package/dist/_tsup-dts-rollup.d.ts
CHANGED
@@ -60,7 +60,7 @@ export declare class Memory extends MastraMemory {
         messages: MessageType[];
         memoryConfig?: MemoryConfig;
     }): Promise<MessageType[]>;
-    protected
+    protected updateMessagesToHideWorkingMemory(messages: MessageType[]): MessageType[];
     protected parseWorkingMemory(text: string): string | null;
     getWorkingMemory({ threadId }: {
         threadId: string;
package/dist/index.cjs
CHANGED
@@ -289,13 +289,13 @@ var Memory = class extends memory.MastraMemory {
     memoryConfig
   }) {
     await this.saveWorkingMemory(messages);
-    this.
+    const updatedMessages = this.updateMessagesToHideWorkingMemory(messages);
     const config = this.getMergedThreadConfig(memoryConfig);
-    const result = this.storage.saveMessages({ messages });
+    const result = this.storage.saveMessages({ messages: updatedMessages });
     if (this.vector && config.semanticRecall) {
       let indexName;
       await Promise.all(
-
+        updatedMessages.map(async (message) => {
           if (typeof message.content !== `string` || message.content === "") return;
           const { embeddings, chunks, dimension } = await this.embedMessageContent(message.content);
           if (typeof indexName === `undefined`) {
@@ -320,22 +320,37 @@ var Memory = class extends memory.MastraMemory {
     }
     return result;
   }
-
+  updateMessagesToHideWorkingMemory(messages) {
    const workingMemoryRegex = /<working_memory>([^]*?)<\/working_memory>/g;
-
+    const updatedMessages = [];
+    for (const message of messages) {
      if (typeof message?.content === `string`) {
-
+        updatedMessages.push({
+          ...message,
+          content: message.content.replace(workingMemoryRegex, ``).trim()
+        });
      } else if (Array.isArray(message?.content)) {
-
-
-
-
-
-        delete messages[index];
-      }
+        const contentIsWorkingMemory = message.content.some(
+          (content) => (content.type === `tool-call` || content.type === `tool-result`) && content.toolName === `updateWorkingMemory`
+        );
+        if (contentIsWorkingMemory) {
+          continue;
        }
+        const newContent = message.content.map((content) => {
+          if (content.type === "text") {
+            return {
+              ...content,
+              text: content.text.replace(workingMemoryRegex, "").trim()
+            };
+          }
+          return { ...content };
+        });
+        updatedMessages.push({ ...message, content: newContent });
+      } else {
+        updatedMessages.push({ ...message });
      }
    }
+    return updatedMessages;
  }
  parseWorkingMemory(text) {
    if (!this.threadConfig.workingMemory?.enabled) return null;
package/dist/index.js
CHANGED
@@ -283,13 +283,13 @@ var Memory = class extends MastraMemory {
     memoryConfig
   }) {
     await this.saveWorkingMemory(messages);
-    this.
+    const updatedMessages = this.updateMessagesToHideWorkingMemory(messages);
     const config = this.getMergedThreadConfig(memoryConfig);
-    const result = this.storage.saveMessages({ messages });
+    const result = this.storage.saveMessages({ messages: updatedMessages });
     if (this.vector && config.semanticRecall) {
       let indexName;
       await Promise.all(
-
+        updatedMessages.map(async (message) => {
           if (typeof message.content !== `string` || message.content === "") return;
           const { embeddings, chunks, dimension } = await this.embedMessageContent(message.content);
           if (typeof indexName === `undefined`) {
@@ -314,22 +314,37 @@ var Memory = class extends MastraMemory {
     }
     return result;
   }
-
+  updateMessagesToHideWorkingMemory(messages) {
    const workingMemoryRegex = /<working_memory>([^]*?)<\/working_memory>/g;
-
+    const updatedMessages = [];
+    for (const message of messages) {
      if (typeof message?.content === `string`) {
-
+        updatedMessages.push({
+          ...message,
+          content: message.content.replace(workingMemoryRegex, ``).trim()
+        });
      } else if (Array.isArray(message?.content)) {
-
-
-
-
-
-        delete messages[index];
-      }
+        const contentIsWorkingMemory = message.content.some(
+          (content) => (content.type === `tool-call` || content.type === `tool-result`) && content.toolName === `updateWorkingMemory`
+        );
+        if (contentIsWorkingMemory) {
+          continue;
        }
+        const newContent = message.content.map((content) => {
+          if (content.type === "text") {
+            return {
+              ...content,
+              text: content.text.replace(workingMemoryRegex, "").trim()
+            };
+          }
+          return { ...content };
+        });
+        updatedMessages.push({ ...message, content: newContent });
+      } else {
+        updatedMessages.push({ ...message });
      }
    }
+    return updatedMessages;
  }
  parseWorkingMemory(text) {
    if (!this.threadConfig.workingMemory?.enabled) return null;
package/dist/processors/index.cjs
CHANGED
@@ -56,7 +56,9 @@ var TokenLimiter = class extends memory.MemoryProcessor {
       if (!message) continue;
       const messageTokens = this.countTokens(message);
       if (totalTokens + messageTokens <= this.maxTokens) {
-
+        if (i < messages.length) {
+          result.unshift(message);
+        }
         totalTokens += messageTokens;
       } else {
         this.logger.info(
package/dist/processors/index.js
CHANGED
@@ -50,7 +50,9 @@ var TokenLimiter = class extends MemoryProcessor {
       if (!message) continue;
       const messageTokens = this.countTokens(message);
       if (totalTokens + messageTokens <= this.maxTokens) {
-
+        if (i < messages.length) {
+          result.unshift(message);
+        }
         totalTokens += messageTokens;
       } else {
         this.logger.info(
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@mastra/memory",
-  "version": "0.3.0-alpha.
+  "version": "0.3.0-alpha.9",
   "description": "",
   "type": "module",
   "main": "./dist/index.js",
@@ -41,7 +41,7 @@
     "redis": "^4.7.0",
     "xxhash-wasm": "^1.1.0",
     "zod": "^3.24.2",
-    "@mastra/core": "^0.9.0-alpha.
+    "@mastra/core": "^0.9.0-alpha.8"
   },
   "devDependencies": {
     "@ai-sdk/openai": "^1.3.3",
package/src/index.ts
CHANGED
@@ -321,16 +321,16 @@ export class Memory extends MastraMemory {
     await this.saveWorkingMemory(messages);
 
     // Then strip working memory tags from all messages
-    this.
+    const updatedMessages = this.updateMessagesToHideWorkingMemory(messages);
 
     const config = this.getMergedThreadConfig(memoryConfig);
 
-    const result = this.storage.saveMessages({ messages });
+    const result = this.storage.saveMessages({ messages: updatedMessages });
 
     if (this.vector && config.semanticRecall) {
       let indexName: Promise<string>;
       await Promise.all(
-
+        updatedMessages.map(async message => {
           if (typeof message.content !== `string` || message.content === '') return;
 
           const { embeddings, chunks, dimension } = await this.embedMessageContent(message.content);
@@ -361,27 +361,42 @@ export class Memory extends MastraMemory {
     return result;
   }
 
-  protected
+  protected updateMessagesToHideWorkingMemory(messages: MessageType[]): MessageType[] {
    const workingMemoryRegex = /<working_memory>([^]*?)<\/working_memory>/g;
 
-
+    const updatedMessages: MessageType[] = [];
+
+    for (const message of messages) {
      if (typeof message?.content === `string`) {
-
+        updatedMessages.push({
+          ...message,
+          content: message.content.replace(workingMemoryRegex, ``).trim(),
+        });
      } else if (Array.isArray(message?.content)) {
-
-
-          content.text = content.text.replace(workingMemoryRegex, ``).trim();
-        }
-
-        if (
+        const contentIsWorkingMemory = message.content.some(
+          content =>
            (content.type === `tool-call` || content.type === `tool-result`) &&
-            content.toolName === `updateWorkingMemory
-
-
-
+            content.toolName === `updateWorkingMemory`,
+        );
+        if (contentIsWorkingMemory) {
+          continue;
        }
+        const newContent = message.content.map(content => {
+          if (content.type === 'text') {
+            return {
+              ...content,
+              text: content.text.replace(workingMemoryRegex, '').trim(),
+            };
+          }
+          return { ...content };
+        }) as MessageType['content'];
+        updatedMessages.push({ ...message, content: newContent });
+      } else {
+        updatedMessages.push({ ...message });
      }
    }
+
+    return updatedMessages;
  }
 
  protected parseWorkingMemory(text: string): string | null {
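A note on the src/index.ts change above: the previous implementation mutated the incoming array in place (the removed lines show `delete messages[index]` in the compiled output and direct reassignment of `content.text` in the source), whereas the new `updateMessagesToHideWorkingMemory` copies every message it keeps and returns a fresh array. The following is a minimal standalone sketch of the same pattern in TypeScript; the message shape here is a simplified stand-in for illustration, not the real `MessageType` from @mastra/core.

// Simplified stand-ins for the real message types (assumed shape, for illustration only).
type TextPart = { type: 'text'; text: string };
type ToolPart = { type: 'tool-call' | 'tool-result'; toolName: string };
type SimpleMessage = { id: string; role: string; content: string | Array<TextPart | ToolPart> };

const workingMemoryRegex = /<working_memory>([^]*?)<\/working_memory>/g;

// Returns new message objects with <working_memory> blocks stripped; never mutates the input.
function hideWorkingMemory(messages: SimpleMessage[]): SimpleMessage[] {
  const updated: SimpleMessage[] = [];
  for (const message of messages) {
    if (typeof message.content === 'string') {
      // Copy the message and strip the tag from plain string content.
      updated.push({ ...message, content: message.content.replace(workingMemoryRegex, '').trim() });
    } else if (Array.isArray(message.content)) {
      // Drop messages that only exist to call the updateWorkingMemory tool.
      const isWorkingMemoryCall = message.content.some(
        part => (part.type === 'tool-call' || part.type === 'tool-result') && part.toolName === 'updateWorkingMemory',
      );
      if (isWorkingMemoryCall) continue;
      // Copy each content part, stripping the tag from text parts.
      updated.push({
        ...message,
        content: message.content.map(part =>
          part.type === 'text' ? { ...part, text: part.text.replace(workingMemoryRegex, '').trim() } : { ...part },
        ),
      });
    } else {
      // Any other shape is passed through as a shallow copy.
      updated.push({ ...message });
    }
  }
  return updated;
}

Because the input array is never modified, callers that still hold a reference to the original `messages` are unaffected; this is what the 0.3.0-alpha.8 changelog entry about avoiding mutation refers to.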
package/src/processors/index.test.ts
CHANGED
@@ -90,7 +90,7 @@ describe('TokenLimiter', () => {
    console.log(`Estimated ${estimate} tokens, used ${used} tokens.\n`, counts);
 
    // Check if within 2% margin
-    expect(percentDifference(estimate, used)).toBeLessThanOrEqual(
+    expect(percentDifference(estimate, used)).toBeLessThanOrEqual(4);
  }
 
  const calculatorTool = createTool({
@@ -111,7 +111,7 @@ describe('TokenLimiter', () => {
    tools: { calculatorTool },
  });
 
-  describe.concurrent(`
+  describe.concurrent(`96% accuracy`, () => {
    it(`20 messages, no tools`, async () => {
      await expectTokenEstimate(
        {
package/src/processors/token-limiter.ts
CHANGED
@@ -73,7 +73,7 @@ export class TokenLimiter extends MemoryProcessor {
 
    const result: CoreMessage[] = [];
 
-    // Process messages in reverse (newest first)
+    // Process messages in reverse (newest first) so that we stop estimating tokens on old messages. Once we get to our limit of tokens there's no reason to keep processing older messages
    for (let i = allMessages.length - 1; i >= 0; i--) {
      const message = allMessages[i];
 
@@ -83,8 +83,11 @@ export class TokenLimiter extends MemoryProcessor {
      const messageTokens = this.countTokens(message);
 
      if (totalTokens + messageTokens <= this.maxTokens) {
-        // Insert at the beginning to maintain chronological order
-
+        // Insert at the beginning to maintain chronological order, but only if it's not a new message
+        if (i < messages.length) {
+          // less than messages.length because we're iterating in reverse. If the index is greater than messages.length it's a new message
+          result.unshift(message);
+        }
        totalTokens += messageTokens;
      } else {
        this.logger.info(
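A note on the token-limiter.ts change above: the new `i < messages.length` guard implies that the loop runs over the remembered messages followed by the newer ones, so indices at or past `messages.length` belong to new messages, which should count against the token budget but not be returned as remembered history. Below is a rough, self-contained TypeScript sketch of that selection logic under that assumption; `limitRememberedMessages` and its parameters are hypothetical names, and token counting is stubbed out rather than taken from the real TokenLimiter.

// Hypothetical, self-contained version of the selection loop; the names and the
// combined-array construction are assumptions, and countTokens is supplied by the caller.
function limitRememberedMessages<T>(
  remembered: T[],
  newMessages: T[],
  maxTokens: number,
  countTokens: (message: T) => number,
): T[] {
  const allMessages = [...remembered, ...newMessages];
  const result: T[] = [];
  let totalTokens = 0;

  // Newest first, so processing can stop once the token budget is spent.
  for (let i = allMessages.length - 1; i >= 0; i--) {
    const message = allMessages[i];
    if (!message) continue;
    const messageTokens = countTokens(message);
    if (totalTokens + messageTokens <= maxTokens) {
      // New messages (i >= remembered.length) consume budget but are not returned.
      if (i < remembered.length) {
        result.unshift(message);
      }
      totalTokens += messageTokens;
    } else {
      break; // the real processor logs here; this sketch simply stops
    }
  }
  return result;
}

Only the index guard and its comments are new in the diff; the reverse iteration and the budget check appear there as unchanged context lines.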