@looopy-ai/aws 2.1.23 → 2.1.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,5 @@
 import { BedrockAgentCoreClient } from '@aws-sdk/client-bedrock-agentcore';
-import type
+import { type CompactionOptions, type CompactionResult, type LLMMessage, type MessageStore } from '@looopy-ai/core';
 export interface AgentCoreMemoryMessageStoreConfig {
     memoryId: string;
     agentId: string;
@@ -7,13 +7,15 @@ export interface AgentCoreMemoryMessageStoreConfig {
     client?: BedrockAgentCoreClient;
     extractActorId?: (contextId: string) => string;
     longTermMemoryNamespace?: string;
+    initialFetchLimit?: number;
 }
 export declare class AgentCoreMemoryMessageStore implements MessageStore {
     private readonly memoryId;
     private readonly actorId;
-    private readonly includeLongTermMemories;
     private readonly longTermMemoryNamespace?;
     private readonly client;
+    private readonly initialFetchLimit;
+    private readonly messages;
     constructor(config: AgentCoreMemoryMessageStoreConfig);
     append(contextId: string, messages: LLMMessage[]): Promise<void>;
     getRecent(contextId: string, options?: {
@@ -25,12 +27,12 @@ export declare class AgentCoreMemoryMessageStore implements MessageStore {
     getRange(contextId: string, startIndex: number, endIndex: number): Promise<LLMMessage[]>;
     compact(_contextId: string, _options?: CompactionOptions): Promise<CompactionResult>;
     clear(contextId: string): Promise<void>;
+    private loadCacheIfNeeded;
     searchMemories(query: string, options?: {
         maxResults?: number;
     }): Promise<unknown[]>;
     private convertEventsToMessages;
     private retrieveLongTermMemories;
-    private formatLongTermMemories;
     private toAgentCoreRole;
     private fromAgentCoreRole;
 }
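The declaration changes above add an initialFetchLimit option and a private in-memory messages cache, and drop the includeLongTermMemories flag together with formatLongTermMemories. A minimal construction sketch, assuming the class is re-exported from the package root and using placeholder ids (region and credentials resolve through the standard AWS SDK v3 defaults unless a preconfigured client is passed):

import { AgentCoreMemoryMessageStore } from '@looopy-ai/aws';

// Field names come from AgentCoreMemoryMessageStoreConfig in this release;
// the id values below are placeholders.
const store = new AgentCoreMemoryMessageStore({
    memoryId: 'example-memory-id',
    agentId: 'example-agent-id',
    longTermMemoryNamespace: 'example-namespace', // optional; used by retrieveLongTermMemories
    initialFetchLimit: 100, // optional; the implementation defaults to 50
});

The compiled implementation diff for the same class follows.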
@@ -1,16 +1,17 @@
 import { BedrockAgentCoreClient, CreateEventCommand, DeleteEventCommand, ListEventsCommand, RetrieveMemoryRecordsCommand, } from '@aws-sdk/client-bedrock-agentcore';
-import { trimToTokenBudget } from '@looopy-ai/core';
+import { estimateTokens, trimToTokenBudget, } from '@looopy-ai/core';
 export class AgentCoreMemoryMessageStore {
     memoryId;
     actorId;
-    includeLongTermMemories;
     longTermMemoryNamespace;
     client;
+    initialFetchLimit;
+    messages = new Map();
     constructor(config) {
         this.memoryId = config.memoryId;
         this.actorId = config.agentId;
-        this.includeLongTermMemories = !!config.longTermMemoryNamespace;
         this.longTermMemoryNamespace = config.longTermMemoryNamespace;
+        this.initialFetchLimit = config.initialFetchLimit ?? 50;
         this.client =
             config.client ||
                 new BedrockAgentCoreClient({
@@ -18,6 +19,17 @@ export class AgentCoreMemoryMessageStore {
                 });
     }
     async append(contextId, messages) {
+        const stored = this.messages.get(contextId) || [];
+        const nextIndex = stored.length;
+        const newMessages = messages.map((msg, i) => ({
+            ...msg,
+            id: `msg_${contextId}_${nextIndex + i}`,
+            contextId,
+            index: nextIndex + i,
+            timestamp: new Date().toISOString(),
+            tokens: estimateTokens(msg.content),
+        }));
+        this.messages.set(contextId, [...stored, ...newMessages]);
         for (const message of messages) {
             const command = new CreateEventCommand({
                 memoryId: this.memoryId,
@@ -50,37 +62,27 @@ export class AgentCoreMemoryMessageStore {
         }
     }
     async getRecent(contextId, options) {
-
-
-
-
-
-
-        const response = await this.client.send(command);
-        const messages = this.convertEventsToMessages(response.events ?? []);
-        if (this.includeLongTermMemories && messages.length > 0) {
-            const longTerm = await this.retrieveLongTermMemories(this.actorId, 'relevant context');
-            if (longTerm.length > 0) {
-                messages.unshift({
-                    role: 'system',
-                    content: this.formatLongTermMemories(longTerm),
-                });
-            }
+        await this.loadCacheIfNeeded(contextId, options?.maxMessages);
+        const all = this.messages.get(contextId) || [];
+        const { maxMessages = 50, maxTokens } = options || {};
+        let messages = all.slice(-maxMessages);
+        if (maxTokens) {
+            messages = trimToTokenBudget(messages, maxTokens);
         }
-
-            return trimToTokenBudget(messages, options.maxTokens);
-        }
-        return messages;
+        return messages.slice();
     }
     async getAll(contextId) {
-
+        await this.loadCacheIfNeeded(contextId);
+        return (this.messages.get(contextId) || []).slice();
     }
     async getCount(contextId) {
-
+        await this.loadCacheIfNeeded(contextId);
+        const messages = this.messages.get(contextId) || [];
         return messages.length;
     }
     async getRange(contextId, startIndex, endIndex) {
-
+        await this.loadCacheIfNeeded(contextId, endIndex);
+        const all = this.messages.get(contextId) || [];
         return all.slice(startIndex, endIndex);
     }
     async compact(_contextId, _options) {
@@ -91,6 +93,7 @@ export class AgentCoreMemoryMessageStore {
         };
     }
     async clear(contextId) {
+        this.messages.delete(contextId);
         const list = await this.client.send(new ListEventsCommand({
             memoryId: this.memoryId,
             actorId: this.actorId,
@@ -108,6 +111,23 @@ export class AgentCoreMemoryMessageStore {
             }));
         }
     }
+    async loadCacheIfNeeded(contextId, requested) {
+        const existing = this.messages.get(contextId);
+        if (existing) {
+            return existing;
+        }
+        const maxResults = requested ?? this.initialFetchLimit;
+        const command = new ListEventsCommand({
+            memoryId: this.memoryId,
+            actorId: this.actorId,
+            sessionId: contextId,
+            maxResults,
+        });
+        const response = await this.client.send(command);
+        const messages = this.convertEventsToMessages(response.events ?? []);
+        this.messages.set(contextId, messages);
+        return messages;
+    }
     async searchMemories(query, options) {
         return this.retrieveLongTermMemories(this.actorId, query, options?.maxResults ?? 10);
     }
@@ -118,7 +138,8 @@ export class AgentCoreMemoryMessageStore {
            const dateB = b.eventTimestamp?.getTime() ?? 0;
            return dateA - dateB;
        });
-        for (
+        for (let i = 0; i < events.length; i++) {
+            const event = events[i];
            const message = { role: 'assistant', content: '' };
            for (const payload of event.payload ?? []) {
                if (payload.conversational) {
@@ -133,7 +154,15 @@ export class AgentCoreMemoryMessageStore {
                    message.toolCalls = blob.toolCalls;
                }
            }
-
+            const storedMessage = {
+                ...message,
+                id: event.eventId ?? `event_${i}`,
+                contextId: event.sessionId ?? '',
+                index: i,
+                timestamp: event.eventTimestamp?.toISOString() ?? new Date().toISOString(),
+                tokens: estimateTokens(message.content),
+            };
+            messages.push(storedMessage);
        }
        return messages;
    }
@@ -149,16 +178,6 @@ export class AgentCoreMemoryMessageStore {
        const response = await this.client.send(command);
        return response.memoryRecordSummaries ?? [];
    }
-    formatLongTermMemories(memories) {
-        if (memories.length === 0) {
-            return '';
-        }
-        const lines = memories.map((record) => {
-            const data = record;
-            return `- ${String(data.content || data.memory || JSON.stringify(record))}`;
-        });
-        return `Relevant context from previous sessions:\n${lines.join('\n')}`;
-    }
    toAgentCoreRole(role) {
        switch (role) {
            case 'user':
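With the compiled changes above, reads are served from a per-context cache that loadCacheIfNeeded fills lazily with a single ListEventsCommand (capped at initialFetchLimit unless a larger range is requested), and getRecent no longer prepends long-term memories as a system message. A usage sketch continuing the construction example above (placeholder ids, run inside an async function):

// First read for a context performs one ListEvents call, then serves from the cache.
const recent = await store.getRecent('example-session-id', { maxMessages: 20, maxTokens: 4000 });

// Long-term memories are no longer injected automatically; query them explicitly.
const memories = await store.searchMemories('relevant context', { maxResults: 5 });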
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@looopy-ai/aws",
-  "version": "2.1.23",
+  "version": "2.1.25",
   "description": "AWS storage and providers for Looopy AI",
   "repository": {
     "url": "https://github.com/looopy-ai/lib"
@@ -39,23 +39,23 @@
   "author": "",
   "license": "MIT",
   "dependencies": {
-    "@aws-sdk/client-bedrock-agentcore": "^3.
-    "@aws-sdk/client-dynamodb": "^3.
-    "@aws-sdk/client-secrets-manager": "^3.
-    "@aws-sdk/lib-dynamodb": "^3.
-    "@hono/node-server": "^1.19.
+    "@aws-sdk/client-bedrock-agentcore": "^3.971.0",
+    "@aws-sdk/client-dynamodb": "^3.971.0",
+    "@aws-sdk/client-secrets-manager": "^3.971.0",
+    "@aws-sdk/lib-dynamodb": "^3.971.0",
+    "@hono/node-server": "^1.19.9",
     "@opentelemetry/exporter-metrics-otlp-http": "^0.207.0",
     "@opentelemetry/exporter-trace-otlp-http": "^0.207.0",
     "@opentelemetry/instrumentation": "^0.207.0",
-    "@opentelemetry/resources": "^2.
-    "@opentelemetry/sdk-metrics": "^2.
-    "@opentelemetry/sdk-trace-base": "^2.
-    "@opentelemetry/sdk-trace-node": "^2.
-    "@opentelemetry/semantic-conventions": "^1.
-    "@smithy/types": "^4.
+    "@opentelemetry/resources": "^2.4.0",
+    "@opentelemetry/sdk-metrics": "^2.4.0",
+    "@opentelemetry/sdk-trace-base": "^2.4.0",
+    "@opentelemetry/sdk-trace-node": "^2.4.0",
+    "@opentelemetry/semantic-conventions": "^1.39.0",
+    "@smithy/types": "^4.12.0",
     "hono": "^4.10.5",
     "pino-http": "^11.0.0",
-    "@looopy-ai/core": "2.1.
+    "@looopy-ai/core": "2.1.22"
   },
   "publishConfig": {
     "access": "public"