@looopy-ai/aws 2.1.22 → 2.1.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

@@ -7,6 +7,7 @@ export interface AgentCoreMemoryMessageStoreConfig {
     client?: BedrockAgentCoreClient;
     extractActorId?: (contextId: string) => string;
     longTermMemoryNamespace?: string;
+    initialFetchLimit?: number;
 }
 export declare class AgentCoreMemoryMessageStore implements MessageStore {
     private readonly memoryId;
@@ -14,6 +15,8 @@ export declare class AgentCoreMemoryMessageStore implements MessageStore {
     private readonly includeLongTermMemories;
     private readonly longTermMemoryNamespace?;
     private readonly client;
+    private readonly initialFetchLimit;
+    private readonly cache;
     constructor(config: AgentCoreMemoryMessageStoreConfig);
     append(contextId: string, messages: LLMMessage[]): Promise<void>;
     getRecent(contextId: string, options?: {
@@ -25,6 +28,8 @@ export declare class AgentCoreMemoryMessageStore implements MessageStore {
     getRange(contextId: string, startIndex: number, endIndex: number): Promise<LLMMessage[]>;
     compact(_contextId: string, _options?: CompactionOptions): Promise<CompactionResult>;
     clear(contextId: string): Promise<void>;
+    private ensureCache;
+    private loadCacheIfNeeded;
     searchMemories(query: string, options?: {
         maxResults?: number;
     }): Promise<unknown[]>;
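
For context, a minimal configuration sketch of the new option (not taken from the diff): it assumes the class is exported from the package root and that memoryId and agentId are required config fields, which is only inferred from the constructor body further down; the literal values are placeholders.

import { AgentCoreMemoryMessageStore } from '@looopy-ai/aws'; // assumed export path

const store = new AgentCoreMemoryMessageStore({
    memoryId: 'example-agentcore-memory-id', // placeholder value
    agentId: 'example-agent',                // placeholder value
    // New in this release: caps the first ListEvents fetch per context
    // when no explicit limit is requested; the implementation defaults to 500.
    initialFetchLimit: 200,
});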

@@ -6,11 +6,14 @@ export class AgentCoreMemoryMessageStore {
     includeLongTermMemories;
     longTermMemoryNamespace;
     client;
+    initialFetchLimit;
+    cache = new Map();
     constructor(config) {
         this.memoryId = config.memoryId;
         this.actorId = config.agentId;
         this.includeLongTermMemories = !!config.longTermMemoryNamespace;
         this.longTermMemoryNamespace = config.longTermMemoryNamespace;
+        this.initialFetchLimit = config.initialFetchLimit ?? 500;
         this.client =
             config.client ||
                 new BedrockAgentCoreClient({
@@ -18,7 +21,9 @@ export class AgentCoreMemoryMessageStore {
                 });
     }
     async append(contextId, messages) {
+        const cache = this.ensureCache(contextId);
         for (const message of messages) {
+            cache.push(message);
             const command = new CreateEventCommand({
                 memoryId: this.memoryId,
                 actorId: this.actorId,
@@ -50,14 +55,8 @@ export class AgentCoreMemoryMessageStore {
         }
     }
     async getRecent(contextId, options) {
-        const command = new ListEventsCommand({
-            memoryId: this.memoryId,
-            actorId: this.actorId,
-            sessionId: contextId,
-            maxResults: options?.maxMessages ?? 50,
-        });
-        const response = await this.client.send(command);
-        const messages = this.convertEventsToMessages(response.events ?? []);
+        const cache = await this.loadCacheIfNeeded(contextId, options?.maxMessages);
+        const messages = options?.maxMessages ? cache.slice(-options.maxMessages) : cache.slice();
         if (this.includeLongTermMemories && messages.length > 0) {
             const longTerm = await this.retrieveLongTermMemories(this.actorId, 'relevant context');
             if (longTerm.length > 0) {
@@ -70,18 +69,19 @@ export class AgentCoreMemoryMessageStore {
         if (options?.maxTokens) {
             return trimToTokenBudget(messages, options.maxTokens);
         }
-        return messages;
+        return messages.slice();
     }
     async getAll(contextId) {
-        return this.getRecent(contextId, { maxMessages: 1000 });
+        const cache = await this.loadCacheIfNeeded(contextId);
+        return cache.slice();
     }
     async getCount(contextId) {
-        const messages = await this.getRecent(contextId);
-        return messages.length;
+        const cache = await this.loadCacheIfNeeded(contextId);
+        return cache.length;
     }
     async getRange(contextId, startIndex, endIndex) {
-        const all = await this.getAll(contextId);
-        return all.slice(startIndex, endIndex);
+        const cache = await this.loadCacheIfNeeded(contextId, endIndex);
+        return cache.slice(startIndex, endIndex);
     }
     async compact(_contextId, _options) {
         return {
@@ -91,6 +91,7 @@ export class AgentCoreMemoryMessageStore {
         };
     }
     async clear(contextId) {
+        this.cache.delete(contextId);
         const list = await this.client.send(new ListEventsCommand({
             memoryId: this.memoryId,
             actorId: this.actorId,
@@ -108,6 +109,29 @@ export class AgentCoreMemoryMessageStore {
             }));
         }
     }
+    ensureCache(contextId) {
+        if (!this.cache.has(contextId)) {
+            this.cache.set(contextId, []);
+        }
+        return this.cache.get(contextId);
+    }
+    async loadCacheIfNeeded(contextId, requested) {
+        const existing = this.cache.get(contextId);
+        if (existing) {
+            return existing;
+        }
+        const maxResults = requested ?? this.initialFetchLimit;
+        const command = new ListEventsCommand({
+            memoryId: this.memoryId,
+            actorId: this.actorId,
+            sessionId: contextId,
+            maxResults,
+        });
+        const response = await this.client.send(command);
+        const messages = this.convertEventsToMessages(response.events ?? []);
+        this.cache.set(contextId, messages);
+        return messages;
+    }
     async searchMemories(query, options) {
         return this.retrieveLongTermMemories(this.actorId, query, options?.maxResults ?? 10);
     }
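
The implementation change above swaps per-call ListEvents requests for a lazily filled, per-context in-memory cache. A rough usage sketch of the resulting behaviour, continuing the configuration example earlier (the LLMMessage shape passed to append is an assumption):

// First read for a contextId issues one ListEvents call (maxResults = the requested
// maxMessages, or initialFetchLimit when unset) and caches the converted messages.
const history = await store.getRecent('session-42', { maxMessages: 20 });

// append() pushes onto the same cache, so follow-up reads need no extra round trip.
await store.append('session-42', [{ role: 'user', content: 'hello' }]); // assumed message shape
const count = await store.getCount('session-42');

// clear() drops the cache entry before deleting the remote events.
await store.clear('session-42');
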
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@looopy-ai/aws",
-  "version": "2.1.22",
+  "version": "2.1.24",
   "description": "AWS storage and providers for Looopy AI",
   "repository": {
     "url": "https://github.com/looopy-ai/lib"
@@ -55,7 +55,7 @@
     "@smithy/types": "^4.9.0",
     "hono": "^4.10.5",
     "pino-http": "^11.0.0",
-    "@looopy-ai/core": "2.1.19"
+    "@looopy-ai/core": "2.1.21"
   },
   "publishConfig": {
     "access": "public"