family-ai-agent 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. package/.env.example +49 -0
  2. package/README.md +161 -0
  3. package/dist/cli/index.d.ts +3 -0
  4. package/dist/cli/index.d.ts.map +1 -0
  5. package/dist/cli/index.js +336 -0
  6. package/dist/cli/index.js.map +1 -0
  7. package/dist/config/index.d.ts +37 -0
  8. package/dist/config/index.d.ts.map +1 -0
  9. package/dist/config/index.js +68 -0
  10. package/dist/config/index.js.map +1 -0
  11. package/dist/config/models.d.ts +17 -0
  12. package/dist/config/models.d.ts.map +1 -0
  13. package/dist/config/models.js +128 -0
  14. package/dist/config/models.js.map +1 -0
  15. package/dist/core/agents/agent-factory.d.ts +31 -0
  16. package/dist/core/agents/agent-factory.d.ts.map +1 -0
  17. package/dist/core/agents/agent-factory.js +151 -0
  18. package/dist/core/agents/agent-factory.js.map +1 -0
  19. package/dist/core/agents/base-agent.d.ts +51 -0
  20. package/dist/core/agents/base-agent.d.ts.map +1 -0
  21. package/dist/core/agents/base-agent.js +245 -0
  22. package/dist/core/agents/base-agent.js.map +1 -0
  23. package/dist/core/agents/index.d.ts +8 -0
  24. package/dist/core/agents/index.d.ts.map +1 -0
  25. package/dist/core/agents/index.js +9 -0
  26. package/dist/core/agents/index.js.map +1 -0
  27. package/dist/core/agents/personalities/automation.d.ts +14 -0
  28. package/dist/core/agents/personalities/automation.d.ts.map +1 -0
  29. package/dist/core/agents/personalities/automation.js +146 -0
  30. package/dist/core/agents/personalities/automation.js.map +1 -0
  31. package/dist/core/agents/personalities/chat.d.ts +10 -0
  32. package/dist/core/agents/personalities/chat.d.ts.map +1 -0
  33. package/dist/core/agents/personalities/chat.js +132 -0
  34. package/dist/core/agents/personalities/chat.js.map +1 -0
  35. package/dist/core/agents/personalities/coding.d.ts +16 -0
  36. package/dist/core/agents/personalities/coding.d.ts.map +1 -0
  37. package/dist/core/agents/personalities/coding.js +166 -0
  38. package/dist/core/agents/personalities/coding.js.map +1 -0
  39. package/dist/core/agents/personalities/research.d.ts +13 -0
  40. package/dist/core/agents/personalities/research.d.ts.map +1 -0
  41. package/dist/core/agents/personalities/research.js +133 -0
  42. package/dist/core/agents/personalities/research.js.map +1 -0
  43. package/dist/core/agents/types.d.ts +102 -0
  44. package/dist/core/agents/types.d.ts.map +1 -0
  45. package/dist/core/agents/types.js +2 -0
  46. package/dist/core/agents/types.js.map +1 -0
  47. package/dist/core/orchestrator/graph.d.ts +118 -0
  48. package/dist/core/orchestrator/graph.d.ts.map +1 -0
  49. package/dist/core/orchestrator/graph.js +233 -0
  50. package/dist/core/orchestrator/graph.js.map +1 -0
  51. package/dist/database/client.d.ts +19 -0
  52. package/dist/database/client.d.ts.map +1 -0
  53. package/dist/database/client.js +95 -0
  54. package/dist/database/client.js.map +1 -0
  55. package/dist/index.d.ts +41 -0
  56. package/dist/index.d.ts.map +1 -0
  57. package/dist/index.js +67 -0
  58. package/dist/index.js.map +1 -0
  59. package/dist/llm/openrouter-client.d.ts +45 -0
  60. package/dist/llm/openrouter-client.d.ts.map +1 -0
  61. package/dist/llm/openrouter-client.js +155 -0
  62. package/dist/llm/openrouter-client.js.map +1 -0
  63. package/dist/memory/conversation/index.d.ts +37 -0
  64. package/dist/memory/conversation/index.d.ts.map +1 -0
  65. package/dist/memory/conversation/index.js +196 -0
  66. package/dist/memory/conversation/index.js.map +1 -0
  67. package/dist/memory/index.d.ts +4 -0
  68. package/dist/memory/index.d.ts.map +1 -0
  69. package/dist/memory/index.js +5 -0
  70. package/dist/memory/index.js.map +1 -0
  71. package/dist/memory/knowledge-base/index.d.ts +51 -0
  72. package/dist/memory/knowledge-base/index.d.ts.map +1 -0
  73. package/dist/memory/knowledge-base/index.js +222 -0
  74. package/dist/memory/knowledge-base/index.js.map +1 -0
  75. package/dist/memory/longterm/vector-store.d.ts +44 -0
  76. package/dist/memory/longterm/vector-store.d.ts.map +1 -0
  77. package/dist/memory/longterm/vector-store.js +229 -0
  78. package/dist/memory/longterm/vector-store.js.map +1 -0
  79. package/dist/safety/audit-logger.d.ts +68 -0
  80. package/dist/safety/audit-logger.d.ts.map +1 -0
  81. package/dist/safety/audit-logger.js +215 -0
  82. package/dist/safety/audit-logger.js.map +1 -0
  83. package/dist/safety/guardrails/input-guardrail.d.ts +21 -0
  84. package/dist/safety/guardrails/input-guardrail.d.ts.map +1 -0
  85. package/dist/safety/guardrails/input-guardrail.js +145 -0
  86. package/dist/safety/guardrails/input-guardrail.js.map +1 -0
  87. package/dist/safety/guardrails/output-guardrail.d.ts +18 -0
  88. package/dist/safety/guardrails/output-guardrail.d.ts.map +1 -0
  89. package/dist/safety/guardrails/output-guardrail.js +125 -0
  90. package/dist/safety/guardrails/output-guardrail.js.map +1 -0
  91. package/dist/safety/index.d.ts +4 -0
  92. package/dist/safety/index.d.ts.map +1 -0
  93. package/dist/safety/index.js +5 -0
  94. package/dist/safety/index.js.map +1 -0
  95. package/dist/utils/errors.d.ts +36 -0
  96. package/dist/utils/errors.d.ts.map +1 -0
  97. package/dist/utils/errors.js +94 -0
  98. package/dist/utils/errors.js.map +1 -0
  99. package/dist/utils/logger.d.ts +8 -0
  100. package/dist/utils/logger.d.ts.map +1 -0
  101. package/dist/utils/logger.js +47 -0
  102. package/dist/utils/logger.js.map +1 -0
  103. package/docker/init-db.sql +149 -0
  104. package/docker/sandbox/Dockerfile.sandbox +29 -0
  105. package/docker-compose.yml +61 -0
  106. package/package.json +80 -0
  107. package/src/cli/index.ts +392 -0
  108. package/src/config/index.ts +85 -0
  109. package/src/config/models.ts +156 -0
  110. package/src/core/agents/agent-factory.ts +192 -0
  111. package/src/core/agents/base-agent.ts +333 -0
  112. package/src/core/agents/index.ts +27 -0
  113. package/src/core/agents/personalities/automation.ts +202 -0
  114. package/src/core/agents/personalities/chat.ts +159 -0
  115. package/src/core/agents/personalities/coding.ts +227 -0
  116. package/src/core/agents/personalities/research.ts +177 -0
  117. package/src/core/agents/types.ts +124 -0
  118. package/src/core/orchestrator/graph.ts +305 -0
  119. package/src/database/client.ts +109 -0
  120. package/src/index.ts +104 -0
  121. package/src/llm/openrouter-client.ts +218 -0
  122. package/src/memory/conversation/index.ts +313 -0
  123. package/src/memory/index.ts +23 -0
  124. package/src/memory/knowledge-base/index.ts +357 -0
  125. package/src/memory/longterm/vector-store.ts +364 -0
  126. package/src/safety/audit-logger.ts +357 -0
  127. package/src/safety/guardrails/input-guardrail.ts +191 -0
  128. package/src/safety/guardrails/output-guardrail.ts +160 -0
  129. package/src/safety/index.ts +21 -0
  130. package/src/utils/errors.ts +120 -0
  131. package/src/utils/logger.ts +74 -0
  132. package/tsconfig.json +37 -0
package/src/memory/knowledge-base/index.ts
@@ -0,0 +1,357 @@
+ import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
+ import { query, transaction } from '../../database/client.js';
+ import { getOpenRouterClient } from '../../llm/openrouter-client.js';
+ import { createLogger, logMemoryOperation } from '../../utils/logger.js';
+ import { MemoryError } from '../../utils/errors.js';
+
+ const logger = createLogger('KnowledgeBase');
+
+ export interface Document {
+   id: string;
+   userId?: string;
+   filename: string;
+   fileType?: string;
+   fileSize?: number;
+   content?: string;
+   metadata: Record<string, unknown>;
+   createdAt: Date;
+ }
+
+ export interface DocumentChunk {
+   id: string;
+   documentId: string;
+   chunkIndex: number;
+   content: string;
+   metadata: Record<string, unknown>;
+ }
+
+ export interface SearchResult {
+   documentId: string;
+   chunkId: string;
+   content: string;
+   similarity: number;
+   metadata: Record<string, unknown>;
+   filename: string;
+ }
+
+ export class KnowledgeBase {
+   private client = getOpenRouterClient();
+   private textSplitter = new RecursiveCharacterTextSplitter({
+     chunkSize: 1000,
+     chunkOverlap: 200,
+     separators: ['\n\n', '\n', '. ', ' ', ''],
+   });
+
+   // Add a document to the knowledge base
+   async addDocument(
+     content: string,
+     filename: string,
+     options: {
+       userId?: string;
+       fileType?: string;
+       fileSize?: number;
+       metadata?: Record<string, unknown>;
+     } = {}
+   ): Promise<string> {
+     try {
+       // Split content into chunks
+       const chunks = await this.textSplitter.splitText(content);
+       logger.debug('Document split into chunks', {
+         filename,
+         chunkCount: chunks.length,
+       });
+
+       // Generate embeddings for all chunks
+       const embeddings = await this.client.embed(chunks);
+
+       // Store document and chunks in transaction
+       const documentId = await transaction(async (client) => {
+         // Insert document
+         const docResult = await client.query<{ id: string }>(
+           `INSERT INTO documents (user_id, filename, file_type, file_size, content, metadata)
+            VALUES ($1, $2, $3, $4, $5, $6)
+            RETURNING id`,
+           [
+             options.userId ?? null,
+             filename,
+             options.fileType ?? null,
+             options.fileSize ?? null,
+             content,
+             JSON.stringify(options.metadata ?? {}),
+           ]
+         );
+
+         const docId = docResult.rows[0]?.id;
+         if (!docId) {
+           throw new MemoryError('Failed to insert document');
+         }
+
+         // Insert chunks with embeddings
+         for (let i = 0; i < chunks.length; i++) {
+           const chunk = chunks[i]!;
+           const embedding = embeddings[i]!;
+
+           await client.query(
+             `INSERT INTO document_chunks (document_id, chunk_index, content, embedding, metadata)
+              VALUES ($1, $2, $3, $4, $5)`,
+             [
+               docId,
+               i,
+               chunk,
+               `[${embedding.join(',')}]`,
+               JSON.stringify({ chunkIndex: i, totalChunks: chunks.length }),
+             ]
+           );
+         }
+
+         return docId;
+       });
+
+       logMemoryOperation('write', 'knowledge_base', {
+         documentId,
+         filename,
+         chunkCount: chunks.length,
+       });
+
+       return documentId;
+     } catch (error) {
+       logger.error('Failed to add document', { error, filename });
+       throw new MemoryError(
+         `Failed to add document: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+
+   // Search the knowledge base
+   async search(
+     queryText: string,
+     options: {
+       userId?: string;
+       limit?: number;
+       minSimilarity?: number;
+     } = {}
+   ): Promise<SearchResult[]> {
+     const { userId, limit = 5, minSimilarity = 0.7 } = options;
+
+     try {
+       // Generate query embedding
+       const queryEmbedding = await this.client.embedQuery(queryText);
+
+       // Build query
+       let sql = `
+         SELECT
+           dc.id as chunk_id,
+           dc.document_id,
+           dc.content,
+           dc.metadata as chunk_metadata,
+           d.filename,
+           d.metadata as doc_metadata,
+           1 - (dc.embedding <=> $1::vector) as similarity
+         FROM document_chunks dc
+         JOIN documents d ON d.id = dc.document_id
+         WHERE 1 - (dc.embedding <=> $1::vector) >= $2
+       `;
+       const params: unknown[] = [`[${queryEmbedding.join(',')}]`, minSimilarity];
+       let paramIndex = 3;
+
+       if (userId) {
+         sql += ` AND d.user_id = $${paramIndex}`;
+         params.push(userId);
+         paramIndex++;
+       }
+
+       sql += ` ORDER BY similarity DESC LIMIT $${paramIndex}`;
+       params.push(limit);
+
+       const result = await query<{
+         chunk_id: string;
+         document_id: string;
+         content: string;
+         chunk_metadata: Record<string, unknown>;
+         filename: string;
+         doc_metadata: Record<string, unknown>;
+         similarity: number;
+       }>(sql, params);
+
+       const searchResults: SearchResult[] = result.rows.map((row) => ({
+         chunkId: row.chunk_id,
+         documentId: row.document_id,
+         content: row.content,
+         similarity: row.similarity,
+         metadata: { ...row.doc_metadata, ...row.chunk_metadata },
+         filename: row.filename,
+       }));
+
+       logMemoryOperation('search', 'knowledge_base', {
+         queryLength: queryText.length,
+         resultsCount: searchResults.length,
+       });
+
+       return searchResults;
+     } catch (error) {
+       logger.error('Failed to search knowledge base', { error });
+       throw new MemoryError(
+         `Failed to search knowledge base: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+
+   // Get document by ID
+   async getDocument(id: string): Promise<Document | null> {
+     try {
+       const result = await query<{
+         id: string;
+         user_id: string | null;
+         filename: string;
+         file_type: string | null;
+         file_size: number | null;
+         content: string | null;
+         metadata: Record<string, unknown>;
+         created_at: Date;
+       }>(
+         `SELECT id, user_id, filename, file_type, file_size, content, metadata, created_at
+          FROM documents WHERE id = $1`,
+         [id]
+       );
+
+       if (result.rows.length === 0) {
+         return null;
+       }
+
+       const row = result.rows[0]!;
+       return {
+         id: row.id,
+         userId: row.user_id ?? undefined,
+         filename: row.filename,
+         fileType: row.file_type ?? undefined,
+         fileSize: row.file_size ?? undefined,
+         content: row.content ?? undefined,
+         metadata: row.metadata,
+         createdAt: row.created_at,
+       };
+     } catch (error) {
+       logger.error('Failed to get document', { error, id });
+       throw new MemoryError(
+         `Failed to get document: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+
+   // List documents for a user
+   async listDocuments(
+     userId: string,
+     options: { limit?: number; offset?: number } = {}
+   ): Promise<Document[]> {
+     const { limit = 20, offset = 0 } = options;
+
+     try {
+       const result = await query<{
+         id: string;
+         user_id: string | null;
+         filename: string;
+         file_type: string | null;
+         file_size: number | null;
+         metadata: Record<string, unknown>;
+         created_at: Date;
+       }>(
+         `SELECT id, user_id, filename, file_type, file_size, metadata, created_at
+          FROM documents
+          WHERE user_id = $1
+          ORDER BY created_at DESC
+          LIMIT $2 OFFSET $3`,
+         [userId, limit, offset]
+       );
+
+       return result.rows.map((row) => ({
+         id: row.id,
+         userId: row.user_id ?? undefined,
+         filename: row.filename,
+         fileType: row.file_type ?? undefined,
+         fileSize: row.file_size ?? undefined,
+         metadata: row.metadata,
+         createdAt: row.created_at,
+       }));
+     } catch (error) {
+       logger.error('Failed to list documents', { error });
+       throw new MemoryError(
+         `Failed to list documents: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+
+   // Delete document and its chunks
+   async deleteDocument(id: string): Promise<void> {
+     try {
+       await query('DELETE FROM documents WHERE id = $1', [id]);
+       logMemoryOperation('write', 'knowledge_base', {
+         documentId: id,
+         action: 'delete',
+       });
+     } catch (error) {
+       logger.error('Failed to delete document', { error, id });
+       throw new MemoryError(
+         `Failed to delete document: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+
+   // Get chunks for a document
+   async getDocumentChunks(documentId: string): Promise<DocumentChunk[]> {
+     try {
+       const result = await query<{
+         id: string;
+         document_id: string;
+         chunk_index: number;
+         content: string;
+         metadata: Record<string, unknown>;
+       }>(
+         `SELECT id, document_id, chunk_index, content, metadata
+          FROM document_chunks
+          WHERE document_id = $1
+          ORDER BY chunk_index ASC`,
+         [documentId]
+       );
+
+       return result.rows.map((row) => ({
+         id: row.id,
+         documentId: row.document_id,
+         chunkIndex: row.chunk_index,
+         content: row.content,
+         metadata: row.metadata,
+       }));
+     } catch (error) {
+       logger.error('Failed to get document chunks', { error, documentId });
+       throw new MemoryError(
+         `Failed to get document chunks: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+
+   // Get document count for user
+   async getDocumentCount(userId: string): Promise<number> {
+     try {
+       const result = await query<{ count: string }>(
+         'SELECT COUNT(*) as count FROM documents WHERE user_id = $1',
+         [userId]
+       );
+       return parseInt(result.rows[0]?.count ?? '0', 10);
+     } catch (error) {
+       logger.error('Failed to get document count', { error });
+       throw new MemoryError(
+         `Failed to get document count: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+ }
+
+ // Singleton instance
+ let kbInstance: KnowledgeBase | null = null;
+
+ export function getKnowledgeBase(): KnowledgeBase {
+   if (!kbInstance) {
+     kbInstance = new KnowledgeBase();
+   }
+   return kbInstance;
+ }
+
+ export default KnowledgeBase;
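
Read as a whole, addDocument splits the text with RecursiveCharacterTextSplitter (1000-character chunks, 200 overlap), embeds every chunk through the OpenRouter client, and writes the document row plus its chunk rows in a single transaction, while search embeds the query and ranks chunks by pgvector cosine similarity (1 minus the <=> distance, with a 0.7 floor by default). A minimal consumer-side sketch follows; it assumes a reachable Postgres instance with the schema from docker/init-db.sql and an OpenRouter key per .env.example. The deep import path, user id, and file contents are illustrative only, since the package's public entry points are not part of this hunk.

// Hypothetical ESM usage sketch; the import path and identifiers below are
// assumptions for illustration, not documented exports of family-ai-agent.
import { getKnowledgeBase } from 'family-ai-agent/dist/memory/knowledge-base/index.js';

const kb = getKnowledgeBase();

// Chunk, embed, and persist a document for one user; resolves to the new document id.
const docId = await kb.addDocument('Weekly meal plan: Monday is vegetarian chili...', 'meal-plan.md', {
  userId: 'user-123',
  fileType: 'text/markdown',
  metadata: { source: 'upload' },
});

// Retrieve the three most similar chunks above the default 0.7 similarity floor.
const hits = await kb.search('what are we cooking this week?', { userId: 'user-123', limit: 3 });
for (const hit of hits) {
  console.log(hit.filename, hit.similarity.toFixed(2), hit.content.slice(0, 80));
}

// Deleting the document also removes its chunks (per the deleteDocument comment above).
await kb.deleteDocument(docId);
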
package/src/memory/longterm/vector-store.ts
@@ -0,0 +1,364 @@
+ import { query, transaction } from '../../database/client.js';
+ import { getOpenRouterClient } from '../../llm/openrouter-client.js';
+ import { createLogger, logMemoryOperation } from '../../utils/logger.js';
+ import { MemoryError } from '../../utils/errors.js';
+ import type { RetrievedMemory } from '../../core/agents/types.js';
+
+ const logger = createLogger('VectorStore');
+
+ export type MemoryType = 'semantic' | 'episodic' | 'procedural';
+
+ export interface MemoryEntry {
+   id: string;
+   userId?: string;
+   memoryType: MemoryType;
+   content: string;
+   importance: number;
+   metadata?: Record<string, unknown>;
+   createdAt: Date;
+   updatedAt: Date;
+ }
+
+ export interface SearchOptions {
+   userId?: string;
+   memoryType?: MemoryType;
+   limit?: number;
+   minSimilarity?: number;
+ }
+
+ export class VectorStore {
+   private client = getOpenRouterClient();
+
+   // Store a memory with embedding
+   async store(
+     content: string,
+     memoryType: MemoryType,
+     options: {
+       userId?: string;
+       importance?: number;
+       metadata?: Record<string, unknown>;
+     } = {}
+   ): Promise<string> {
+     try {
+       // Generate embedding
+       const embedding = await this.client.embedQuery(content);
+
+       // Insert into database
+       const result = await query<{ id: string }>(
+         `INSERT INTO long_term_memories
+          (user_id, memory_type, content, embedding, importance, metadata)
+          VALUES ($1, $2, $3, $4, $5, $6)
+          RETURNING id`,
+         [
+           options.userId ?? null,
+           memoryType,
+           content,
+           `[${embedding.join(',')}]`,
+           options.importance ?? 0.5,
+           JSON.stringify(options.metadata ?? {}),
+         ]
+       );
+
+       const memoryId = result.rows[0]?.id;
+       if (!memoryId) {
+         throw new MemoryError('Failed to store memory');
+       }
+
+       logMemoryOperation('write', 'long_term', {
+         memoryId,
+         memoryType,
+         contentLength: content.length,
+       });
+
+       return memoryId;
+     } catch (error) {
+       logger.error('Failed to store memory', { error });
+       throw new MemoryError(
+         `Failed to store memory: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+
+   // Search for similar memories
+   async search(
+     queryText: string,
+     options: SearchOptions = {}
+   ): Promise<RetrievedMemory[]> {
+     const {
+       userId,
+       memoryType,
+       limit = 5,
+       minSimilarity = 0.7,
+     } = options;
+
+     try {
+       // Generate query embedding
+       const queryEmbedding = await this.client.embedQuery(queryText);
+
+       // Build the query with filters
+       let sql = `
+         SELECT
+           id,
+           memory_type,
+           content,
+           metadata,
+           1 - (embedding <=> $1::vector) as similarity
+         FROM long_term_memories
+         WHERE 1 - (embedding <=> $1::vector) >= $2
+       `;
+       const params: unknown[] = [`[${queryEmbedding.join(',')}]`, minSimilarity];
+       let paramIndex = 3;
+
+       if (userId) {
+         sql += ` AND user_id = $${paramIndex}`;
+         params.push(userId);
+         paramIndex++;
+       }
+
+       if (memoryType) {
+         sql += ` AND memory_type = $${paramIndex}`;
+         params.push(memoryType);
+         paramIndex++;
+       }
+
+       sql += ` ORDER BY similarity DESC LIMIT $${paramIndex}`;
+       params.push(limit);
+
+       const result = await query<{
+         id: string;
+         memory_type: MemoryType;
+         content: string;
+         metadata: Record<string, unknown>;
+         similarity: number;
+       }>(sql, params);
+
+       // Update access count for retrieved memories
+       if (result.rows.length > 0) {
+         const ids = result.rows.map((r) => r.id);
+         await query(
+           `UPDATE long_term_memories
+            SET access_count = access_count + 1, last_accessed = NOW()
+            WHERE id = ANY($1)`,
+           [ids]
+         );
+       }
+
+       const memories: RetrievedMemory[] = result.rows.map((row) => ({
+         id: row.id,
+         type: row.memory_type,
+         content: row.content,
+         relevanceScore: row.similarity,
+         metadata: row.metadata,
+       }));
+
+       logMemoryOperation('search', 'long_term', {
+         queryLength: queryText.length,
+         resultsCount: memories.length,
+       });
+
+       return memories;
+     } catch (error) {
+       logger.error('Failed to search memories', { error });
+       throw new MemoryError(
+         `Failed to search memories: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+
+   // Get memory by ID
+   async get(id: string): Promise<MemoryEntry | null> {
+     try {
+       const result = await query<{
+         id: string;
+         user_id: string | null;
+         memory_type: MemoryType;
+         content: string;
+         importance: number;
+         metadata: Record<string, unknown>;
+         created_at: Date;
+         updated_at: Date;
+       }>(
+         `SELECT id, user_id, memory_type, content, importance, metadata, created_at, updated_at
+          FROM long_term_memories WHERE id = $1`,
+         [id]
+       );
+
+       if (result.rows.length === 0) {
+         return null;
+       }
+
+       const row = result.rows[0]!;
+       return {
+         id: row.id,
+         userId: row.user_id ?? undefined,
+         memoryType: row.memory_type,
+         content: row.content,
+         importance: row.importance,
+         metadata: row.metadata,
+         createdAt: row.created_at,
+         updatedAt: row.updated_at,
+       };
+     } catch (error) {
+       logger.error('Failed to get memory', { error, id });
+       throw new MemoryError(
+         `Failed to get memory: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+
+   // Update memory content
+   async update(
+     id: string,
+     content: string,
+     options: { importance?: number; metadata?: Record<string, unknown> } = {}
+   ): Promise<void> {
+     try {
+       // Re-generate embedding for new content
+       const embedding = await this.client.embedQuery(content);
+
+       await query(
+         `UPDATE long_term_memories
+          SET content = $2, embedding = $3, importance = COALESCE($4, importance),
+              metadata = COALESCE($5, metadata), updated_at = NOW()
+          WHERE id = $1`,
+         [
+           id,
+           content,
+           `[${embedding.join(',')}]`,
+           options.importance ?? null,
+           options.metadata ? JSON.stringify(options.metadata) : null,
+         ]
+       );
+
+       logMemoryOperation('write', 'long_term', { memoryId: id, action: 'update' });
+     } catch (error) {
+       logger.error('Failed to update memory', { error, id });
+       throw new MemoryError(
+         `Failed to update memory: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+
+   // Delete memory
+   async delete(id: string): Promise<void> {
+     try {
+       await query('DELETE FROM long_term_memories WHERE id = $1', [id]);
+       logMemoryOperation('write', 'long_term', { memoryId: id, action: 'delete' });
+     } catch (error) {
+       logger.error('Failed to delete memory', { error, id });
+       throw new MemoryError(
+         `Failed to delete memory: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+
+   // Bulk store memories
+   async bulkStore(
+     entries: Array<{
+       content: string;
+       memoryType: MemoryType;
+       userId?: string;
+       importance?: number;
+       metadata?: Record<string, unknown>;
+     }>
+   ): Promise<string[]> {
+     try {
+       // Generate embeddings for all entries
+       const embeddings = await this.client.embed(entries.map((e) => e.content));
+
+       // Use transaction for bulk insert
+       const ids = await transaction(async (client) => {
+         const results: string[] = [];
+
+         for (let i = 0; i < entries.length; i++) {
+           const entry = entries[i]!;
+           const embedding = embeddings[i]!;
+
+           const result = await client.query<{ id: string }>(
+             `INSERT INTO long_term_memories
+              (user_id, memory_type, content, embedding, importance, metadata)
+              VALUES ($1, $2, $3, $4, $5, $6)
+              RETURNING id`,
+             [
+               entry.userId ?? null,
+               entry.memoryType,
+               entry.content,
+               `[${embedding.join(',')}]`,
+               entry.importance ?? 0.5,
+               JSON.stringify(entry.metadata ?? {}),
+             ]
+           );
+
+           if (result.rows[0]) {
+             results.push(result.rows[0].id);
+           }
+         }
+
+         return results;
+       });
+
+       logMemoryOperation('write', 'long_term', {
+         action: 'bulk_store',
+         count: ids.length,
+       });
+
+       return ids;
+     } catch (error) {
+       logger.error('Failed to bulk store memories', { error });
+       throw new MemoryError(
+         `Failed to bulk store memories: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+
+   // Get recent memories for a user
+   async getRecent(userId: string, limit: number = 10): Promise<MemoryEntry[]> {
+     try {
+       const result = await query<{
+         id: string;
+         user_id: string | null;
+         memory_type: MemoryType;
+         content: string;
+         importance: number;
+         metadata: Record<string, unknown>;
+         created_at: Date;
+         updated_at: Date;
+       }>(
+         `SELECT id, user_id, memory_type, content, importance, metadata, created_at, updated_at
+          FROM long_term_memories
+          WHERE user_id = $1
+          ORDER BY created_at DESC
+          LIMIT $2`,
+         [userId, limit]
+       );
+
+       return result.rows.map((row) => ({
+         id: row.id,
+         userId: row.user_id ?? undefined,
+         memoryType: row.memory_type,
+         content: row.content,
+         importance: row.importance,
+         metadata: row.metadata,
+         createdAt: row.created_at,
+         updatedAt: row.updated_at,
+       }));
+     } catch (error) {
+       logger.error('Failed to get recent memories', { error });
+       throw new MemoryError(
+         `Failed to get recent memories: ${error instanceof Error ? error.message : 'Unknown error'}`
+       );
+     }
+   }
+ }
+
+ // Singleton instance
+ let storeInstance: VectorStore | null = null;
+
+ export function getVectorStore(): VectorStore {
+   if (!storeInstance) {
+     storeInstance = new VectorStore();
+   }
+   return storeInstance;
+ }
+
+ export default VectorStore;
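
VectorStore follows the same pattern for free-form long-term memories: store and bulkStore embed the content and insert rows with an importance score, search filters by user and memory type, increments access_count on every hit, and returns matches shaped as RetrievedMemory (content plus relevanceScore). A short sketch under the same assumptions as the previous example (configured database and OpenRouter key; the import path and values are illustrative):

// Hypothetical ESM usage sketch; the import path and values are assumptions for illustration.
import { getVectorStore } from 'family-ai-agent/dist/memory/longterm/vector-store.js';

const memories = getVectorStore();

// Persist one episodic memory with above-default importance (the default is 0.5).
const id = await memories.store('The family prefers vegetarian dinners on Mondays', 'episodic', {
  userId: 'user-123',
  importance: 0.8,
});

// Batch-insert related memories inside a single transaction.
await memories.bulkStore([
  { content: 'School pickup is at 15:30 on weekdays', memoryType: 'procedural', userId: 'user-123' },
  { content: 'Grandma visits on the first Sunday of the month', memoryType: 'episodic', userId: 'user-123' },
]);

// Semantic recall: only rows with cosine similarity >= 0.7 are returned by default.
const recalled = await memories.search('what do we usually eat on Monday?', {
  userId: 'user-123',
  memoryType: 'episodic',
  limit: 5,
});
console.log(recalled.map((m) => `${m.relevanceScore.toFixed(2)} ${m.content}`));
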