@twelvehart/supermemory-runtime 1.0.0-next.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +57 -0
- package/README.md +374 -0
- package/dist/index.js +189 -0
- package/dist/mcp/index.js +1132 -0
- package/docker-compose.prod.yml +91 -0
- package/docker-compose.yml +358 -0
- package/drizzle/0000_dapper_the_professor.sql +159 -0
- package/drizzle/0001_api_keys.sql +51 -0
- package/drizzle/meta/0000_snapshot.json +1532 -0
- package/drizzle/meta/_journal.json +13 -0
- package/drizzle.config.ts +20 -0
- package/package.json +114 -0
- package/scripts/add-extraction-job.ts +122 -0
- package/scripts/benchmark-pgvector.ts +122 -0
- package/scripts/bootstrap.sh +209 -0
- package/scripts/check-runtime-pack.ts +111 -0
- package/scripts/claude-mcp-config.ts +336 -0
- package/scripts/docker-entrypoint.sh +183 -0
- package/scripts/doctor.ts +377 -0
- package/scripts/init-db.sql +33 -0
- package/scripts/install.sh +1110 -0
- package/scripts/mcp-setup.ts +271 -0
- package/scripts/migrations/001_create_pgvector_extension.sql +31 -0
- package/scripts/migrations/002_create_memory_embeddings_table.sql +75 -0
- package/scripts/migrations/003_create_hnsw_index.sql +94 -0
- package/scripts/migrations/004_create_memory_embeddings_standalone.sql +70 -0
- package/scripts/migrations/005_create_chunks_table.sql +95 -0
- package/scripts/migrations/006_create_processing_queue.sql +45 -0
- package/scripts/migrations/generate_test_data.sql +42 -0
- package/scripts/migrations/phase1_comprehensive_test.sql +204 -0
- package/scripts/migrations/run_migrations.sh +286 -0
- package/scripts/migrations/test_hnsw_index.sql +255 -0
- package/scripts/pre-commit-secrets +282 -0
- package/scripts/run-extraction-worker.ts +46 -0
- package/scripts/run-phase1-tests.sh +291 -0
- package/scripts/setup.ts +222 -0
- package/scripts/smoke-install.sh +12 -0
- package/scripts/test-health-endpoint.sh +328 -0
- package/src/api/index.ts +2 -0
- package/src/api/middleware/auth.ts +80 -0
- package/src/api/middleware/csrf.ts +308 -0
- package/src/api/middleware/errorHandler.ts +166 -0
- package/src/api/middleware/rateLimit.ts +360 -0
- package/src/api/middleware/validation.ts +514 -0
- package/src/api/routes/documents.ts +286 -0
- package/src/api/routes/profiles.ts +237 -0
- package/src/api/routes/search.ts +71 -0
- package/src/api/stores/index.ts +58 -0
- package/src/config/bootstrap-env.ts +3 -0
- package/src/config/env.ts +71 -0
- package/src/config/feature-flags.ts +25 -0
- package/src/config/index.ts +140 -0
- package/src/config/secrets.config.ts +291 -0
- package/src/db/client.ts +92 -0
- package/src/db/index.ts +73 -0
- package/src/db/postgres.ts +72 -0
- package/src/db/schema/chunks.schema.ts +31 -0
- package/src/db/schema/containers.schema.ts +46 -0
- package/src/db/schema/documents.schema.ts +49 -0
- package/src/db/schema/embeddings.schema.ts +32 -0
- package/src/db/schema/index.ts +11 -0
- package/src/db/schema/memories.schema.ts +72 -0
- package/src/db/schema/profiles.schema.ts +34 -0
- package/src/db/schema/queue.schema.ts +59 -0
- package/src/db/schema/relationships.schema.ts +42 -0
- package/src/db/schema.ts +223 -0
- package/src/db/worker-connection.ts +47 -0
- package/src/index.ts +235 -0
- package/src/mcp/CLAUDE.md +1 -0
- package/src/mcp/index.ts +1380 -0
- package/src/mcp/legacyState.ts +22 -0
- package/src/mcp/rateLimit.ts +358 -0
- package/src/mcp/resources.ts +309 -0
- package/src/mcp/results.ts +104 -0
- package/src/mcp/tools.ts +401 -0
- package/src/queues/config.ts +119 -0
- package/src/queues/index.ts +289 -0
- package/src/sdk/client.ts +225 -0
- package/src/sdk/errors.ts +266 -0
- package/src/sdk/http.ts +560 -0
- package/src/sdk/index.ts +244 -0
- package/src/sdk/resources/base.ts +65 -0
- package/src/sdk/resources/connections.ts +204 -0
- package/src/sdk/resources/documents.ts +163 -0
- package/src/sdk/resources/index.ts +10 -0
- package/src/sdk/resources/memories.ts +150 -0
- package/src/sdk/resources/search.ts +60 -0
- package/src/sdk/resources/settings.ts +36 -0
- package/src/sdk/types.ts +674 -0
- package/src/services/chunking/index.ts +451 -0
- package/src/services/chunking.service.ts +650 -0
- package/src/services/csrf.service.ts +252 -0
- package/src/services/documents.repository.ts +219 -0
- package/src/services/documents.service.ts +191 -0
- package/src/services/embedding.service.ts +404 -0
- package/src/services/extraction.service.ts +300 -0
- package/src/services/extractors/code.extractor.ts +451 -0
- package/src/services/extractors/index.ts +9 -0
- package/src/services/extractors/markdown.extractor.ts +461 -0
- package/src/services/extractors/pdf.extractor.ts +315 -0
- package/src/services/extractors/text.extractor.ts +118 -0
- package/src/services/extractors/url.extractor.ts +243 -0
- package/src/services/index.ts +235 -0
- package/src/services/ingestion.service.ts +177 -0
- package/src/services/llm/anthropic.ts +400 -0
- package/src/services/llm/base.ts +460 -0
- package/src/services/llm/contradiction-detector.service.ts +526 -0
- package/src/services/llm/heuristics.ts +148 -0
- package/src/services/llm/index.ts +309 -0
- package/src/services/llm/memory-classifier.service.ts +383 -0
- package/src/services/llm/memory-extension-detector.service.ts +523 -0
- package/src/services/llm/mock.ts +470 -0
- package/src/services/llm/openai.ts +398 -0
- package/src/services/llm/prompts.ts +438 -0
- package/src/services/llm/types.ts +373 -0
- package/src/services/memory.repository.ts +1769 -0
- package/src/services/memory.service.ts +1338 -0
- package/src/services/memory.types.ts +234 -0
- package/src/services/persistence/index.ts +295 -0
- package/src/services/pipeline.service.ts +509 -0
- package/src/services/profile.repository.ts +436 -0
- package/src/services/profile.service.ts +560 -0
- package/src/services/profile.types.ts +270 -0
- package/src/services/relationships/detector.ts +1128 -0
- package/src/services/relationships/index.ts +268 -0
- package/src/services/relationships/memory-integration.ts +459 -0
- package/src/services/relationships/strategies.ts +132 -0
- package/src/services/relationships/types.ts +370 -0
- package/src/services/search.service.ts +761 -0
- package/src/services/search.types.ts +220 -0
- package/src/services/secrets.service.ts +384 -0
- package/src/services/vectorstore/base.ts +327 -0
- package/src/services/vectorstore/index.ts +444 -0
- package/src/services/vectorstore/memory.ts +286 -0
- package/src/services/vectorstore/migration.ts +295 -0
- package/src/services/vectorstore/mock.ts +403 -0
- package/src/services/vectorstore/pgvector.ts +695 -0
- package/src/services/vectorstore/types.ts +247 -0
- package/src/startup.ts +389 -0
- package/src/types/api.types.ts +193 -0
- package/src/types/document.types.ts +103 -0
- package/src/types/index.ts +241 -0
- package/src/types/profile.base.ts +133 -0
- package/src/utils/errors.ts +447 -0
- package/src/utils/id.ts +15 -0
- package/src/utils/index.ts +101 -0
- package/src/utils/logger.ts +313 -0
- package/src/utils/sanitization.ts +501 -0
- package/src/utils/secret-validation.ts +273 -0
- package/src/utils/synonyms.ts +188 -0
- package/src/utils/validation.ts +581 -0
- package/src/workers/chunking.worker.ts +242 -0
- package/src/workers/embedding.worker.ts +358 -0
- package/src/workers/extraction.worker.ts +346 -0
- package/src/workers/indexing.worker.ts +505 -0
- package/tsconfig.json +38 -0
|
@@ -0,0 +1,1769 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Memory Repository - Database Operations
|
|
3
|
+
*
|
|
4
|
+
* Handles persistence layer for memories and relationships.
|
|
5
|
+
* Uses PostgreSQL for runtime persistence; store injection is retained only for test compatibility.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import {
|
|
9
|
+
Memory,
|
|
10
|
+
Relationship,
|
|
11
|
+
MemoryQueryOptions,
|
|
12
|
+
SemanticSearchOptions,
|
|
13
|
+
RelationshipType,
|
|
14
|
+
type MemoryType,
|
|
15
|
+
} from './memory.types.js'
|
|
16
|
+
import { getLogger } from '../utils/logger.js'
|
|
17
|
+
import { DatabaseError } from '../utils/errors.js'
|
|
18
|
+
import { validate, uuidSchema, memoryQueryOptionsSchema, validateContainerTag } from '../utils/validation.js'
|
|
19
|
+
import { isEmbeddingRelationshipsEnabled } from '../config/feature-flags.js'
|
|
20
|
+
import { getEmbeddingService, cosineSimilarity } from './embedding.service.js'
|
|
21
|
+
import { getPostgresDatabase } from '../db/postgres.js'
|
|
22
|
+
import { getDatabaseUrl, isPostgresUrl } from '../db/client.js'
|
|
23
|
+
import { memories as memoriesTable } from '../db/schema/memories.schema.js'
|
|
24
|
+
import { memoryRelationships } from '../db/schema/relationships.schema.js'
|
|
25
|
+
import { memoryEmbeddings } from '../db/schema/embeddings.schema.js'
|
|
26
|
+
import { and, asc, desc, eq, inArray, notInArray, or, sql, type SQL } from 'drizzle-orm'
|
|
27
|
+
import { createHash } from 'node:crypto'
|
|
28
|
+
|
|
29
|
+
const logger = getLogger('MemoryRepository')

// Lazily-created Drizzle Postgres handle; populated on first getDb() call.
let _db: ReturnType<typeof getPostgresDatabase> | null = null

/**
 * Returns the shared Postgres database handle, creating it on first use.
 *
 * @throws Error when DATABASE_URL is not a Postgres URL — this repository
 *   deliberately refuses SQLite outside of tests.
 */
function getDb(): ReturnType<typeof getPostgresDatabase> {
  if (_db) return _db
  const databaseUrl = getDatabaseUrl()
  if (!isPostgresUrl(databaseUrl)) {
    throw new Error(
      'MemoryRepository requires a PostgreSQL DATABASE_URL. SQLite is only supported in tests and is not compatible with memory repository persistence.'
    )
  }
  _db = getPostgresDatabase(databaseUrl)
  return _db
}

// Module-level facade that defers database construction until the first
// property access, so importing this module never touches the environment.
// NOTE(review): the get trap returns members of the real db without binding
// them — presumably drizzle's API does not rely on `this`, but confirm no
// method invoked through this proxy needs the original receiver.
const db = new Proxy({} as ReturnType<typeof getPostgresDatabase>, {
  get(_target, prop) {
    return getDb()[prop as keyof ReturnType<typeof getPostgresDatabase>]
  },
})
|
|
50
|
+
|
|
51
|
+
// Memory type values the database column accepts verbatim.
const dbMemoryTypes = new Set(['fact', 'preference', 'episode', 'belief', 'skill', 'context'])
// Domain-level memory types; superset of the DB enum (see mapMemoryTypeToDb
// for how the extras are stored).
const memoryTypes = new Set<MemoryType>(['fact', 'event', 'preference', 'skill', 'relationship', 'context', 'note'])

// Domain-level relationship types used by the type guard below.
const relationshipTypes = new Set<RelationshipType>([
  'updates',
  'extends',
  'derives',
  'contradicts',
  'related',
  'supersedes',
])
|
|
62
|
+
|
|
63
|
+
function isMemoryType(value: unknown): value is MemoryType {
|
|
64
|
+
return typeof value === 'string' && memoryTypes.has(value as MemoryType)
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
function isRelationshipType(value: unknown): value is RelationshipType {
|
|
68
|
+
return typeof value === 'string' && relationshipTypes.has(value as RelationshipType)
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
function mapMemoryTypeToDb(type: MemoryType): { dbType: string; originalType?: MemoryType } {
|
|
72
|
+
if (dbMemoryTypes.has(type)) {
|
|
73
|
+
return { dbType: type }
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
switch (type) {
|
|
77
|
+
case 'event':
|
|
78
|
+
return { dbType: 'episode', originalType: type }
|
|
79
|
+
case 'relationship':
|
|
80
|
+
return { dbType: 'fact', originalType: type }
|
|
81
|
+
case 'note':
|
|
82
|
+
return { dbType: 'context', originalType: type }
|
|
83
|
+
default:
|
|
84
|
+
return { dbType: 'fact', originalType: type }
|
|
85
|
+
}
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
function mapMemoryTypeFromDb(dbType: string, metadata: Record<string, unknown>): MemoryType {
|
|
89
|
+
const original = metadata.originalType
|
|
90
|
+
if (isMemoryType(original)) {
|
|
91
|
+
return original
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
if (dbMemoryTypes.has(dbType)) {
|
|
95
|
+
return dbType as MemoryType
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
return 'fact'
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
function mapRelationshipTypeToDb(type: RelationshipType): {
|
|
102
|
+
dbType: string
|
|
103
|
+
originalType?: RelationshipType
|
|
104
|
+
} {
|
|
105
|
+
if (type === 'related') {
|
|
106
|
+
return { dbType: 'relates', originalType: type }
|
|
107
|
+
}
|
|
108
|
+
if (type === 'supersedes') {
|
|
109
|
+
return { dbType: 'updates', originalType: type }
|
|
110
|
+
}
|
|
111
|
+
return { dbType: type }
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
function mapRelationshipTypeFromDb(dbType: string, metadata: Record<string, unknown>): RelationshipType {
|
|
115
|
+
const original = metadata.originalType
|
|
116
|
+
if (isRelationshipType(original)) {
|
|
117
|
+
return original
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
if (dbType === 'relates') return 'related'
|
|
121
|
+
if (dbType === 'updates') return 'updates'
|
|
122
|
+
return (dbType as RelationshipType) ?? 'related'
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
function generateSimilarityHash(content: string): string {
|
|
126
|
+
const normalized = content.toLowerCase().replace(/\s+/g, ' ').trim()
|
|
127
|
+
return createHash('sha256').update(normalized).digest('hex')
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
function normalizeMetadata(metadata?: Record<string, unknown> | null): Record<string, unknown> {
|
|
131
|
+
if (metadata && typeof metadata === 'object') {
|
|
132
|
+
return { ...metadata }
|
|
133
|
+
}
|
|
134
|
+
return {}
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
/**
 * Converts a Drizzle `memories` row into the domain `Memory` shape.
 * Relationship hydration is left to the caller (`relationships` starts empty).
 */
function mapDbMemory(row: typeof memoriesTable.$inferSelect): Memory {
  const metadata = normalizeMetadata(row.metadata as Record<string, unknown> | null)
  // Recover the domain-level type; the DB stores a narrower enum (see mapMemoryTypeToDb).
  const type = mapMemoryTypeFromDb(row.memoryType, metadata)
  // confidenceScore comes back as a string (numeric column); absent means full confidence.
  const confidence = row.confidenceScore ? parseFloat(row.confidenceScore) : 1

  return {
    id: row.id,
    content: row.content,
    type,
    sourceId: row.documentId ?? undefined,
    relationships: [],
    isLatest: row.isLatest,
    // NOTE(review): row.supersedesId is mapped into `supersededBy` — the two
    // names point in opposite directions; confirm against the schema/writers
    // that this inversion is intentional.
    supersededBy: row.supersedesId ?? undefined,
    containerTag: row.containerTag ?? undefined,
    metadata,
    createdAt: row.createdAt,
    updatedAt: row.updatedAt,
    confidence,
  }
}
|
|
157
|
+
|
|
158
|
+
/**
 * Converts a Drizzle `memoryRelationships` row into the domain `Relationship`.
 * The human-readable description, when present, is carried inside metadata.
 */
function mapDbRelationship(row: typeof memoryRelationships.$inferSelect): Relationship {
  const metadata = normalizeMetadata(row.metadata as Record<string, unknown> | null)
  // Description is optional and only honoured when stored as a string.
  const description = typeof metadata.description === 'string' ? metadata.description : undefined
  // Recover the domain-level type; the DB may store an aliased value.
  const type = mapRelationshipTypeFromDb(row.relationshipType, metadata)

  return {
    id: row.id,
    sourceMemoryId: row.sourceMemoryId,
    targetMemoryId: row.targetMemoryId,
    type,
    // `weight` (numeric column, string at runtime) doubles as confidence; default 1.
    confidence: row.weight ? parseFloat(row.weight) : 1,
    description,
    createdAt: row.createdAt,
    metadata,
  }
}
|
|
174
|
+
|
|
175
|
+
// ============================================================================
|
|
176
|
+
// Memory Store Interface (for dependency injection)
|
|
177
|
+
// ============================================================================
|
|
178
|
+
|
|
179
|
+
/**
 * Memory store interface - retained for test compatibility.
 * A pair of plain in-memory maps backing InMemoryMemoryRepository.
 */
export interface MemoryStore {
  // All memories keyed by Memory.id.
  memories: Map<string, Memory>
  // All relationship edges keyed by Relationship.id.
  relationships: Map<string, Relationship>
}
|
|
186
|
+
|
|
187
|
+
/**
|
|
188
|
+
* Factory function to create a new memory store
|
|
189
|
+
* Use this for testing to get isolated stores
|
|
190
|
+
*/
|
|
191
|
+
export function createMemoryStore(): MemoryStore {
|
|
192
|
+
return {
|
|
193
|
+
memories: new Map(),
|
|
194
|
+
relationships: new Map(),
|
|
195
|
+
}
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
// ============================================================================
// In-memory repository (test/default)
// ============================================================================

/**
 * Map-backed implementation of the memory repository contract.
 * Mirrors the Postgres-backed repository's API; intended for tests and
 * environments without a database.
 */
export class InMemoryMemoryRepository {
  // Backing store is injected so tests can share or isolate state.
  private readonly store: MemoryStore

  constructor(store: MemoryStore) {
    this.store = store
  }

  /** Exposes the backing store (primarily for test assertions). */
  getStore(): MemoryStore {
    return this.store
  }
|
|
212
|
+
|
|
213
|
+
/**
 * Inserts a new memory.
 *
 * @throws DatabaseError when the id is missing or already present, or on any
 *   unexpected failure (wrapped with the original error).
 */
async create(memory: Memory): Promise<Memory> {
  // Validated outside the try so container-tag violations propagate as their
  // own error type instead of being wrapped in DatabaseError.
  if (memory.containerTag !== undefined) {
    validateContainerTag(memory.containerTag)
  }
  try {
    logger.debug('Creating memory', { id: memory.id, type: memory.type })

    if (!memory.id) {
      throw new DatabaseError('Memory ID is required', 'create')
    }

    // Unlike update(), creation of an existing id is an error, not an upsert.
    if (this.store.memories.has(memory.id)) {
      throw new DatabaseError(`Memory with ID ${memory.id} already exists`, 'create', {
        existingId: memory.id,
      })
    }

    // Store a shallow copy so later caller-side mutation cannot alter the store.
    this.store.memories.set(memory.id, { ...memory })
    logger.info('Memory created', { id: memory.id })
    return memory
  } catch (error) {
    // Our own errors pass through untouched; anything else is wrapped.
    if (error instanceof DatabaseError) {
      throw error
    }
    logger.errorWithException('Failed to create memory', error, { memoryId: memory.id })
    throw new DatabaseError('Failed to create memory', 'create', { originalError: error })
  }
}
|
|
241
|
+
|
|
242
|
+
/**
 * Inserts many memories in one call.
 *
 * NOTE(review): unlike create(), this performs no missing-id or duplicate-id
 * checks — an existing entry with the same id is silently overwritten.
 * Confirm whether that asymmetry is intentional.
 */
async createBatch(memories: Memory[]): Promise<Memory[]> {
  // Validate all container tags up front so a bad tag fails before any write.
  for (const memory of memories) {
    if (memory.containerTag !== undefined) {
      validateContainerTag(memory.containerTag)
    }
  }
  try {
    logger.debug('Creating memories batch', { count: memories.length })

    const created: Memory[] = []
    for (const memory of memories) {
      // Shallow copy so caller-side mutation cannot alter the store.
      this.store.memories.set(memory.id, { ...memory })
      created.push(memory)
    }

    logger.info('Memories batch created', { count: created.length })
    return created
  } catch (error) {
    logger.errorWithException('Failed to create memories batch', error)
    throw new DatabaseError('Failed to create memories batch', 'createBatch', {
      originalError: error,
    })
  }
}
|
|
266
|
+
|
|
267
|
+
/**
 * Applies a partial patch to an existing memory; updatedAt is refreshed.
 *
 * @returns the updated memory, or null when no memory has the given id.
 * @throws DatabaseError on invalid id or any other failure.
 *
 * NOTE(review): unlike create(), the catch wraps every error — including
 * validation failures and any DatabaseError — in a new DatabaseError.
 * Confirm callers do not depend on the inner error type.
 */
async update(id: string, updates: Partial<Memory>): Promise<Memory | null> {
  // Validated outside the try so container-tag violations propagate unwrapped.
  if (updates.containerTag !== undefined) {
    validateContainerTag(updates.containerTag)
  }
  try {
    validate(uuidSchema, id)
    logger.debug('Updating memory', { id })

    const existing = this.store.memories.get(id)
    if (!existing) {
      logger.warn('Memory not found for update', { id })
      return null
    }

    // Shallow merge: patch fields win over existing ones; updatedAt always refreshed.
    const updated: Memory = {
      ...existing,
      ...updates,
      updatedAt: new Date(),
    }
    this.store.memories.set(id, updated)

    logger.info('Memory updated', { id })
    return updated
  } catch (error) {
    logger.errorWithException('Failed to update memory', error, { memoryId: id })
    throw new DatabaseError('Failed to update memory', 'update', {
      originalError: error,
      memoryId: id,
    })
  }
}
|
|
298
|
+
|
|
299
|
+
async delete(id: string): Promise<boolean> {
|
|
300
|
+
try {
|
|
301
|
+
validate(uuidSchema, id)
|
|
302
|
+
logger.debug('Deleting memory', { id })
|
|
303
|
+
|
|
304
|
+
for (const [relId, rel] of this.store.relationships) {
|
|
305
|
+
if (rel.sourceMemoryId === id || rel.targetMemoryId === id) {
|
|
306
|
+
this.store.relationships.delete(relId)
|
|
307
|
+
}
|
|
308
|
+
}
|
|
309
|
+
|
|
310
|
+
const deleted = this.store.memories.delete(id)
|
|
311
|
+
if (deleted) {
|
|
312
|
+
logger.info('Memory deleted', { id })
|
|
313
|
+
} else {
|
|
314
|
+
logger.warn('Memory not found for deletion', { id })
|
|
315
|
+
}
|
|
316
|
+
return deleted
|
|
317
|
+
} catch (error) {
|
|
318
|
+
logger.errorWithException('Failed to delete memory', error, { memoryId: id })
|
|
319
|
+
throw new DatabaseError('Failed to delete memory', 'delete', {
|
|
320
|
+
originalError: error,
|
|
321
|
+
memoryId: id,
|
|
322
|
+
})
|
|
323
|
+
}
|
|
324
|
+
}
|
|
325
|
+
|
|
326
|
+
async findById(id: string): Promise<Memory | null> {
|
|
327
|
+
try {
|
|
328
|
+
validate(uuidSchema, id)
|
|
329
|
+
logger.debug('Finding memory by ID', { id })
|
|
330
|
+
return this.store.memories.get(id) || null
|
|
331
|
+
} catch (error) {
|
|
332
|
+
logger.errorWithException('Failed to find memory', error, { memoryId: id })
|
|
333
|
+
throw new DatabaseError('Failed to find memory', 'findById', {
|
|
334
|
+
originalError: error,
|
|
335
|
+
memoryId: id,
|
|
336
|
+
})
|
|
337
|
+
}
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
async findByContainerTag(containerTag: string, options: MemoryQueryOptions = {}): Promise<Memory[]> {
|
|
341
|
+
validateContainerTag(containerTag)
|
|
342
|
+
try {
|
|
343
|
+
const validatedOptions = validate(memoryQueryOptionsSchema, options)
|
|
344
|
+
logger.debug('Finding memories by container tag', {
|
|
345
|
+
containerTag,
|
|
346
|
+
options: validatedOptions,
|
|
347
|
+
})
|
|
348
|
+
|
|
349
|
+
let results = Array.from(this.store.memories.values()).filter((m) => m.containerTag === containerTag)
|
|
350
|
+
|
|
351
|
+
if (validatedOptions.latestOnly) {
|
|
352
|
+
results = results.filter((m) => m.isLatest)
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
if (validatedOptions.type) {
|
|
356
|
+
results = results.filter((m) => m.type === validatedOptions.type)
|
|
357
|
+
}
|
|
358
|
+
|
|
359
|
+
if (validatedOptions.minConfidence !== undefined) {
|
|
360
|
+
results = results.filter((m) => m.confidence >= validatedOptions.minConfidence!)
|
|
361
|
+
}
|
|
362
|
+
|
|
363
|
+
const sortBy = validatedOptions.sortBy || 'createdAt'
|
|
364
|
+
const sortOrder = validatedOptions.sortOrder || 'desc'
|
|
365
|
+
results.sort((a, b) => {
|
|
366
|
+
const aVal =
|
|
367
|
+
sortBy === 'createdAt'
|
|
368
|
+
? a.createdAt
|
|
369
|
+
: sortBy === 'updatedAt'
|
|
370
|
+
? a.updatedAt
|
|
371
|
+
: sortBy === 'confidence'
|
|
372
|
+
? a.confidence
|
|
373
|
+
: a.createdAt
|
|
374
|
+
const bVal =
|
|
375
|
+
sortBy === 'createdAt'
|
|
376
|
+
? b.createdAt
|
|
377
|
+
: sortBy === 'updatedAt'
|
|
378
|
+
? b.updatedAt
|
|
379
|
+
: sortBy === 'confidence'
|
|
380
|
+
? b.confidence
|
|
381
|
+
: b.createdAt
|
|
382
|
+
|
|
383
|
+
if (aVal instanceof Date && bVal instanceof Date) {
|
|
384
|
+
return sortOrder === 'desc' ? bVal.getTime() - aVal.getTime() : aVal.getTime() - bVal.getTime()
|
|
385
|
+
}
|
|
386
|
+
if (typeof aVal === 'number' && typeof bVal === 'number') {
|
|
387
|
+
return sortOrder === 'desc' ? bVal - aVal : aVal - bVal
|
|
388
|
+
}
|
|
389
|
+
return 0
|
|
390
|
+
})
|
|
391
|
+
|
|
392
|
+
const offset = validatedOptions.offset ?? 0
|
|
393
|
+
const limit = validatedOptions.limit ?? 100
|
|
394
|
+
return results.slice(offset, offset + limit)
|
|
395
|
+
} catch (error) {
|
|
396
|
+
logger.errorWithException('Failed to find memories by container tag', error, {
|
|
397
|
+
containerTag,
|
|
398
|
+
})
|
|
399
|
+
throw new DatabaseError('Failed to find memories', 'findByContainerTag', {
|
|
400
|
+
originalError: error,
|
|
401
|
+
})
|
|
402
|
+
}
|
|
403
|
+
}
|
|
404
|
+
|
|
405
|
+
/**
 * Breadth-first traversal of the relationship graph from `memoryId`.
 * Edges are treated as undirected; each hop increments depth, up to `depth`
 * (default 1 = immediate neighbors only). Optionally filters by relationship
 * type. Returns each reachable memory paired with the edge that reached it.
 */
async findRelated(
  memoryId: string,
  options: {
    relationshipTypes?: RelationshipType[]
    depth?: number
    limit?: number
  } = {}
): Promise<{ memory: Memory; relationship: Relationship }[]> {
  try {
    validate(uuidSchema, memoryId)
    logger.debug('Finding related memories', { memoryId, options })

    const { relationshipTypes, depth = 1, limit = 50 } = options
    const results: { memory: Memory; relationship: Relationship }[] = []
    const visited = new Set<string>()
    const queue: { id: string; currentDepth: number }[] = [{ id: memoryId, currentDepth: 0 }]

    // The limit is only checked between queue pops, so one node's neighbor
    // scan can overshoot; the final slice enforces the hard cap.
    while (queue.length > 0 && results.length < limit) {
      const current = queue.shift()!

      if (visited.has(current.id) || current.currentDepth >= depth) {
        continue
      }
      visited.add(current.id)

      // Linear scan over all edges touching the current node (either endpoint).
      for (const rel of this.store.relationships.values()) {
        if (rel.sourceMemoryId === current.id || rel.targetMemoryId === current.id) {
          if (relationshipTypes && !relationshipTypes.includes(rel.type)) {
            continue
          }

          // The "other end" of the edge relative to the current node.
          const relatedId = rel.sourceMemoryId === current.id ? rel.targetMemoryId : rel.sourceMemoryId

          if (!visited.has(relatedId)) {
            const relatedMemory = this.store.memories.get(relatedId)
            if (relatedMemory) {
              results.push({ memory: relatedMemory, relationship: rel })

              // Only enqueue for further expansion while under the depth budget.
              if (current.currentDepth + 1 < depth) {
                queue.push({ id: relatedId, currentDepth: current.currentDepth + 1 })
              }
            }
          }
        }
      }
    }

    return results.slice(0, limit)
  } catch (error) {
    logger.errorWithException('Failed to find related memories', error, { memoryId })
    throw new DatabaseError('Failed to find related memories', 'findRelated', {
      originalError: error,
    })
  }
}
|
|
460
|
+
|
|
461
|
+
/**
 * Searches memories by semantic similarity to `options.query`.
 *
 * With the embedding feature flag off, degrades to case-insensitive substring
 * matching ordered newest-first. With it on, compares the query embedding to
 * each candidate's embedding via cosine similarity, ordered by similarity
 * then recency. Missing candidate embeddings are computed on the fly and
 * cached onto the stored Memory objects (an intentional mutation).
 */
async semanticSearch(options: SemanticSearchOptions): Promise<Memory[]> {
  try {
    logger.debug('Performing semantic search', { query: options.query.substring(0, 50) })

    const limit = options.limit ?? 20
    // Pre-filter candidates before any embedding work.
    let candidates = Array.from(this.store.memories.values())
    if (options.containerTag) {
      candidates = candidates.filter((m) => m.containerTag === options.containerTag)
    }

    if (options.latestOnly) {
      candidates = candidates.filter((m) => m.isLatest)
    }

    if (options.type) {
      candidates = candidates.filter((m) => m.type === options.type)
    }

    // Fallback path: plain substring search when embeddings are disabled.
    if (!isEmbeddingRelationshipsEnabled()) {
      const query = options.query.toLowerCase()
      const results = candidates.filter((m) => m.content.toLowerCase().includes(query))
      results.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime())
      return results.slice(0, limit)
    }

    if (candidates.length === 0) {
      return []
    }

    const embeddingService = getEmbeddingService()
    const queryEmbedding = await embeddingService.generateEmbedding(options.query)

    // Score all candidates in parallel, lazily materialising embeddings.
    const scored = await Promise.all(
      candidates.map(async (memory) => {
        if (!memory.embedding || memory.embedding.length === 0) {
          // Cache onto the stored object so the next search skips this work.
          memory.embedding = await embeddingService.generateEmbedding(memory.content)
        }

        const similarity = cosineSimilarity(queryEmbedding, memory.embedding)
        return { memory, similarity }
      })
    )

    // Threshold of 0 (default) keeps everything, including orthogonal matches.
    const threshold = options.similarityThreshold ?? 0
    const filtered = scored.filter((item) => item.similarity >= threshold)

    // Primary: similarity desc; tiebreak: newest first.
    filtered.sort((a, b) => {
      if (b.similarity !== a.similarity) {
        return b.similarity - a.similarity
      }
      return b.memory.createdAt.getTime() - a.memory.createdAt.getTime()
    })

    return filtered.slice(0, limit).map((item) => item.memory)
  } catch (error) {
    logger.errorWithException('Failed to perform semantic search', error)
    throw new DatabaseError('Failed to perform semantic search', 'semanticSearch', {
      originalError: error,
    })
  }
}
|
|
522
|
+
|
|
523
|
+
async findPotentialRelations(
|
|
524
|
+
memory: Memory,
|
|
525
|
+
options: {
|
|
526
|
+
containerTag?: string
|
|
527
|
+
limit?: number
|
|
528
|
+
excludeIds?: string[]
|
|
529
|
+
} = {}
|
|
530
|
+
): Promise<Memory[]> {
|
|
531
|
+
try {
|
|
532
|
+
logger.debug('Finding potential relations', { memoryId: memory.id })
|
|
533
|
+
|
|
534
|
+
const { containerTag, limit = 100, excludeIds = [] } = options
|
|
535
|
+
|
|
536
|
+
let results = Array.from(this.store.memories.values()).filter(
|
|
537
|
+
(m) => m.isLatest && !excludeIds.includes(m.id) && m.id !== memory.id
|
|
538
|
+
)
|
|
539
|
+
|
|
540
|
+
if (containerTag) {
|
|
541
|
+
results = results.filter((m) => m.containerTag === containerTag)
|
|
542
|
+
}
|
|
543
|
+
|
|
544
|
+
results.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime())
|
|
545
|
+
|
|
546
|
+
return results.slice(0, limit)
|
|
547
|
+
} catch (error) {
|
|
548
|
+
logger.errorWithException('Failed to find potential relations', error)
|
|
549
|
+
throw new DatabaseError('Failed to find potential relations', 'findPotentialRelations', {
|
|
550
|
+
originalError: error,
|
|
551
|
+
})
|
|
552
|
+
}
|
|
553
|
+
}
|
|
554
|
+
|
|
555
|
+
async createRelationship(relationship: Relationship): Promise<Relationship> {
|
|
556
|
+
try {
|
|
557
|
+
logger.debug('Creating relationship', {
|
|
558
|
+
id: relationship.id,
|
|
559
|
+
type: relationship.type,
|
|
560
|
+
source: relationship.sourceMemoryId,
|
|
561
|
+
target: relationship.targetMemoryId,
|
|
562
|
+
})
|
|
563
|
+
|
|
564
|
+
this.store.relationships.set(relationship.id, { ...relationship })
|
|
565
|
+
logger.info('Relationship created', { id: relationship.id })
|
|
566
|
+
return relationship
|
|
567
|
+
} catch (error) {
|
|
568
|
+
logger.errorWithException('Failed to create relationship', error)
|
|
569
|
+
throw new DatabaseError('Failed to create relationship', 'createRelationship', {
|
|
570
|
+
originalError: error,
|
|
571
|
+
})
|
|
572
|
+
}
|
|
573
|
+
}
|
|
574
|
+
|
|
575
|
+
async createRelationshipBatch(relationships: Relationship[]): Promise<Relationship[]> {
|
|
576
|
+
try {
|
|
577
|
+
logger.debug('Creating relationships batch', { count: relationships.length })
|
|
578
|
+
|
|
579
|
+
const created: Relationship[] = []
|
|
580
|
+
for (const rel of relationships) {
|
|
581
|
+
this.store.relationships.set(rel.id, { ...rel })
|
|
582
|
+
created.push(rel)
|
|
583
|
+
}
|
|
584
|
+
|
|
585
|
+
logger.info('Relationships batch created', { count: created.length })
|
|
586
|
+
return created
|
|
587
|
+
} catch (error) {
|
|
588
|
+
logger.errorWithException('Failed to create relationships batch', error)
|
|
589
|
+
throw new DatabaseError('Failed to create relationships batch', 'createRelationshipBatch', {
|
|
590
|
+
originalError: error,
|
|
591
|
+
})
|
|
592
|
+
}
|
|
593
|
+
}
|
|
594
|
+
|
|
595
|
+
async findRelationships(
|
|
596
|
+
memoryId: string,
|
|
597
|
+
options: {
|
|
598
|
+
types?: RelationshipType[]
|
|
599
|
+
direction?: 'source' | 'target' | 'both'
|
|
600
|
+
} = {}
|
|
601
|
+
): Promise<Relationship[]> {
|
|
602
|
+
try {
|
|
603
|
+
validate(uuidSchema, memoryId)
|
|
604
|
+
logger.debug('Finding relationships', { memoryId, options })
|
|
605
|
+
|
|
606
|
+
const { types, direction = 'both' } = options
|
|
607
|
+
|
|
608
|
+
const results = Array.from(this.store.relationships.values()).filter((rel) => {
|
|
609
|
+
const matchesDirection =
|
|
610
|
+
direction === 'both' ||
|
|
611
|
+
(direction === 'source' && rel.sourceMemoryId === memoryId) ||
|
|
612
|
+
(direction === 'target' && rel.targetMemoryId === memoryId)
|
|
613
|
+
|
|
614
|
+
if (!matchesDirection) return false
|
|
615
|
+
|
|
616
|
+
if (direction === 'both') {
|
|
617
|
+
if (rel.sourceMemoryId !== memoryId && rel.targetMemoryId !== memoryId) {
|
|
618
|
+
return false
|
|
619
|
+
}
|
|
620
|
+
}
|
|
621
|
+
|
|
622
|
+
if (types && !types.includes(rel.type)) return false
|
|
623
|
+
|
|
624
|
+
return true
|
|
625
|
+
})
|
|
626
|
+
|
|
627
|
+
return results
|
|
628
|
+
} catch (error) {
|
|
629
|
+
logger.errorWithException('Failed to find relationships', error, { memoryId })
|
|
630
|
+
throw new DatabaseError('Failed to find relationships', 'findRelationships', {
|
|
631
|
+
originalError: error,
|
|
632
|
+
})
|
|
633
|
+
}
|
|
634
|
+
}
|
|
635
|
+
|
|
636
|
+
async deleteRelationship(id: string): Promise<boolean> {
|
|
637
|
+
try {
|
|
638
|
+
validate(uuidSchema, id)
|
|
639
|
+
logger.debug('Deleting relationship', { id })
|
|
640
|
+
|
|
641
|
+
const deleted = this.store.relationships.delete(id)
|
|
642
|
+
if (deleted) {
|
|
643
|
+
logger.info('Relationship deleted', { id })
|
|
644
|
+
}
|
|
645
|
+
return deleted
|
|
646
|
+
} catch (error) {
|
|
647
|
+
logger.errorWithException('Failed to delete relationship', error, { relationshipId: id })
|
|
648
|
+
throw new DatabaseError('Failed to delete relationship', 'deleteRelationship', {
|
|
649
|
+
originalError: error,
|
|
650
|
+
})
|
|
651
|
+
}
|
|
652
|
+
}
|
|
653
|
+
|
|
654
|
+
async markSuperseded(memoryId: string, supersededById: string): Promise<Memory | null> {
|
|
655
|
+
logger.debug('Marking memory as superseded', { memoryId, supersededById })
|
|
656
|
+
return this.update(memoryId, {
|
|
657
|
+
isLatest: false,
|
|
658
|
+
supersededBy: supersededById,
|
|
659
|
+
})
|
|
660
|
+
}
|
|
661
|
+
|
|
662
|
+
async getAllMemories(): Promise<Memory[]> {
|
|
663
|
+
return Array.from(this.store.memories.values())
|
|
664
|
+
}
|
|
665
|
+
|
|
666
|
+
async getAllRelationships(): Promise<Relationship[]> {
|
|
667
|
+
return Array.from(this.store.relationships.values())
|
|
668
|
+
}
|
|
669
|
+
|
|
670
|
+
async clearAll(): Promise<void> {
|
|
671
|
+
logger.debug('Clearing all memory data')
|
|
672
|
+
this.store.memories.clear()
|
|
673
|
+
this.store.relationships.clear()
|
|
674
|
+
logger.info('All memory data cleared')
|
|
675
|
+
}
|
|
676
|
+
|
|
677
|
+
async getStats(): Promise<{
|
|
678
|
+
totalMemories: number
|
|
679
|
+
latestMemories: number
|
|
680
|
+
totalRelationships: number
|
|
681
|
+
byType: Record<string, number>
|
|
682
|
+
byContainerTag: Record<string, number>
|
|
683
|
+
}> {
|
|
684
|
+
const memories = Array.from(this.store.memories.values())
|
|
685
|
+
const relationships = Array.from(this.store.relationships.values())
|
|
686
|
+
|
|
687
|
+
const byType: Record<string, number> = {}
|
|
688
|
+
const byContainerTag: Record<string, number> = {}
|
|
689
|
+
|
|
690
|
+
for (const memory of memories) {
|
|
691
|
+
byType[memory.type] = (byType[memory.type] || 0) + 1
|
|
692
|
+
const tag = memory.containerTag ?? 'default'
|
|
693
|
+
byContainerTag[tag] = (byContainerTag[tag] || 0) + 1
|
|
694
|
+
}
|
|
695
|
+
|
|
696
|
+
return {
|
|
697
|
+
totalMemories: memories.length,
|
|
698
|
+
latestMemories: memories.filter((m) => m.isLatest).length,
|
|
699
|
+
totalRelationships: relationships.length,
|
|
700
|
+
byType,
|
|
701
|
+
byContainerTag,
|
|
702
|
+
}
|
|
703
|
+
}
|
|
704
|
+
|
|
705
|
+
/**
 * Serialize the full store contents into a plain snapshot object,
 * stamped with the export time and a schema version of 1.
 */
exportData(): {
  memories: Memory[]
  relationships: Relationship[]
  exportedAt: string
  version: number
} {
  const memories = [...this.store.memories.values()]
  const relationships = [...this.store.relationships.values()]
  return {
    memories,
    relationships,
    exportedAt: new Date().toISOString(),
    version: 1,
  }
}
|
|
718
|
+
|
|
719
|
+
async importData(data: {
|
|
720
|
+
memories: Memory[]
|
|
721
|
+
relationships: Relationship[]
|
|
722
|
+
}): Promise<{ memoriesImported: number; relationshipsImported: number }> {
|
|
723
|
+
logger.debug('Importing data', {
|
|
724
|
+
memoryCount: data.memories.length,
|
|
725
|
+
relationshipCount: data.relationships.length,
|
|
726
|
+
})
|
|
727
|
+
|
|
728
|
+
this.store.memories.clear()
|
|
729
|
+
this.store.relationships.clear()
|
|
730
|
+
|
|
731
|
+
for (const memory of data.memories) {
|
|
732
|
+
const normalizedMemory: Memory = {
|
|
733
|
+
...memory,
|
|
734
|
+
createdAt: new Date(memory.createdAt),
|
|
735
|
+
updatedAt: new Date(memory.updatedAt),
|
|
736
|
+
}
|
|
737
|
+
this.store.memories.set(normalizedMemory.id, normalizedMemory)
|
|
738
|
+
}
|
|
739
|
+
|
|
740
|
+
for (const rel of data.relationships) {
|
|
741
|
+
const normalizedRel: Relationship = {
|
|
742
|
+
...rel,
|
|
743
|
+
createdAt: new Date(rel.createdAt),
|
|
744
|
+
}
|
|
745
|
+
this.store.relationships.set(normalizedRel.id, normalizedRel)
|
|
746
|
+
}
|
|
747
|
+
|
|
748
|
+
logger.info('Data imported', {
|
|
749
|
+
memoriesImported: data.memories.length,
|
|
750
|
+
relationshipsImported: data.relationships.length,
|
|
751
|
+
})
|
|
752
|
+
|
|
753
|
+
return {
|
|
754
|
+
memoriesImported: data.memories.length,
|
|
755
|
+
relationshipsImported: data.relationships.length,
|
|
756
|
+
}
|
|
757
|
+
}
|
|
758
|
+
|
|
759
|
+
async saveToFile(filePath: string): Promise<void> {
|
|
760
|
+
const { writeFile, mkdir } = await import('node:fs/promises')
|
|
761
|
+
const { dirname } = await import('node:path')
|
|
762
|
+
const { existsSync } = await import('node:fs')
|
|
763
|
+
|
|
764
|
+
const dir = dirname(filePath)
|
|
765
|
+
if (!existsSync(dir)) {
|
|
766
|
+
await mkdir(dir, { recursive: true })
|
|
767
|
+
}
|
|
768
|
+
|
|
769
|
+
const data = this.exportData()
|
|
770
|
+
await writeFile(filePath, JSON.stringify(data, null, 2), 'utf-8')
|
|
771
|
+
logger.info('Data saved to file', { filePath, memoryCount: data.memories.length })
|
|
772
|
+
}
|
|
773
|
+
|
|
774
|
+
async loadFromFile(filePath: string): Promise<boolean> {
|
|
775
|
+
const { readFile } = await import('node:fs/promises')
|
|
776
|
+
const { existsSync } = await import('node:fs')
|
|
777
|
+
|
|
778
|
+
if (!existsSync(filePath)) {
|
|
779
|
+
logger.debug('No persistence file found', { filePath })
|
|
780
|
+
return false
|
|
781
|
+
}
|
|
782
|
+
|
|
783
|
+
try {
|
|
784
|
+
const content = await readFile(filePath, 'utf-8')
|
|
785
|
+
const data = JSON.parse(content) as {
|
|
786
|
+
memories: Memory[]
|
|
787
|
+
relationships: Relationship[]
|
|
788
|
+
version: number
|
|
789
|
+
}
|
|
790
|
+
|
|
791
|
+
await this.importData(data)
|
|
792
|
+
logger.info('Data loaded from file', { filePath })
|
|
793
|
+
return true
|
|
794
|
+
} catch (error) {
|
|
795
|
+
logger.errorWithException('Failed to load data from file', error, { filePath })
|
|
796
|
+
return false
|
|
797
|
+
}
|
|
798
|
+
}
|
|
799
|
+
}
|
|
800
|
+
|
|
801
|
+
// ============================================================================
|
|
802
|
+
// Memory Repository
|
|
803
|
+
// ============================================================================
|
|
804
|
+
|
|
805
|
+
/**
|
|
806
|
+
* Memory Repository class for database operations
|
|
807
|
+
* Accepts a store via constructor for compatibility with older tests
|
|
808
|
+
*/
|
|
809
|
+
export class PostgresMemoryRepository {
|
|
810
|
+
private readonly store: MemoryStore
|
|
811
|
+
|
|
812
|
+
constructor(store?: MemoryStore) {
  // Fall back to a fresh in-memory store when none is injected, keeping
  // compatibility with older tests that construct without arguments.
  this.store = store ?? createMemoryStore()
}
|
|
815
|
+
|
|
816
|
+
/**
 * Get the underlying store (for testing/debugging).
 *
 * @returns the MemoryStore injected at construction, or the one created
 *   by the default constructor path
 */
getStore(): MemoryStore {
  return this.store
}
|
|
822
|
+
|
|
823
|
+
/**
 * Create a new memory row in Postgres.
 *
 * @param memory - fully-populated memory; id must be set by the caller
 * @returns the same memory object on success
 * @throws DatabaseError when the id is missing, a row with the id already
 *   exists, or the insert fails
 */
async create(memory: Memory): Promise<Memory> {
  // Validate the tag before touching the database so a bad tag fails fast.
  if (memory.containerTag !== undefined) {
    validateContainerTag(memory.containerTag)
  }
  try {
    logger.debug('Creating memory', { id: memory.id, type: memory.type })

    if (!memory.id) {
      throw new DatabaseError('Memory ID is required', 'create')
    }

    // NOTE(review): select-then-insert is not atomic; two concurrent creates
    // with the same id can race past this check. A unique-violation handler
    // or ON CONFLICT clause would close the window — confirm desired behavior.
    const existing = await db.select().from(memoriesTable).where(eq(memoriesTable.id, memory.id))
    if (existing.length > 0) {
      throw new DatabaseError(`Memory with ID ${memory.id} already exists`, 'create', {
        existingId: memory.id,
      })
    }

    // The domain type may not map 1:1 onto the DB type; when the mapper
    // reports an originalType, it is preserved in metadata for round-trips.
    const metadata = normalizeMetadata(memory.metadata)
    const { dbType, originalType } = mapMemoryTypeToDb(memory.type)
    if (originalType) {
      metadata.originalType = originalType
    }

    await db.insert(memoriesTable).values({
      id: memory.id,
      content: memory.content,
      memoryType: dbType,
      documentId: memory.sourceId ?? null,
      isLatest: memory.isLatest,
      similarityHash: generateSimilarityHash(memory.content),
      containerTag: memory.containerTag ?? 'default',
      // Confidence is persisted as a string (decimal column convention).
      confidenceScore: memory.confidence.toString(),
      metadata,
      // NOTE(review): the domain field supersededBy is written into the
      // supersedesId column — confirm the column's direction matches the
      // domain semantics.
      supersedesId: memory.supededBy ?? null,
      updatedAt: memory.updatedAt ?? new Date(),
      createdAt: memory.createdAt ?? new Date(),
      version: 1,
    })

    logger.info('Memory created', { id: memory.id })
    return memory
  } catch (error) {
    // Re-throw deliberate DatabaseErrors (missing id / duplicate) unchanged.
    if (error instanceof DatabaseError) {
      throw error
    }
    logger.errorWithException('Failed to create memory', error, { memoryId: memory.id })
    throw new DatabaseError('Failed to create memory', 'create', { originalError: error })
  }
}
|
|
876
|
+
|
|
877
|
+
/**
 * Create multiple memories in a single bulk INSERT.
 *
 * @param memories - pre-populated memory objects; ids must be set
 * @returns the input array unchanged on success (empty input short-circuits)
 * @throws DatabaseError when the bulk insert fails; unlike create(), no
 *   per-row duplicate-id check is performed here
 */
async createBatch(memories: Memory[]): Promise<Memory[]> {
  // Validate every container tag up-front so a bad tag fails before any
  // row is written.
  for (const memory of memories) {
    if (memory.containerTag !== undefined) {
      validateContainerTag(memory.containerTag)
    }
  }
  try {
    logger.debug('Creating memories batch', { count: memories.length })

    if (memories.length === 0) {
      return []
    }

    // Mirror the per-row mapping done by create(): normalize metadata,
    // map the domain type onto the DB type, and stash any originalType.
    const values = memories.map((memory) => {
      const metadata = normalizeMetadata(memory.metadata)
      const { dbType, originalType } = mapMemoryTypeToDb(memory.type)
      if (originalType) {
        metadata.originalType = originalType
      }

      return {
        id: memory.id,
        content: memory.content,
        memoryType: dbType,
        documentId: memory.sourceId ?? null,
        isLatest: memory.isLatest,
        similarityHash: generateSimilarityHash(memory.content),
        containerTag: memory.containerTag ?? 'default',
        confidenceScore: memory.confidence.toString(),
        metadata,
        // NOTE(review): supersededBy → supersedesId mapping mirrors create();
        // confirm the column direction.
        supersedesId: memory.supersededBy ?? null,
        updatedAt: memory.updatedAt ?? new Date(),
        createdAt: memory.createdAt ?? new Date(),
        version: 1,
      }
    })

    await db.insert(memoriesTable).values(values)
    logger.info('Memories batch created', { count: memories.length })
    return memories
  } catch (error) {
    logger.errorWithException('Failed to create memories batch', error)
    throw new DatabaseError('Failed to create memories batch', 'createBatch', {
      originalError: error,
    })
  }
}
|
|
927
|
+
|
|
928
|
+
/**
 * Update an existing memory row with a partial patch.
 *
 * Only fields present in `updates` are written; `updatedAt` is always
 * refreshed and `metadata` is always rewritten (either from the patch or
 * re-normalized from the current row).
 *
 * @param id - UUID of the memory to update
 * @param updates - partial memory fields to apply
 * @returns the updated memory, or null when no row matches the id
 * @throws DatabaseError when validation or the update statement fails
 */
async update(id: string, updates: Partial<Memory>): Promise<Memory | null> {
  if (updates.containerTag !== undefined) {
    validateContainerTag(updates.containerTag)
  }
  try {
    validate(uuidSchema, id)
    logger.debug('Updating memory', { id })

    // Fetch first so a missing row returns null instead of throwing,
    // and so current metadata is available for re-normalization.
    const existing = await db.select().from(memoriesTable).where(eq(memoriesTable.id, id))
    if (existing.length === 0) {
      logger.warn('Memory not found for update', { id })
      return null
    }

    const current = existing[0]!
    // Metadata source: the patch if provided, otherwise the stored value.
    const metadata = updates.metadata
      ? normalizeMetadata(updates.metadata)
      : normalizeMetadata(current.metadata as Record<string, unknown> | null)

    const updateData: Partial<typeof memoriesTable.$inferInsert> = {
      updatedAt: new Date(),
    }

    if (updates.content !== undefined) {
      updateData.content = updates.content
      // Content changes invalidate the stored similarity hash.
      updateData.similarityHash = generateSimilarityHash(updates.content)
    }

    if (updates.type !== undefined) {
      const { dbType, originalType } = mapMemoryTypeToDb(updates.type)
      updateData.memoryType = dbType
      if (originalType) {
        // Preserve the domain type name when it has no direct DB equivalent.
        metadata.originalType = originalType
      }
    }

    if (updates.isLatest !== undefined) {
      updateData.isLatest = updates.isLatest
    }

    if (updates.supersededBy !== undefined) {
      // NOTE(review): supersededBy is written into the supersedesId column,
      // matching create() — confirm the column direction.
      updateData.supersedesId = updates.supersededBy ?? null
    }

    if (updates.containerTag !== undefined) {
      updateData.containerTag = updates.containerTag ?? 'default'
    }

    if (updates.confidence !== undefined) {
      updateData.confidenceScore = updates.confidence.toString()
    }

    // Metadata is written unconditionally (patch-derived or re-normalized).
    updateData.metadata = metadata

    const [updatedRow] = await db.update(memoriesTable).set(updateData).where(eq(memoriesTable.id, id)).returning()

    // Row could disappear between the select and the update.
    if (!updatedRow) {
      return null
    }

    logger.info('Memory updated', { id })
    return mapDbMemory(updatedRow)
  } catch (error) {
    logger.errorWithException('Failed to update memory', error, { memoryId: id })
    throw new DatabaseError('Failed to update memory', 'update', {
      originalError: error,
      memoryId: id,
    })
  }
}
|
|
1001
|
+
|
|
1002
|
+
/**
|
|
1003
|
+
* Delete a memory by ID
|
|
1004
|
+
*/
|
|
1005
|
+
async delete(id: string): Promise<boolean> {
|
|
1006
|
+
try {
|
|
1007
|
+
validate(uuidSchema, id)
|
|
1008
|
+
logger.debug('Deleting memory', { id })
|
|
1009
|
+
|
|
1010
|
+
const deleted = await db.delete(memoriesTable).where(eq(memoriesTable.id, id)).returning({ id: memoriesTable.id })
|
|
1011
|
+
|
|
1012
|
+
if (deleted.length > 0) {
|
|
1013
|
+
logger.info('Memory deleted', { id })
|
|
1014
|
+
return true
|
|
1015
|
+
}
|
|
1016
|
+
|
|
1017
|
+
logger.warn('Memory not found for deletion', { id })
|
|
1018
|
+
return false
|
|
1019
|
+
} catch (error) {
|
|
1020
|
+
logger.errorWithException('Failed to delete memory', error, { memoryId: id })
|
|
1021
|
+
throw new DatabaseError('Failed to delete memory', 'delete', {
|
|
1022
|
+
originalError: error,
|
|
1023
|
+
memoryId: id,
|
|
1024
|
+
})
|
|
1025
|
+
}
|
|
1026
|
+
}
|
|
1027
|
+
|
|
1028
|
+
/**
|
|
1029
|
+
* Find a memory by ID
|
|
1030
|
+
*/
|
|
1031
|
+
async findById(id: string): Promise<Memory | null> {
|
|
1032
|
+
try {
|
|
1033
|
+
validate(uuidSchema, id)
|
|
1034
|
+
logger.debug('Finding memory by ID', { id })
|
|
1035
|
+
const [memory] = await db.select().from(memoriesTable).where(eq(memoriesTable.id, id))
|
|
1036
|
+
return memory ? mapDbMemory(memory) : null
|
|
1037
|
+
} catch (error) {
|
|
1038
|
+
logger.errorWithException('Failed to find memory', error, { memoryId: id })
|
|
1039
|
+
throw new DatabaseError('Failed to find memory', 'findById', {
|
|
1040
|
+
originalError: error,
|
|
1041
|
+
memoryId: id,
|
|
1042
|
+
})
|
|
1043
|
+
}
|
|
1044
|
+
}
|
|
1045
|
+
|
|
1046
|
+
/**
|
|
1047
|
+
* Find memories by container tag
|
|
1048
|
+
*/
|
|
1049
|
+
async findByContainerTag(containerTag: string, options: MemoryQueryOptions = {}): Promise<Memory[]> {
|
|
1050
|
+
validateContainerTag(containerTag)
|
|
1051
|
+
try {
|
|
1052
|
+
const validatedOptions = validate(memoryQueryOptionsSchema, options)
|
|
1053
|
+
logger.debug('Finding memories by container tag', {
|
|
1054
|
+
containerTag,
|
|
1055
|
+
options: validatedOptions,
|
|
1056
|
+
})
|
|
1057
|
+
|
|
1058
|
+
const conditions = [eq(memoriesTable.containerTag, containerTag)]
|
|
1059
|
+
|
|
1060
|
+
if (validatedOptions.latestOnly) {
|
|
1061
|
+
conditions.push(eq(memoriesTable.isLatest, true))
|
|
1062
|
+
}
|
|
1063
|
+
|
|
1064
|
+
if (validatedOptions.type) {
|
|
1065
|
+
const { dbType } = mapMemoryTypeToDb(validatedOptions.type)
|
|
1066
|
+
conditions.push(eq(memoriesTable.memoryType, dbType))
|
|
1067
|
+
}
|
|
1068
|
+
|
|
1069
|
+
if (validatedOptions.minConfidence !== undefined) {
|
|
1070
|
+
conditions.push(sql`${memoriesTable.confidenceScore} >= ${validatedOptions.minConfidence}`)
|
|
1071
|
+
}
|
|
1072
|
+
|
|
1073
|
+
const sortBy = validatedOptions.sortBy || 'createdAt'
|
|
1074
|
+
const sortOrder = validatedOptions.sortOrder || 'desc'
|
|
1075
|
+
const orderField =
|
|
1076
|
+
sortBy === 'updatedAt'
|
|
1077
|
+
? memoriesTable.updatedAt
|
|
1078
|
+
: sortBy === 'confidence'
|
|
1079
|
+
? memoriesTable.confidenceScore
|
|
1080
|
+
: memoriesTable.createdAt
|
|
1081
|
+
|
|
1082
|
+
const orderBy = sortOrder === 'asc' ? asc(orderField) : desc(orderField)
|
|
1083
|
+
const limit = validatedOptions.limit ?? 100
|
|
1084
|
+
const offset = validatedOptions.offset ?? 0
|
|
1085
|
+
|
|
1086
|
+
const rows = await db
|
|
1087
|
+
.select()
|
|
1088
|
+
.from(memoriesTable)
|
|
1089
|
+
.where(and(...conditions))
|
|
1090
|
+
.orderBy(orderBy)
|
|
1091
|
+
.limit(limit)
|
|
1092
|
+
.offset(offset)
|
|
1093
|
+
|
|
1094
|
+
return rows.map(mapDbMemory)
|
|
1095
|
+
} catch (error) {
|
|
1096
|
+
logger.errorWithException('Failed to find memories by container tag', error, {
|
|
1097
|
+
containerTag,
|
|
1098
|
+
})
|
|
1099
|
+
throw new DatabaseError('Failed to find memories', 'findByContainerTag', {
|
|
1100
|
+
originalError: error,
|
|
1101
|
+
})
|
|
1102
|
+
}
|
|
1103
|
+
}
|
|
1104
|
+
|
|
1105
|
+
/**
 * Find related memories via a breadth-first walk of the relationship graph.
 *
 * Starting from memoryId, follows relationships in both directions up to
 * `depth` hops, collecting each newly discovered memory together with the
 * relationship that reached it.
 *
 * @param memoryId - UUID of the starting memory (not included in results)
 * @param options.relationshipTypes - restrict traversal to these types
 * @param options.depth - maximum hop count (default 1)
 * @param options.limit - maximum results returned (default 50)
 * @throws DatabaseError when validation or a lookup fails
 */
async findRelated(
  memoryId: string,
  options: {
    relationshipTypes?: RelationshipType[]
    depth?: number
    limit?: number
  } = {}
): Promise<{ memory: Memory; relationship: Relationship }[]> {
  try {
    validate(uuidSchema, memoryId)
    logger.debug('Finding related memories', { memoryId, options })

    const { relationshipTypes: types, depth = 1, limit = 50 } = options
    const results: { memory: Memory; relationship: Relationship }[] = []
    // visited prevents revisiting nodes (and excludes the start node itself).
    const visited = new Set<string>()
    const queue: { id: string; currentDepth: number }[] = [{ id: memoryId, currentDepth: 0 }]

    // BFS; the limit is checked per dequeued node, so one node's neighbors
    // may briefly overshoot — trimmed by the final slice.
    while (queue.length > 0 && results.length < limit) {
      const current = queue.shift()!

      // Skip already-expanded nodes and nodes at the depth boundary.
      if (visited.has(current.id) || current.currentDepth >= depth) {
        continue
      }
      visited.add(current.id)

      const relationships = await this.findRelationships(current.id, {
        types,
        direction: 'both',
      })

      for (const rel of relationships) {
        // The neighbor is whichever endpoint is not the current node.
        const relatedId = rel.sourceMemoryId === current.id ? rel.targetMemoryId : rel.sourceMemoryId

        if (visited.has(relatedId)) {
          continue
        }

        const relatedMemory = await this.findById(relatedId)
        if (relatedMemory) {
          results.push({ memory: relatedMemory, relationship: rel })

          // Only enqueue for further expansion while under the depth cap.
          if (current.currentDepth + 1 < depth) {
            queue.push({ id: relatedId, currentDepth: current.currentDepth + 1 })
          }
        }
      }
    }

    return results.slice(0, limit)
  } catch (error) {
    logger.errorWithException('Failed to find related memories', error, { memoryId })
    throw new DatabaseError('Failed to find related memories', 'findRelated', {
      originalError: error,
    })
  }
}
|
|
1164
|
+
|
|
1165
|
+
/**
 * Semantic search over memories.
 *
 * Three strategies, in order:
 *  1. Embeddings disabled → plain ILIKE substring match on content.
 *  2. Embeddings enabled → pgvector cosine-distance query joining the
 *     embeddings table (rows without stored embeddings are not matched).
 *  3. If the vector query returns nothing → fallback that re-embeds the
 *     newest matching rows client-side and ranks them by cosine similarity.
 *
 * @param options - query text plus optional containerTag / latestOnly /
 *   type / minConfidence / similarityThreshold / limit (default 20)
 * @throws DatabaseError when any of the queries or the embedding calls fail
 */
async semanticSearch(options: SemanticSearchOptions): Promise<Memory[]> {
  try {
    logger.debug('Performing semantic search', { query: options.query.substring(0, 50) })

    const limit = options.limit ?? 20

    // Strategy 1: no embedding support — cheap substring search.
    if (!isEmbeddingRelationshipsEnabled()) {
      const conditions = [sql`${memoriesTable.content} ILIKE ${`%${options.query}%`}`]

      if (options.containerTag) {
        conditions.push(eq(memoriesTable.containerTag, options.containerTag))
      }

      if (options.latestOnly) {
        conditions.push(eq(memoriesTable.isLatest, true))
      }

      if (options.type) {
        const { dbType } = mapMemoryTypeToDb(options.type)
        conditions.push(eq(memoriesTable.memoryType, dbType))
      }

      const rows = await db
        .select()
        .from(memoriesTable)
        .where(and(...conditions))
        .orderBy(desc(memoriesTable.createdAt))
        .limit(limit)

      return rows.map(mapDbMemory)
    }

    // Strategy 2: pgvector query. The query embedding is rendered as a
    // '[x,y,...]' literal and cast to ::vector.
    const embeddingService = getEmbeddingService()
    const queryEmbedding = await embeddingService.generateEmbedding(options.query)
    const queryVector = `[${queryEmbedding.join(',')}]`
    const similarityThreshold = options.similarityThreshold ?? 0

    // `1 = 1` seeds the clause list so sql.join always has content.
    const whereClauses = [sql`1 = 1`]

    if (options.containerTag) {
      whereClauses.push(eq(memoriesTable.containerTag, options.containerTag))
    }

    if (options.latestOnly) {
      whereClauses.push(eq(memoriesTable.isLatest, true))
    }

    if (options.type) {
      const { dbType } = mapMemoryTypeToDb(options.type)
      whereClauses.push(eq(memoriesTable.memoryType, dbType))
    }

    if (options.minConfidence !== undefined) {
      whereClauses.push(sql`${memoriesTable.confidenceScore} >= ${options.minConfidence}`)
    }

    // `<=>` is pgvector's cosine-distance operator; similarity = 1 - distance.
    const similarityExpression = sql`1 - (${memoryEmbeddings.embedding} <=> ${queryVector}::vector)`

    const result = await db.execute(sql`
      SELECT ${memoriesTable}.*, ${similarityExpression} as similarity
      FROM ${memoryEmbeddings}
      JOIN ${memoriesTable}
      ON ${memoriesTable.id} = ${memoryEmbeddings.memoryId}
      WHERE ${sql.join(whereClauses, sql` AND `)}
      AND ${similarityExpression} >= ${similarityThreshold}
      ORDER BY ${memoryEmbeddings.embedding} <=> ${queryVector}::vector
      LIMIT ${limit}
    `)

    const rows = (result as unknown as { rows?: Array<typeof memoriesTable.$inferSelect> }).rows ?? []
    if (rows.length > 0) {
      return rows.map(mapDbMemory)
    }

    // Strategy 3: no embedded rows matched — pull the newest rows that pass
    // the same filters and score them client-side.
    const fallbackRows = await db
      .select()
      .from(memoriesTable)
      .where(and(...whereClauses))
      .orderBy(desc(memoriesTable.createdAt))
      .limit(limit)

    if (fallbackRows.length === 0) {
      return []
    }

    // NOTE(review): this re-embeds up to `limit` contents per call — one
    // embedding-service request each. Acceptable for small limits; confirm
    // for larger ones.
    const scored = await Promise.all(
      fallbackRows.map(async (memory) => {
        const embedding = await embeddingService.generateEmbedding(memory.content)
        const similarity = cosineSimilarity(queryEmbedding, embedding)
        return { memory: mapDbMemory(memory), similarity }
      })
    )

    // Rank by similarity, breaking ties by recency.
    const filtered = scored.filter((item) => item.similarity >= similarityThreshold)
    filtered.sort((a, b) => {
      if (b.similarity !== a.similarity) return b.similarity - a.similarity
      return b.memory.createdAt.getTime() - a.memory.createdAt.getTime()
    })

    return filtered.slice(0, limit).map((item) => item.memory)
  } catch (error) {
    logger.errorWithException('Failed to perform semantic search', error)
    throw new DatabaseError('Failed to perform semantic search', 'semanticSearch', {
      originalError: error,
    })
  }
}
|
|
1275
|
+
|
|
1276
|
+
/**
|
|
1277
|
+
* Find memories that might be related to a new memory
|
|
1278
|
+
* Used for relationship detection
|
|
1279
|
+
*/
|
|
1280
|
+
async findPotentialRelations(
|
|
1281
|
+
memory: Memory,
|
|
1282
|
+
options: {
|
|
1283
|
+
containerTag?: string
|
|
1284
|
+
limit?: number
|
|
1285
|
+
excludeIds?: string[]
|
|
1286
|
+
} = {}
|
|
1287
|
+
): Promise<Memory[]> {
|
|
1288
|
+
try {
|
|
1289
|
+
logger.debug('Finding potential relations', { memoryId: memory.id })
|
|
1290
|
+
|
|
1291
|
+
const { containerTag, limit = 100, excludeIds = [] } = options
|
|
1292
|
+
|
|
1293
|
+
const conditions = [eq(memoriesTable.isLatest, true), sql`${memoriesTable.id} != ${memory.id}`]
|
|
1294
|
+
|
|
1295
|
+
if (excludeIds.length > 0) {
|
|
1296
|
+
conditions.push(notInArray(memoriesTable.id, excludeIds))
|
|
1297
|
+
}
|
|
1298
|
+
|
|
1299
|
+
if (containerTag) {
|
|
1300
|
+
conditions.push(eq(memoriesTable.containerTag, containerTag))
|
|
1301
|
+
}
|
|
1302
|
+
|
|
1303
|
+
const rows = await db
|
|
1304
|
+
.select()
|
|
1305
|
+
.from(memoriesTable)
|
|
1306
|
+
.where(and(...conditions))
|
|
1307
|
+
.orderBy(desc(memoriesTable.createdAt))
|
|
1308
|
+
.limit(limit)
|
|
1309
|
+
|
|
1310
|
+
return rows.map(mapDbMemory)
|
|
1311
|
+
} catch (error) {
|
|
1312
|
+
logger.errorWithException('Failed to find potential relations', error)
|
|
1313
|
+
throw new DatabaseError('Failed to find potential relations', 'findPotentialRelations', {
|
|
1314
|
+
originalError: error,
|
|
1315
|
+
})
|
|
1316
|
+
}
|
|
1317
|
+
}
|
|
1318
|
+
|
|
1319
|
+
// ============ Relationship Operations ============
|
|
1320
|
+
|
|
1321
|
+
/**
 * Create a relationship row between two memories.
 *
 * @param relationship - fully-populated relationship; id must be set
 * @returns the same relationship object on success
 * @throws DatabaseError when the insert fails
 */
async createRelationship(relationship: Relationship): Promise<Relationship> {
  try {
    logger.debug('Creating relationship', {
      id: relationship.id,
      type: relationship.type,
      source: relationship.sourceMemoryId,
      target: relationship.targetMemoryId,
    })

    // The DB may not model every domain relationship type or a description
    // column; both are preserved in metadata for round-trips.
    const metadata = normalizeMetadata(relationship.metadata)
    const { dbType, originalType } = mapRelationshipTypeToDb(relationship.type)
    if (originalType) {
      metadata.originalType = originalType
    }
    if (relationship.description) {
      metadata.description = relationship.description
    }

    await db.insert(memoryRelationships).values({
      id: relationship.id,
      sourceMemoryId: relationship.sourceMemoryId,
      targetMemoryId: relationship.targetMemoryId,
      relationshipType: dbType,
      // Domain confidence is stored (stringified) in the weight column.
      weight: relationship.confidence.toString(),
      // All relationships are persisted as directed (source → target).
      bidirectional: false,
      metadata,
      createdAt: relationship.createdAt ?? new Date(),
    })

    logger.info('Relationship created', { id: relationship.id })
    return relationship
  } catch (error) {
    logger.errorWithException('Failed to create relationship', error)
    throw new DatabaseError('Failed to create relationship', 'createRelationship', {
      originalError: error,
    })
  }
}
|
|
1362
|
+
|
|
1363
|
+
/**
 * Create multiple relationship rows in one bulk INSERT.
 *
 * @param relationships - pre-populated relationships; ids must be set
 * @returns the input array unchanged on success (empty input short-circuits)
 * @throws DatabaseError when the bulk insert fails
 */
async createRelationshipBatch(relationships: Relationship[]): Promise<Relationship[]> {
  try {
    logger.debug('Creating relationships batch', { count: relationships.length })

    if (relationships.length === 0) {
      return []
    }

    // Mirror the per-row mapping done by createRelationship(): stash
    // originalType/description in metadata, stringify confidence as weight.
    const values = relationships.map((relationship) => {
      const metadata = normalizeMetadata(relationship.metadata)
      const { dbType, originalType } = mapRelationshipTypeToDb(relationship.type)
      if (originalType) {
        metadata.originalType = originalType
      }
      if (relationship.description) {
        metadata.description = relationship.description
      }

      return {
        id: relationship.id,
        sourceMemoryId: relationship.sourceMemoryId,
        targetMemoryId: relationship.targetMemoryId,
        relationshipType: dbType,
        weight: relationship.confidence.toString(),
        bidirectional: false,
        metadata,
        createdAt: relationship.createdAt ?? new Date(),
      }
    })

    await db.insert(memoryRelationships).values(values)
    logger.info('Relationships batch created', { count: relationships.length })
    return relationships
  } catch (error) {
    logger.errorWithException('Failed to create relationships batch', error)
    throw new DatabaseError('Failed to create relationships batch', 'createRelationshipBatch', {
      originalError: error,
    })
  }
}
|
|
1406
|
+
|
|
1407
|
+
/**
 * Find relationship rows touching a memory.
 *
 * @param memoryId - UUID of the memory whose relationships are queried
 * @param options.types - restrict to these relationship types; an empty
 *   array applies no type filter
 * @param options.direction - match on the source side, target side, or
 *   either endpoint (default 'both')
 * @throws DatabaseError when validation or the query fails
 */
async findRelationships(
  memoryId: string,
  options: {
    types?: RelationshipType[]
    direction?: 'source' | 'target' | 'both'
  } = {}
): Promise<Relationship[]> {
  try {
    validate(uuidSchema, memoryId)
    logger.debug('Finding relationships', { memoryId, options })

    const { types, direction = 'both' } = options
    const conditions: SQL<unknown>[] = []
    // 'both' matches either endpoint via OR; otherwise pin one side.
    const directionCondition =
      direction === 'source'
        ? eq(memoryRelationships.sourceMemoryId, memoryId)
        : direction === 'target'
          ? eq(memoryRelationships.targetMemoryId, memoryId)
          : or(eq(memoryRelationships.sourceMemoryId, memoryId), eq(memoryRelationships.targetMemoryId, memoryId))

    // or() can return undefined in drizzle's typings; guard before pushing.
    if (directionCondition) {
      conditions.push(directionCondition)
    }

    // Type filter only when at least one type is requested; domain types
    // are mapped onto their DB equivalents first.
    if (types && types.length > 0) {
      const dbTypes = types.map((type) => mapRelationshipTypeToDb(type).dbType)
      conditions.push(inArray(memoryRelationships.relationshipType, dbTypes))
    }

    const rows = await db
      .select()
      .from(memoryRelationships)
      .where(and(...conditions))

    return rows.map(mapDbRelationship)
  } catch (error) {
    logger.errorWithException('Failed to find relationships', error, { memoryId })
    throw new DatabaseError('Failed to find relationships', 'findRelationships', {
      originalError: error,
    })
  }
}
|
|
1452
|
+
|
|
1453
|
+
/**
|
|
1454
|
+
* Delete a relationship
|
|
1455
|
+
*/
|
|
1456
|
+
async deleteRelationship(id: string): Promise<boolean> {
|
|
1457
|
+
try {
|
|
1458
|
+
validate(uuidSchema, id)
|
|
1459
|
+
logger.debug('Deleting relationship', { id })
|
|
1460
|
+
|
|
1461
|
+
const deleted = await db
|
|
1462
|
+
.delete(memoryRelationships)
|
|
1463
|
+
.where(eq(memoryRelationships.id, id))
|
|
1464
|
+
.returning({ id: memoryRelationships.id })
|
|
1465
|
+
|
|
1466
|
+
if (deleted.length > 0) {
|
|
1467
|
+
logger.info('Relationship deleted', { id })
|
|
1468
|
+
return true
|
|
1469
|
+
}
|
|
1470
|
+
|
|
1471
|
+
return false
|
|
1472
|
+
} catch (error) {
|
|
1473
|
+
logger.errorWithException('Failed to delete relationship', error, { relationshipId: id })
|
|
1474
|
+
throw new DatabaseError('Failed to delete relationship', 'deleteRelationship', {
|
|
1475
|
+
originalError: error,
|
|
1476
|
+
})
|
|
1477
|
+
}
|
|
1478
|
+
}
|
|
1479
|
+
|
|
1480
|
+
/**
|
|
1481
|
+
* Mark a memory as superseded
|
|
1482
|
+
*/
|
|
1483
|
+
async markSuperseded(memoryId: string, supersededById: string): Promise<Memory | null> {
|
|
1484
|
+
logger.debug('Marking memory as superseded', { memoryId, supersededById })
|
|
1485
|
+
return this.update(memoryId, {
|
|
1486
|
+
isLatest: false,
|
|
1487
|
+
supersededBy: supersededById,
|
|
1488
|
+
})
|
|
1489
|
+
}
|
|
1490
|
+
|
|
1491
|
+
// ============ Utility Methods ============
|
|
1492
|
+
|
|
1493
|
+
/**
|
|
1494
|
+
* Get all memories (for testing/debugging)
|
|
1495
|
+
*/
|
|
1496
|
+
async getAllMemories(): Promise<Memory[]> {
|
|
1497
|
+
const rows = await db.select().from(memoriesTable)
|
|
1498
|
+
return rows.map(mapDbMemory)
|
|
1499
|
+
}
|
|
1500
|
+
|
|
1501
|
+
/**
|
|
1502
|
+
* Get all relationships (for testing/debugging)
|
|
1503
|
+
*/
|
|
1504
|
+
async getAllRelationships(): Promise<Relationship[]> {
|
|
1505
|
+
const rows = await db.select().from(memoryRelationships)
|
|
1506
|
+
return rows.map(mapDbRelationship)
|
|
1507
|
+
}
|
|
1508
|
+
|
|
1509
|
+
/**
|
|
1510
|
+
* Clear all data (for testing)
|
|
1511
|
+
*/
|
|
1512
|
+
async clearAll(): Promise<void> {
|
|
1513
|
+
logger.debug('Clearing all memory data')
|
|
1514
|
+
await db.delete(memoryRelationships)
|
|
1515
|
+
await db.delete(memoriesTable)
|
|
1516
|
+
logger.info('All memory data cleared')
|
|
1517
|
+
}
|
|
1518
|
+
|
|
1519
|
+
/**
|
|
1520
|
+
* Get statistics
|
|
1521
|
+
*/
|
|
1522
|
+
async getStats(): Promise<{
|
|
1523
|
+
totalMemories: number
|
|
1524
|
+
latestMemories: number
|
|
1525
|
+
totalRelationships: number
|
|
1526
|
+
byType: Record<string, number>
|
|
1527
|
+
byContainerTag: Record<string, number>
|
|
1528
|
+
}> {
|
|
1529
|
+
const totalMemoriesRows = await db.select({ count: sql<number>`count(*)` }).from(memoriesTable)
|
|
1530
|
+
const totalMemories = Number(totalMemoriesRows[0]?.count ?? 0)
|
|
1531
|
+
|
|
1532
|
+
const latestMemoriesRows = await db
|
|
1533
|
+
.select({ count: sql<number>`count(*)` })
|
|
1534
|
+
.from(memoriesTable)
|
|
1535
|
+
.where(eq(memoriesTable.isLatest, true))
|
|
1536
|
+
const latestMemories = Number(latestMemoriesRows[0]?.count ?? 0)
|
|
1537
|
+
|
|
1538
|
+
const totalRelationshipsRows = await db.select({ count: sql<number>`count(*)` }).from(memoryRelationships)
|
|
1539
|
+
const totalRelationships = Number(totalRelationshipsRows[0]?.count ?? 0)
|
|
1540
|
+
|
|
1541
|
+
const typeRows = await db
|
|
1542
|
+
.select({
|
|
1543
|
+
type: memoriesTable.memoryType,
|
|
1544
|
+
count: sql<number>`count(*)`,
|
|
1545
|
+
})
|
|
1546
|
+
.from(memoriesTable)
|
|
1547
|
+
.groupBy(memoriesTable.memoryType)
|
|
1548
|
+
|
|
1549
|
+
const containerRows = await db
|
|
1550
|
+
.select({
|
|
1551
|
+
tag: memoriesTable.containerTag,
|
|
1552
|
+
count: sql<number>`count(*)`,
|
|
1553
|
+
})
|
|
1554
|
+
.from(memoriesTable)
|
|
1555
|
+
.groupBy(memoriesTable.containerTag)
|
|
1556
|
+
|
|
1557
|
+
const byType: Record<string, number> = {}
|
|
1558
|
+
const byContainerTag: Record<string, number> = {}
|
|
1559
|
+
|
|
1560
|
+
for (const row of typeRows) {
|
|
1561
|
+
byType[row.type] = Number(row.count)
|
|
1562
|
+
}
|
|
1563
|
+
|
|
1564
|
+
for (const row of containerRows) {
|
|
1565
|
+
const tag = row.tag ?? 'default'
|
|
1566
|
+
byContainerTag[tag] = Number(row.count)
|
|
1567
|
+
}
|
|
1568
|
+
|
|
1569
|
+
return {
|
|
1570
|
+
totalMemories,
|
|
1571
|
+
latestMemories,
|
|
1572
|
+
totalRelationships,
|
|
1573
|
+
byType,
|
|
1574
|
+
byContainerTag,
|
|
1575
|
+
}
|
|
1576
|
+
}
|
|
1577
|
+
|
|
1578
|
+
// ============ Persistence Methods ============
|
|
1579
|
+
|
|
1580
|
+
  /**
   * Export all data for backup/persistence.
   *
   * NOTE(review): this implementation is a stub — it ALWAYS returns empty
   * memories/relationships arrays and never queries the database. A real
   * export would need to be async (see saveToFile, which queries the
   * tables itself), and making this method async would change its public
   * interface. Callers must not rely on this method for actual data.
   */
  exportData(): {
    memories: Memory[]
    relationships: Relationship[]
    exportedAt: string
    version: number
  } {
    return {
      memories: [], // stub: no rows are fetched
      relationships: [], // stub: no rows are fetched
      exportedAt: new Date().toISOString(),
      version: 1, // payload format version, matches saveToFile
    }
  }
|
|
1596
|
+
|
|
1597
|
+
/**
|
|
1598
|
+
* Import data from backup/persistence
|
|
1599
|
+
*/
|
|
1600
|
+
async importData(data: {
|
|
1601
|
+
memories: Memory[]
|
|
1602
|
+
relationships: Relationship[]
|
|
1603
|
+
}): Promise<{ memoriesImported: number; relationshipsImported: number }> {
|
|
1604
|
+
logger.debug('Importing data', {
|
|
1605
|
+
memoryCount: data.memories.length,
|
|
1606
|
+
relationshipCount: data.relationships.length,
|
|
1607
|
+
})
|
|
1608
|
+
|
|
1609
|
+
await this.clearAll()
|
|
1610
|
+
|
|
1611
|
+
if (data.memories.length > 0) {
|
|
1612
|
+
await this.createBatch(
|
|
1613
|
+
data.memories.map((memory) => ({
|
|
1614
|
+
...memory,
|
|
1615
|
+
createdAt: new Date(memory.createdAt),
|
|
1616
|
+
updatedAt: new Date(memory.updatedAt),
|
|
1617
|
+
}))
|
|
1618
|
+
)
|
|
1619
|
+
}
|
|
1620
|
+
|
|
1621
|
+
if (data.relationships.length > 0) {
|
|
1622
|
+
await this.createRelationshipBatch(
|
|
1623
|
+
data.relationships.map((rel) => ({
|
|
1624
|
+
...rel,
|
|
1625
|
+
createdAt: new Date(rel.createdAt),
|
|
1626
|
+
}))
|
|
1627
|
+
)
|
|
1628
|
+
}
|
|
1629
|
+
|
|
1630
|
+
logger.info('Data imported', {
|
|
1631
|
+
memoriesImported: data.memories.length,
|
|
1632
|
+
relationshipsImported: data.relationships.length,
|
|
1633
|
+
})
|
|
1634
|
+
|
|
1635
|
+
return {
|
|
1636
|
+
memoriesImported: data.memories.length,
|
|
1637
|
+
relationshipsImported: data.relationships.length,
|
|
1638
|
+
}
|
|
1639
|
+
}
|
|
1640
|
+
|
|
1641
|
+
/**
|
|
1642
|
+
* Save data to a file (for persistence)
|
|
1643
|
+
*/
|
|
1644
|
+
async saveToFile(filePath: string): Promise<void> {
|
|
1645
|
+
const { writeFile, mkdir } = await import('node:fs/promises')
|
|
1646
|
+
const { dirname } = await import('node:path')
|
|
1647
|
+
const { existsSync } = await import('node:fs')
|
|
1648
|
+
|
|
1649
|
+
const dir = dirname(filePath)
|
|
1650
|
+
if (!existsSync(dir)) {
|
|
1651
|
+
await mkdir(dir, { recursive: true })
|
|
1652
|
+
}
|
|
1653
|
+
|
|
1654
|
+
const memories = await this.getAllMemories()
|
|
1655
|
+
const relationships = await this.getAllRelationships()
|
|
1656
|
+
const data = {
|
|
1657
|
+
memories,
|
|
1658
|
+
relationships,
|
|
1659
|
+
exportedAt: new Date().toISOString(),
|
|
1660
|
+
version: 1,
|
|
1661
|
+
}
|
|
1662
|
+
|
|
1663
|
+
await writeFile(filePath, JSON.stringify(data, null, 2), 'utf-8')
|
|
1664
|
+
logger.info('Data saved to file', { filePath, memoryCount: data.memories.length })
|
|
1665
|
+
}
|
|
1666
|
+
|
|
1667
|
+
/**
|
|
1668
|
+
* Load data from a file (for persistence)
|
|
1669
|
+
*/
|
|
1670
|
+
async loadFromFile(filePath: string): Promise<boolean> {
|
|
1671
|
+
const { readFile } = await import('node:fs/promises')
|
|
1672
|
+
const { existsSync } = await import('node:fs')
|
|
1673
|
+
|
|
1674
|
+
if (!existsSync(filePath)) {
|
|
1675
|
+
logger.debug('No persistence file found', { filePath })
|
|
1676
|
+
return false
|
|
1677
|
+
}
|
|
1678
|
+
|
|
1679
|
+
try {
|
|
1680
|
+
const content = await readFile(filePath, 'utf-8')
|
|
1681
|
+
const data = JSON.parse(content) as {
|
|
1682
|
+
memories: Memory[]
|
|
1683
|
+
relationships: Relationship[]
|
|
1684
|
+
version: number
|
|
1685
|
+
}
|
|
1686
|
+
|
|
1687
|
+
await this.importData(data)
|
|
1688
|
+
logger.info('Data loaded from file', { filePath })
|
|
1689
|
+
return true
|
|
1690
|
+
} catch (error) {
|
|
1691
|
+
logger.errorWithException('Failed to load data from file', error, { filePath })
|
|
1692
|
+
return false
|
|
1693
|
+
}
|
|
1694
|
+
}
|
|
1695
|
+
}
|
|
1696
|
+
|
|
1697
|
+
/** Union of the two concrete repository implementations; which one is active is decided at runtime by getMemoryRepository(). */
export type MemoryRepository = InMemoryMemoryRepository | PostgresMemoryRepository
|
|
1698
|
+
|
|
1699
|
+
// ============================================================================
|
|
1700
|
+
// Singleton Pattern (Proxy-based Lazy Initialization)
|
|
1701
|
+
// ============================================================================
|
|
1702
|
+
|
|
1703
|
+
// Lazily-created singleton instances; all three are reset together by
// resetMemoryRepository() (used in tests).
let _inMemoryRepositoryInstance: InMemoryMemoryRepository | null = null
let _postgresRepositoryInstance: PostgresMemoryRepository | null = null
// Shared store backing the in-memory repository singleton.
let _sharedStore: MemoryStore | null = null
|
|
1706
|
+
|
|
1707
|
+
/**
|
|
1708
|
+
* Get the shared memory store (singleton)
|
|
1709
|
+
*/
|
|
1710
|
+
export function getSharedStore(): MemoryStore {
|
|
1711
|
+
if (!_sharedStore) {
|
|
1712
|
+
_sharedStore = createMemoryStore()
|
|
1713
|
+
}
|
|
1714
|
+
return _sharedStore
|
|
1715
|
+
}
|
|
1716
|
+
|
|
1717
|
+
/**
|
|
1718
|
+
* Get the memory repository singleton instance
|
|
1719
|
+
*/
|
|
1720
|
+
export function getInMemoryMemoryRepository(): InMemoryMemoryRepository {
|
|
1721
|
+
if (!_inMemoryRepositoryInstance) {
|
|
1722
|
+
_inMemoryRepositoryInstance = new InMemoryMemoryRepository(getSharedStore())
|
|
1723
|
+
}
|
|
1724
|
+
return _inMemoryRepositoryInstance
|
|
1725
|
+
}
|
|
1726
|
+
|
|
1727
|
+
export function getPostgresMemoryRepository(): PostgresMemoryRepository {
|
|
1728
|
+
if (!_postgresRepositoryInstance) {
|
|
1729
|
+
_postgresRepositoryInstance = new PostgresMemoryRepository()
|
|
1730
|
+
}
|
|
1731
|
+
return _postgresRepositoryInstance
|
|
1732
|
+
}
|
|
1733
|
+
|
|
1734
|
+
export function getMemoryRepository(): MemoryRepository {
|
|
1735
|
+
if (process.env.NODE_ENV === 'test') {
|
|
1736
|
+
return getInMemoryMemoryRepository()
|
|
1737
|
+
}
|
|
1738
|
+
return getPostgresMemoryRepository()
|
|
1739
|
+
}
|
|
1740
|
+
|
|
1741
|
+
/**
|
|
1742
|
+
* Create a new repository instance with isolated store (for testing)
|
|
1743
|
+
*/
|
|
1744
|
+
export function createMemoryRepository(store?: MemoryStore): InMemoryMemoryRepository {
|
|
1745
|
+
return new InMemoryMemoryRepository(store ?? createMemoryStore())
|
|
1746
|
+
}
|
|
1747
|
+
|
|
1748
|
+
export function createPostgresMemoryRepository(): PostgresMemoryRepository {
|
|
1749
|
+
return new PostgresMemoryRepository()
|
|
1750
|
+
}
|
|
1751
|
+
|
|
1752
|
+
/**
|
|
1753
|
+
* Reset the singleton instances (for testing)
|
|
1754
|
+
*/
|
|
1755
|
+
export function resetMemoryRepository(): void {
|
|
1756
|
+
_inMemoryRepositoryInstance = null
|
|
1757
|
+
_postgresRepositoryInstance = null
|
|
1758
|
+
_sharedStore = null
|
|
1759
|
+
_db = null
|
|
1760
|
+
}
|
|
1761
|
+
|
|
1762
|
+
/**
|
|
1763
|
+
* Proxy-based lazy singleton for backwards compatibility
|
|
1764
|
+
*/
|
|
1765
|
+
export const memoryRepository = new Proxy({} as MemoryRepository, {
|
|
1766
|
+
get(_, prop) {
|
|
1767
|
+
return getMemoryRepository()[prop as keyof MemoryRepository]
|
|
1768
|
+
},
|
|
1769
|
+
})
|