@twelvehart/supermemory-runtime 1.0.0-next.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (156)
  1. package/.env.example +57 -0
  2. package/README.md +374 -0
  3. package/dist/index.js +189 -0
  4. package/dist/mcp/index.js +1132 -0
  5. package/docker-compose.prod.yml +91 -0
  6. package/docker-compose.yml +358 -0
  7. package/drizzle/0000_dapper_the_professor.sql +159 -0
  8. package/drizzle/0001_api_keys.sql +51 -0
  9. package/drizzle/meta/0000_snapshot.json +1532 -0
  10. package/drizzle/meta/_journal.json +13 -0
  11. package/drizzle.config.ts +20 -0
  12. package/package.json +114 -0
  13. package/scripts/add-extraction-job.ts +122 -0
  14. package/scripts/benchmark-pgvector.ts +122 -0
  15. package/scripts/bootstrap.sh +209 -0
  16. package/scripts/check-runtime-pack.ts +111 -0
  17. package/scripts/claude-mcp-config.ts +336 -0
  18. package/scripts/docker-entrypoint.sh +183 -0
  19. package/scripts/doctor.ts +377 -0
  20. package/scripts/init-db.sql +33 -0
  21. package/scripts/install.sh +1110 -0
  22. package/scripts/mcp-setup.ts +271 -0
  23. package/scripts/migrations/001_create_pgvector_extension.sql +31 -0
  24. package/scripts/migrations/002_create_memory_embeddings_table.sql +75 -0
  25. package/scripts/migrations/003_create_hnsw_index.sql +94 -0
  26. package/scripts/migrations/004_create_memory_embeddings_standalone.sql +70 -0
  27. package/scripts/migrations/005_create_chunks_table.sql +95 -0
  28. package/scripts/migrations/006_create_processing_queue.sql +45 -0
  29. package/scripts/migrations/generate_test_data.sql +42 -0
  30. package/scripts/migrations/phase1_comprehensive_test.sql +204 -0
  31. package/scripts/migrations/run_migrations.sh +286 -0
  32. package/scripts/migrations/test_hnsw_index.sql +255 -0
  33. package/scripts/pre-commit-secrets +282 -0
  34. package/scripts/run-extraction-worker.ts +46 -0
  35. package/scripts/run-phase1-tests.sh +291 -0
  36. package/scripts/setup.ts +222 -0
  37. package/scripts/smoke-install.sh +12 -0
  38. package/scripts/test-health-endpoint.sh +328 -0
  39. package/src/api/index.ts +2 -0
  40. package/src/api/middleware/auth.ts +80 -0
  41. package/src/api/middleware/csrf.ts +308 -0
  42. package/src/api/middleware/errorHandler.ts +166 -0
  43. package/src/api/middleware/rateLimit.ts +360 -0
  44. package/src/api/middleware/validation.ts +514 -0
  45. package/src/api/routes/documents.ts +286 -0
  46. package/src/api/routes/profiles.ts +237 -0
  47. package/src/api/routes/search.ts +71 -0
  48. package/src/api/stores/index.ts +58 -0
  49. package/src/config/bootstrap-env.ts +3 -0
  50. package/src/config/env.ts +71 -0
  51. package/src/config/feature-flags.ts +25 -0
  52. package/src/config/index.ts +140 -0
  53. package/src/config/secrets.config.ts +291 -0
  54. package/src/db/client.ts +92 -0
  55. package/src/db/index.ts +73 -0
  56. package/src/db/postgres.ts +72 -0
  57. package/src/db/schema/chunks.schema.ts +31 -0
  58. package/src/db/schema/containers.schema.ts +46 -0
  59. package/src/db/schema/documents.schema.ts +49 -0
  60. package/src/db/schema/embeddings.schema.ts +32 -0
  61. package/src/db/schema/index.ts +11 -0
  62. package/src/db/schema/memories.schema.ts +72 -0
  63. package/src/db/schema/profiles.schema.ts +34 -0
  64. package/src/db/schema/queue.schema.ts +59 -0
  65. package/src/db/schema/relationships.schema.ts +42 -0
  66. package/src/db/schema.ts +223 -0
  67. package/src/db/worker-connection.ts +47 -0
  68. package/src/index.ts +235 -0
  69. package/src/mcp/CLAUDE.md +1 -0
  70. package/src/mcp/index.ts +1380 -0
  71. package/src/mcp/legacyState.ts +22 -0
  72. package/src/mcp/rateLimit.ts +358 -0
  73. package/src/mcp/resources.ts +309 -0
  74. package/src/mcp/results.ts +104 -0
  75. package/src/mcp/tools.ts +401 -0
  76. package/src/queues/config.ts +119 -0
  77. package/src/queues/index.ts +289 -0
  78. package/src/sdk/client.ts +225 -0
  79. package/src/sdk/errors.ts +266 -0
  80. package/src/sdk/http.ts +560 -0
  81. package/src/sdk/index.ts +244 -0
  82. package/src/sdk/resources/base.ts +65 -0
  83. package/src/sdk/resources/connections.ts +204 -0
  84. package/src/sdk/resources/documents.ts +163 -0
  85. package/src/sdk/resources/index.ts +10 -0
  86. package/src/sdk/resources/memories.ts +150 -0
  87. package/src/sdk/resources/search.ts +60 -0
  88. package/src/sdk/resources/settings.ts +36 -0
  89. package/src/sdk/types.ts +674 -0
  90. package/src/services/chunking/index.ts +451 -0
  91. package/src/services/chunking.service.ts +650 -0
  92. package/src/services/csrf.service.ts +252 -0
  93. package/src/services/documents.repository.ts +219 -0
  94. package/src/services/documents.service.ts +191 -0
  95. package/src/services/embedding.service.ts +404 -0
  96. package/src/services/extraction.service.ts +300 -0
  97. package/src/services/extractors/code.extractor.ts +451 -0
  98. package/src/services/extractors/index.ts +9 -0
  99. package/src/services/extractors/markdown.extractor.ts +461 -0
  100. package/src/services/extractors/pdf.extractor.ts +315 -0
  101. package/src/services/extractors/text.extractor.ts +118 -0
  102. package/src/services/extractors/url.extractor.ts +243 -0
  103. package/src/services/index.ts +235 -0
  104. package/src/services/ingestion.service.ts +177 -0
  105. package/src/services/llm/anthropic.ts +400 -0
  106. package/src/services/llm/base.ts +460 -0
  107. package/src/services/llm/contradiction-detector.service.ts +526 -0
  108. package/src/services/llm/heuristics.ts +148 -0
  109. package/src/services/llm/index.ts +309 -0
  110. package/src/services/llm/memory-classifier.service.ts +383 -0
  111. package/src/services/llm/memory-extension-detector.service.ts +523 -0
  112. package/src/services/llm/mock.ts +470 -0
  113. package/src/services/llm/openai.ts +398 -0
  114. package/src/services/llm/prompts.ts +438 -0
  115. package/src/services/llm/types.ts +373 -0
  116. package/src/services/memory.repository.ts +1769 -0
  117. package/src/services/memory.service.ts +1338 -0
  118. package/src/services/memory.types.ts +234 -0
  119. package/src/services/persistence/index.ts +295 -0
  120. package/src/services/pipeline.service.ts +509 -0
  121. package/src/services/profile.repository.ts +436 -0
  122. package/src/services/profile.service.ts +560 -0
  123. package/src/services/profile.types.ts +270 -0
  124. package/src/services/relationships/detector.ts +1128 -0
  125. package/src/services/relationships/index.ts +268 -0
  126. package/src/services/relationships/memory-integration.ts +459 -0
  127. package/src/services/relationships/strategies.ts +132 -0
  128. package/src/services/relationships/types.ts +370 -0
  129. package/src/services/search.service.ts +761 -0
  130. package/src/services/search.types.ts +220 -0
  131. package/src/services/secrets.service.ts +384 -0
  132. package/src/services/vectorstore/base.ts +327 -0
  133. package/src/services/vectorstore/index.ts +444 -0
  134. package/src/services/vectorstore/memory.ts +286 -0
  135. package/src/services/vectorstore/migration.ts +295 -0
  136. package/src/services/vectorstore/mock.ts +403 -0
  137. package/src/services/vectorstore/pgvector.ts +695 -0
  138. package/src/services/vectorstore/types.ts +247 -0
  139. package/src/startup.ts +389 -0
  140. package/src/types/api.types.ts +193 -0
  141. package/src/types/document.types.ts +103 -0
  142. package/src/types/index.ts +241 -0
  143. package/src/types/profile.base.ts +133 -0
  144. package/src/utils/errors.ts +447 -0
  145. package/src/utils/id.ts +15 -0
  146. package/src/utils/index.ts +101 -0
  147. package/src/utils/logger.ts +313 -0
  148. package/src/utils/sanitization.ts +501 -0
  149. package/src/utils/secret-validation.ts +273 -0
  150. package/src/utils/synonyms.ts +188 -0
  151. package/src/utils/validation.ts +581 -0
  152. package/src/workers/chunking.worker.ts +242 -0
  153. package/src/workers/embedding.worker.ts +358 -0
  154. package/src/workers/extraction.worker.ts +346 -0
  155. package/src/workers/indexing.worker.ts +505 -0
  156. package/tsconfig.json +38 -0
@@ -0,0 +1,1380 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * Supermemory MCP Server
4
+ *
5
+ * Model Context Protocol server that exposes supermemory functionality
6
+ * as tools and resources for AI coding assistants like Claude Code.
7
+ *
8
+ * Usage:
9
+ * node dist/mcp/index.js
10
+ * npx tsx src/mcp/index.ts
11
+ *
12
+ * Add to Claude Code:
13
+ * claude mcp add supermemory -- node /path/to/supermemory-clone/dist/mcp/index.js
14
+ */
15
+
16
+ import '../config/bootstrap-env.js'
17
+ import { Server } from '@modelcontextprotocol/sdk/server/index.js'
18
+ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'
19
+ import {
20
+ CallToolRequestSchema,
21
+ ListToolsRequestSchema,
22
+ ListResourcesRequestSchema,
23
+ ReadResourceRequestSchema,
24
+ ListResourceTemplatesRequestSchema,
25
+ ErrorCode,
26
+ McpError,
27
+ } from '@modelcontextprotocol/sdk/types.js'
28
+
29
+ import {
30
+ TOOL_DEFINITIONS,
31
+ AddContentInputSchema,
32
+ SearchInputSchema,
33
+ ProfileInputSchema,
34
+ ListDocumentsInputSchema,
35
+ DeleteContentInputSchema,
36
+ RememberInputSchema,
37
+ RecallInputSchema,
38
+ type AddContentResult,
39
+ type SearchResult,
40
+ type ProfileResult,
41
+ type ListResult,
42
+ type DeleteResult,
43
+ type RememberResult,
44
+ type RecallResult,
45
+ } from './tools.js'
46
+
47
+ import {
48
+ RESOURCE_TEMPLATES,
49
+ parseResourceUri,
50
+ generateResourceList,
51
+ type ProfileResource,
52
+ type DocumentResource,
53
+ type SearchResource,
54
+ type FactsResource,
55
+ type StatsResource,
56
+ } from './resources.js'
57
+
58
+ import { getMCPRateLimiter, createRateLimitErrorResponse } from './rateLimit.js'
59
+
60
+ import { MemoryService, createMemoryService } from '../services/memory.service.js'
61
+ import { SearchService, createSearchService } from '../services/search.service.js'
62
+ import { ProfileService } from '../services/profile.service.js'
63
+ import { EmbeddingService, cosineSimilarity } from '../services/embedding.service.js'
64
+ import { generateId } from '../utils/id.js'
65
+ import { ValidationError } from '../utils/errors.js'
66
+ import { getLogger } from '../utils/logger.js'
67
+ import { getDatabaseUrl } from '../db/client.js'
68
+ import { closePostgresDatabase, getPostgresDatabase, type PostgresDatabaseInstance } from '../db/postgres.js'
69
+ import { documents } from '../db/schema/documents.schema.js'
70
+ import { memories } from '../db/schema/memories.schema.js'
71
+ import { archiveFileWithSuffix, pathExists, readJsonFile } from './legacyState.js'
72
+ import { and, asc, desc, eq, inArray, sql } from 'drizzle-orm'
73
+ import { initializeAndValidate } from '../startup.js'
74
+ import { createMcpEnvelopeError, createToolResponse, mapErrorToMcpError } from './results.js'
75
+ import { profileRepository } from '../services/profile.repository.js'
76
+ import * as path from 'path'
77
+
78
// Scoped logger for all MCP server output.
const logger = getLogger('mcp-server')

// Tuning constants for semantic recall (presumably shortlist sizing and a
// similarity floor — the handler that consumes them is outside this chunk;
// TODO confirm usage in the recall tool handler further down the file).
const RECALL_SEMANTIC_SHORTLIST_MULTIPLIER = 5
const RECALL_SEMANTIC_MIN_SHORTLIST = 25
const RECALL_SEMANTIC_MAX_SHORTLIST = 50
const RECALL_SEMANTIC_THRESHOLD = 0.2
// Cache of query text -> embedding vector, reused across recall calls.
const recallEmbeddingCache = new Map<string, number[]>()

// Conservative Postgres pool defaults for this (mostly idle) stdio process.
// `??=` only fills these in when the operator has not set them explicitly.
process.env.SUPERMEMORY_PG_POOL_MIN ??= '0'
process.env.SUPERMEMORY_PG_POOL_MAX ??= '5'
process.env.SUPERMEMORY_PG_POOL_IDLE_TIMEOUT_MS ??= '10000'
88
+
89
+ // ============================================================================
90
+ // Server State & Legacy Migration
91
+ // ============================================================================
92
+
93
/**
 * Shape of a single document inside the legacy `mcp-state.json` file.
 * Only `id` and `content` are guaranteed; everything else is optional.
 */
interface LegacyDocumentRecord {
  id: string
  content: string
  title?: string
  contentType?: string
  containerTag?: string
  sourceUrl?: string
  metadata?: Record<string, unknown>
  // Date strings parsed with `new Date()` (see parseLegacyDate); invalid
  // values are treated as absent.
  createdAt?: string
  updatedAt?: string
  // Legacy embedding vector. Not read by migrateLegacyMcpState — memories
  // are re-extracted and re-indexed from `content` during migration.
  embedding?: number[]
}
105
+
106
/**
 * Top-level shape of the legacy on-disk MCP state file
 * (`$SUPERMEMORY_DATA_PATH/mcp-state.json`).
 */
interface LegacyPersistedState {
  documents: LegacyDocumentRecord[]
  containerTags: string[]
  version: number
  lastSaved: string
}
112
+
113
/**
 * Long-lived service handles shared by every tool and resource handler.
 * Built once via createServerState().
 */
interface ServerState {
  db: PostgresDatabaseInstance
  memoryService: MemoryService
  searchService: SearchService
  profileService: ProfileService
  embeddingService: EmbeddingService
}
120
+
121
+ function isRecord(value: unknown): value is Record<string, unknown> {
122
+ return !!value && typeof value === 'object' && !Array.isArray(value)
123
+ }
124
+
125
+ function getLegacyPersistencePath(): string {
126
+ const dataPath =
127
+ process.env.SUPERMEMORY_DATA_PATH || path.join(process.env.HOME || process.env.USERPROFILE || '.', '.supermemory')
128
+
129
+ return path.join(dataPath, 'mcp-state.json')
130
+ }
131
+
132
+ function mapLegacyContentTypeToDb(contentType?: string): string {
133
+ switch (contentType) {
134
+ case 'pdf':
135
+ return 'application/pdf'
136
+ case 'image':
137
+ return 'image/png'
138
+ case 'url':
139
+ return 'text/html'
140
+ case 'tweet':
141
+ case 'document':
142
+ case 'note':
143
+ default:
144
+ return 'text/plain'
145
+ }
146
+ }
147
+
148
+ function mapMcpContentTypeToDb(contentType?: string): string[] {
149
+ switch (contentType) {
150
+ case 'pdf':
151
+ return ['application/pdf']
152
+ case 'image':
153
+ return ['image/png', 'image/jpeg']
154
+ case 'url':
155
+ return ['text/html']
156
+ case 'tweet':
157
+ case 'document':
158
+ case 'note':
159
+ default:
160
+ return ['text/plain']
161
+ }
162
+ }
163
+
164
+ function mapDbContentTypeToMcp(contentType?: string): 'note' | 'url' | 'pdf' | 'image' | 'tweet' | 'document' {
165
+ switch (contentType) {
166
+ case 'application/pdf':
167
+ return 'pdf'
168
+ case 'image/png':
169
+ case 'image/jpeg':
170
+ return 'image'
171
+ case 'text/html':
172
+ return 'url'
173
+ default:
174
+ return 'note'
175
+ }
176
+ }
177
+
178
+ function extractTitle(metadata: unknown): string | undefined {
179
+ if (!isRecord(metadata)) return undefined
180
+ const title = metadata.title
181
+ return typeof title === 'string' ? title : undefined
182
+ }
183
+
184
+ function extractSourceUrl(metadata: unknown): string | undefined {
185
+ if (!isRecord(metadata)) return undefined
186
+ const sourceUrl = metadata.sourceUrl
187
+ return typeof sourceUrl === 'string' ? sourceUrl : undefined
188
+ }
189
+
190
+ function parseLegacyDate(value: string | undefined): Date | undefined {
191
+ if (!value) return undefined
192
+ const date = new Date(value)
193
+ return Number.isNaN(date.getTime()) ? undefined : date
194
+ }
195
+
196
+ function buildDocumentMetadata(base: unknown, extras: Record<string, unknown>): Record<string, unknown> {
197
+ if (isRecord(base)) {
198
+ return { ...base, ...extras }
199
+ }
200
+
201
+ return { ...extras }
202
+ }
203
+
204
+ function getRecallQueryTokens(queryLower: string): string[] {
205
+ return Array.from(new Set(queryLower.split(/\W+/).map((token) => token.trim()).filter((token) => token.length >= 2)))
206
+ }
207
+
208
+ function getKeywordRecallScore(factContent: string, queryLower: string, queryTokens: string[]): number {
209
+ const contentLower = factContent.toLowerCase()
210
+ const substringScore = contentLower.includes(queryLower) ? 0.6 : 0
211
+
212
+ if (queryTokens.length === 0) {
213
+ return substringScore
214
+ }
215
+
216
+ const tokenMatches = queryTokens.filter((token) => contentLower.includes(token)).length
217
+ const overlapScore = (tokenMatches / queryTokens.length) * 0.4
218
+
219
+ return Math.min(1, substringScore + overlapScore)
220
+ }
221
+
222
/**
 * One-time migration of the legacy JSON state file (mcp-state.json) into
 * Postgres.
 *
 * Flow:
 *  1. No-op when the legacy file does not exist.
 *  2. Read and validate the file; abort (leaving it in place) on read
 *     errors or a missing `documents` array.
 *  3. For each document: insert it, re-extract memories from its content,
 *     and index them for search. Ids that already exist are skipped via
 *     onConflictDoNothing, which makes re-runs idempotent.
 *  4. Archive the file only when every document migrated cleanly, so a
 *     partial failure can be retried on the next startup.
 *
 * @param state shared service handles (db, memory service, search service)
 */
async function migrateLegacyMcpState(state: ServerState): Promise<void> {
  const legacyPath = getLegacyPersistencePath()

  // Nothing to migrate.
  if (!(await pathExists(legacyPath))) {
    return
  }

  let legacyState: LegacyPersistedState | null = null

  try {
    legacyState = await readJsonFile<LegacyPersistedState>(legacyPath)
  } catch (error) {
    // Unreadable file: log and bail; the file stays for manual inspection.
    logger.error(
      'Failed to read legacy MCP state for migration',
      { legacyPath },
      error instanceof Error ? error : undefined
    )
    return
  }

  if (!legacyState || !Array.isArray(legacyState.documents)) {
    logger.warn('Legacy MCP state missing documents, skipping migration', { legacyPath })
    return
  }

  // Tracks whether any document failed; governs whether we archive the file.
  let hadFailures = false

  for (const doc of legacyState.documents) {
    // Empty documents carry nothing worth migrating.
    if (!doc.content) {
      continue
    }

    const documentId = doc.id || generateId()
    const containerTag = doc.containerTag ?? 'default'
    // Fold legacy top-level fields into the metadata blob, skipping absent ones.
    const metadata = buildDocumentMetadata(doc.metadata, {
      ...(doc.title ? { title: doc.title } : {}),
      ...(doc.sourceUrl ? { sourceUrl: doc.sourceUrl } : {}),
      ...(doc.contentType ? { legacyContentType: doc.contentType } : {}),
    })
    // Invalid/missing legacy timestamps fall back to "now".
    const createdAt = parseLegacyDate(doc.createdAt) ?? new Date()
    const updatedAt = parseLegacyDate(doc.updatedAt) ?? createdAt

    try {
      const inserted = await state.db
        .insert(documents)
        .values({
          id: documentId,
          content: doc.content,
          contentType: mapLegacyContentTypeToDb(doc.contentType),
          status: 'processed',
          containerTag,
          metadata,
          createdAt,
          updatedAt,
        })
        .onConflictDoNothing({ target: documents.id })
        .returning({ id: documents.id })

      // Empty returning() means the id already existed — previously migrated.
      if (inserted.length === 0) {
        continue
      }

      // Re-derive memories from the raw content (legacy embeddings are ignored).
      const processed = await state.memoryService.processAndStoreMemories(doc.content, {
        containerTag,
        sourceId: documentId,
      })

      for (const memory of processed.memories) {
        await state.searchService.indexMemory(memory)
      }
    } catch (error) {
      // Keep going: one bad document should not block the rest.
      hadFailures = true
      logger.error('Failed to migrate legacy MCP document', { documentId }, error instanceof Error ? error : undefined)
    }
  }

  if (hadFailures) {
    // Leave the file so the failed documents are retried next startup
    // (successful ones will be skipped by the conflict check above).
    logger.warn('Legacy MCP migration encountered failures, leaving file for retry', {
      legacyPath,
    })
    return
  }

  try {
    // Rename rather than delete, preserving the original as a backup.
    const migratedPath = await archiveFileWithSuffix(legacyPath)
    logger.info('Legacy MCP state migrated and archived', { legacyPath, migratedPath })
  } catch (error) {
    // Non-fatal: data is already in Postgres; re-runs are idempotent.
    logger.error(
      'Failed to archive legacy MCP state after migration',
      { legacyPath },
      error instanceof Error ? error : undefined
    )
  }
}
316
+
317
+ function createServerState(): ServerState {
318
+ return {
319
+ db: getPostgresDatabase(getDatabaseUrl()),
320
+ memoryService: createMemoryService(),
321
+ searchService: createSearchService(),
322
+ profileService: new ProfileService(),
323
+ embeddingService: new EmbeddingService(),
324
+ }
325
+ }
326
+
327
+ // ============================================================================
328
+ // Tool Handlers
329
+ // ============================================================================
330
+
331
/**
 * Tool handler for `add_content`: stores a document and extracts/indexes
 * memories from it.
 *
 * Idempotency: when `customId` (or `idempotencyKey`) matches an existing
 * document and `upsert` is false, that document is reused untouched. With
 * `upsert` true the document is rewritten and all derived state — memories,
 * search vectors, and profile facts sourced from it — is torn down and
 * rebuilt from the new content.
 *
 * Partial failures (memory extraction, indexing, profile ingestion) are
 * collected into `errors` and surfaced with `success: false` rather than
 * thrown, so a document write is never silently lost.
 */
async function handleAddContent(state: ServerState, args: unknown): Promise<AddContentResult> {
  const input = AddContentInputSchema.parse(args)

  // idempotencyKey is an alias for customId when the latter is absent.
  const customId = input.customId ?? input.idempotencyKey
  const metadata = buildDocumentMetadata(input.metadata, {
    ...(input.title ? { title: input.title } : {}),
    ...(input.sourceUrl ? { sourceUrl: input.sourceUrl } : {}),
  })

  // Look up a prior document with the same customId (if one was supplied).
  const existingDocument =
    customId
      ? await state.db
          .select({
            id: documents.id,
            containerTag: documents.containerTag,
          })
          .from(documents)
          .where(eq(documents.customId, customId))
          .limit(1)
          .then((rows) => rows[0] ?? null)
      : null

  // Without upsert, a customId hit short-circuits: reuse, change nothing.
  if (existingDocument && !input.upsert) {
    return {
      success: true,
      documentId: existingDocument.id,
      customId,
      created: false,
      reused: true,
      updated: false,
      memoriesExtracted: 0,
      message: `Reused existing document for customId "${customId}"`,
    }
  }

  const documentId = existingDocument?.id ?? generateId()
  const now = new Date()
  // Explicit input wins, then the existing document's tag, then 'default'.
  const containerTag = input.containerTag ?? existingDocument?.containerTag ?? 'default'

  if (existingDocument) {
    await state.db
      .update(documents)
      .set({
        content: input.content,
        containerTag,
        metadata,
        status: 'processed',
        updatedAt: now,
      })
      .where(eq(documents.id, existingDocument.id))
  } else {
    await state.db.insert(documents).values({
      id: documentId,
      customId: customId ?? null,
      content: input.content,
      contentType: 'text/plain',
      status: 'processed',
      containerTag,
      metadata,
      createdAt: now,
      updatedAt: now,
    })
  }

  let memoriesExtracted = 0
  const errors: string[] = []
  let processedMemories: Awaited<ReturnType<MemoryService['processAndStoreMemories']>>['memories'] = []

  // Upsert path: tear down everything derived from the old content before
  // re-extracting, so stale memories/vectors/facts don't linger.
  if (existingDocument) {
    const existingMemories = await state.db
      .select({ id: memories.id })
      .from(memories)
      .where(eq(memories.documentId, existingDocument.id))

    // Remove search-index entries first; failures are recorded, not fatal.
    for (const memory of existingMemories) {
      try {
        await state.searchService.removeMemory(memory.id)
      } catch (cleanupError) {
        const message = cleanupError instanceof Error ? cleanupError.message : 'Unknown search cleanup error'
        errors.push(`Failed to clear indexed memory ${memory.id}: ${message}`)
      }
    }

    await state.db.delete(memories).where(eq(memories.documentId, existingDocument.id))

    // Strip profile facts that were sourced from this document, in every profile.
    const profiles = await profileRepository.listAll()
    for (const profile of profiles) {
      const nextStaticFacts = profile.staticFacts.filter((fact) => fact.sourceId !== existingDocument.id)
      const nextDynamicFacts = profile.dynamicFacts.filter((fact) => fact.sourceId !== existingDocument.id)
      // Only write back when something was actually removed.
      if (
        nextStaticFacts.length !== profile.staticFacts.length ||
        nextDynamicFacts.length !== profile.dynamicFacts.length
      ) {
        await profileRepository.updateFacts(profile.containerTag, nextStaticFacts, nextDynamicFacts)
      }
    }
  }

  // Extract memories from the (new) content; a failure here still leaves
  // the document stored, reported via `errors`.
  try {
    const processed = await state.memoryService.processAndStoreMemories(input.content, {
      containerTag,
      sourceId: documentId,
    })
    processedMemories = processed.memories
  } catch (extractionError) {
    const message = extractionError instanceof Error ? extractionError.message : 'Unknown extraction error'
    errors.push(`Memory extraction failed: ${message}`)
  }

  // Index each extracted memory; memoriesExtracted counts successes only.
  for (const memory of processedMemories) {
    try {
      await state.searchService.indexMemory(memory)
      memoriesExtracted++
    } catch (indexError) {
      const message = indexError instanceof Error ? indexError.message : 'Unknown indexing error'
      errors.push(`Failed to index memory ${memory.id}: ${message}`)
    }
  }

  // Profile ingestion only runs when the caller named a container explicitly
  // (not for the 'default' fallback).
  if (input.containerTag) {
    try {
      await state.profileService.ingestContent(input.containerTag, input.content, documentId)
    } catch (profileError) {
      const message = profileError instanceof Error ? profileError.message : 'Unknown profile error'
      errors.push(`Profile ingestion failed: ${message}`)
    }
  }

  const hasErrors = errors.length > 0
  const statusMessage = existingDocument
    ? hasErrors
      ? `Updated existing document with ${memoriesExtracted} memories (${errors.length} errors)`
      : `Updated existing document with ${memoriesExtracted} extracted memories`
    : hasErrors
      ? `Added content with ${memoriesExtracted} memories (${errors.length} errors)`
      : `Added content with ${memoriesExtracted} extracted memories`

  return {
    success: !hasErrors,
    documentId,
    customId,
    created: !existingDocument,
    reused: false,
    updated: Boolean(existingDocument),
    memoriesExtracted,
    message: statusMessage,
    ...(hasErrors ? { errors } : {}),
  }
}
480
+
481
+ async function handleSearch(state: ServerState, args: unknown): Promise<SearchResult> {
482
+ const input = SearchInputSchema.parse(args)
483
+
484
+ const response = await state.searchService.hybridSearch(input.query, input.containerTag, {
485
+ limit: input.limit,
486
+ threshold: input.threshold,
487
+ searchMode: input.mode,
488
+ rerank: input.rerank,
489
+ })
490
+
491
+ return {
492
+ results: response.results.map((r) => ({
493
+ id: r.id,
494
+ content: r.memory?.content ?? r.chunk?.content ?? '',
495
+ similarity: r.similarity,
496
+ containerTag: r.memory?.containerTag,
497
+ metadata: input.includeMetadata ? r.metadata : undefined,
498
+ createdAt: r.updatedAt?.toISOString(),
499
+ })),
500
+ totalCount: response.totalCount,
501
+ query: response.query,
502
+ searchTimeMs: response.searchTimeMs,
503
+ }
504
+ }
505
+
506
+ async function handleProfile(state: ServerState, args: unknown): Promise<ProfileResult> {
507
+ const input = ProfileInputSchema.parse(args)
508
+
509
+ switch (input.action) {
510
+ case 'get': {
511
+ const profile = await state.profileService.getProfile(input.containerTag)
512
+ return {
513
+ containerTag: profile.containerTag,
514
+ staticFacts: profile.staticFacts.map((f) => ({
515
+ id: f.id,
516
+ content: f.content,
517
+ category: f.category,
518
+ confidence: f.confidence,
519
+ })),
520
+ dynamicFacts: profile.dynamicFacts.map((f) => ({
521
+ id: f.id,
522
+ content: f.content,
523
+ category: f.category,
524
+ expiresAt: f.expiresAt?.toISOString(),
525
+ })),
526
+ lastUpdated: profile.updatedAt.toISOString(),
527
+ }
528
+ }
529
+
530
+ case 'ingest': {
531
+ if (!input.content) {
532
+ throw new ValidationError('Content required for ingest action', {
533
+ content: ['Content field is required for ingest action'],
534
+ })
535
+ }
536
+ await state.profileService.ingestContent(input.containerTag, input.content)
537
+ const profile = await state.profileService.getProfile(input.containerTag)
538
+ return {
539
+ containerTag: profile.containerTag,
540
+ staticFacts: profile.staticFacts.map((f) => ({
541
+ id: f.id,
542
+ content: f.content,
543
+ category: f.category,
544
+ confidence: f.confidence,
545
+ })),
546
+ dynamicFacts: profile.dynamicFacts.map((f) => ({
547
+ id: f.id,
548
+ content: f.content,
549
+ category: f.category,
550
+ expiresAt: f.expiresAt?.toISOString(),
551
+ })),
552
+ lastUpdated: profile.updatedAt.toISOString(),
553
+ }
554
+ }
555
+
556
+ case 'update': {
557
+ if (!input.facts || input.facts.length === 0) {
558
+ throw new ValidationError('Facts required for update action', {
559
+ facts: ['At least one fact is required for update action'],
560
+ })
561
+ }
562
+ // Valid fact categories
563
+ const validCategories = [
564
+ 'identity',
565
+ 'preference',
566
+ 'skill',
567
+ 'background',
568
+ 'relationship',
569
+ 'project',
570
+ 'goal',
571
+ 'context',
572
+ 'other',
573
+ ] as const
574
+ type FactCategory = (typeof validCategories)[number]
575
+
576
+ // Convert input facts to ProfileFact format with validation
577
+ const facts = input.facts.map((f) => {
578
+ const category: FactCategory | undefined =
579
+ f.category && validCategories.includes(f.category as FactCategory) ? (f.category as FactCategory) : undefined
580
+
581
+ return {
582
+ id: generateId(),
583
+ content: f.content,
584
+ type: (f.type ?? 'static') as 'static' | 'dynamic',
585
+ category,
586
+ confidence: 0.9,
587
+ extractedAt: new Date(),
588
+ lastAccessedAt: new Date(),
589
+ reinforcementCount: 0,
590
+ }
591
+ })
592
+ const profile = await state.profileService.updateProfile(input.containerTag, facts)
593
+ return {
594
+ containerTag: profile.containerTag,
595
+ staticFacts: profile.staticFacts.map((f) => ({
596
+ id: f.id,
597
+ content: f.content,
598
+ category: f.category,
599
+ confidence: f.confidence,
600
+ })),
601
+ dynamicFacts: profile.dynamicFacts.map((f) => ({
602
+ id: f.id,
603
+ content: f.content,
604
+ category: f.category,
605
+ expiresAt: f.expiresAt?.toISOString(),
606
+ })),
607
+ lastUpdated: profile.updatedAt.toISOString(),
608
+ }
609
+ }
610
+
611
+ default:
612
+ throw new ValidationError(`Unknown action: ${input.action}`, {
613
+ action: [`Invalid action '${input.action}'. Valid actions: get, ingest, update`],
614
+ })
615
+ }
616
+ }
617
+
618
+ async function handleListDocuments(state: ServerState, args: unknown): Promise<ListResult> {
619
+ const input = ListDocumentsInputSchema.parse(args)
620
+
621
+ const filters = [] as Array<ReturnType<typeof and>>
622
+
623
+ if (input.containerTag) {
624
+ filters.push(eq(documents.containerTag, input.containerTag))
625
+ }
626
+
627
+ if (input.contentType) {
628
+ filters.push(inArray(documents.contentType, mapMcpContentTypeToDb(input.contentType)))
629
+ }
630
+
631
+ const whereClause = filters.length > 0 ? and(...filters) : undefined
632
+ const [countRow] = await state.db
633
+ .select({ count: sql<number>`count(*)` })
634
+ .from(documents)
635
+ .where(whereClause)
636
+ const total = Number(countRow?.count ?? 0)
637
+
638
+ const orderExpression =
639
+ input.sortBy === 'title'
640
+ ? sql`${documents.metadata} ->> 'title'`
641
+ : input.sortBy === 'updatedAt'
642
+ ? documents.updatedAt
643
+ : documents.createdAt
644
+ const orderBy = input.sortOrder === 'asc' ? asc(orderExpression) : desc(orderExpression)
645
+
646
+ const limit = input.limit ?? 20
647
+ const offset = input.offset ?? 0
648
+
649
+ const rows = await state.db
650
+ .select({
651
+ id: documents.id,
652
+ content: documents.content,
653
+ contentType: documents.contentType,
654
+ containerTag: documents.containerTag,
655
+ metadata: documents.metadata,
656
+ createdAt: documents.createdAt,
657
+ updatedAt: documents.updatedAt,
658
+ })
659
+ .from(documents)
660
+ .where(whereClause)
661
+ .orderBy(orderBy)
662
+ .limit(limit)
663
+ .offset(offset)
664
+
665
+ return {
666
+ documents: rows.map((doc) => {
667
+ const metadata = isRecord(doc.metadata) ? doc.metadata : {}
668
+ const createdAt = doc.createdAt instanceof Date ? doc.createdAt : new Date(doc.createdAt)
669
+ const updatedAt = doc.updatedAt instanceof Date ? doc.updatedAt : new Date(doc.updatedAt)
670
+
671
+ return {
672
+ id: doc.id,
673
+ title: extractTitle(metadata),
674
+ contentPreview: doc.content.substring(0, 200) + (doc.content.length > 200 ? '...' : ''),
675
+ contentType: mapDbContentTypeToMcp(doc.contentType),
676
+ containerTag: doc.containerTag,
677
+ createdAt: createdAt.toISOString(),
678
+ updatedAt: updatedAt.toISOString(),
679
+ }
680
+ }),
681
+ total,
682
+ limit,
683
+ offset,
684
+ hasMore: offset + limit < total,
685
+ }
686
+ }
687
+
688
/**
 * Delete documents — either a single one by `id` or every document under a
 * `containerTag` — together with their derived memories, indexed vectors, and
 * any profile facts whose `sourceId` points at a deleted document.
 *
 * Safety: requires `confirm: true` in the input; refuses to run otherwise.
 * Cleanup failures (vector index, profile facts) are collected into `errors`
 * and reported rather than aborting the remaining work, so the result can
 * describe a partial deletion.
 *
 * @param state shared server state (db handle, search service, …)
 * @param args  raw tool arguments; validated against DeleteContentInputSchema
 * @returns counts of deleted documents/memories/vectors/profile facts plus a
 *          human-readable message; `errors` is present only when cleanup failed
 */
async function handleDelete(state: ServerState, args: unknown): Promise<DeleteResult> {
  const input = DeleteContentInputSchema.parse(args)

  // Safety gate: never delete anything without an explicit confirmation flag.
  if (!input.confirm) {
    return {
      success: false,
      documentsDeleted: 0,
      memoriesDeleted: 0,
      vectorsDeleted: 0,
      profileFactsDeleted: 0,
      deletedCount: 0,
      message: 'Deletion not confirmed. Set confirm: true to proceed.',
    }
  }

  // A target selector is required: a specific document id or a whole container.
  if (!input.id && !input.containerTag) {
    return {
      success: false,
      documentsDeleted: 0,
      memoriesDeleted: 0,
      vectorsDeleted: 0,
      profileFactsDeleted: 0,
      deletedCount: 0,
      message: 'Either id or containerTag must be provided',
    }
  }

  // Resolve the concrete set of document ids to delete. `id` wins when both
  // selectors are supplied; the non-null assertion is safe because the guard
  // above ensures at least one of the two is present.
  const targetDocuments = await state.db
    .select({
      id: documents.id,
    })
    .from(documents)
    .where(input.id ? eq(documents.id, input.id) : eq(documents.containerTag, input.containerTag!))

  if (targetDocuments.length === 0) {
    return {
      success: false,
      documentsDeleted: 0,
      memoriesDeleted: 0,
      vectorsDeleted: 0,
      profileFactsDeleted: 0,
      deletedCount: 0,
      message: 'No documents found to delete',
    }
  }

  const documentIds = targetDocuments.map((document) => document.id)
  const errors: string[] = []
  let vectorsDeleted = 0

  // Find memories derived from the target documents so their vector-index
  // entries can be removed before the rows themselves are deleted.
  const associatedMemories = await state.db
    .select({ id: memories.id })
    .from(memories)
    .where(inArray(memories.documentId, documentIds))

  // Best-effort vector cleanup: a failure for one memory is recorded and the
  // loop continues so the remaining memories are still cleaned up.
  for (const memory of associatedMemories) {
    try {
      const cleanup = await state.searchService.removeMemory(memory.id)
      vectorsDeleted += cleanup.vectorsDeleted
    } catch (cleanupError) {
      const message = cleanupError instanceof Error ? cleanupError.message : 'Unknown vector cleanup error'
      errors.push(`Failed to clear indexed memory ${memory.id}: ${message}`)
    }
  }

  // Delete the derived memory rows; `returning` gives an accurate count.
  const deletedMemories = await state.db
    .delete(memories)
    .where(inArray(memories.documentId, documentIds))
    .returning({ id: memories.id })

  // Scrub profile facts sourced from the deleted documents. All profiles are
  // scanned (a document's facts may have landed on any container's profile —
  // TODO confirm that assumption against the extraction pipeline).
  let profileFactsDeleted = 0
  const profiles = await profileRepository.listAll()
  for (const profile of profiles) {
    const nextStaticFacts = profile.staticFacts.filter((fact) => !documentIds.includes(fact.sourceId ?? ''))
    const nextDynamicFacts = profile.dynamicFacts.filter((fact) => !documentIds.includes(fact.sourceId ?? ''))
    const removedFacts =
      profile.staticFacts.length -
      nextStaticFacts.length +
      (profile.dynamicFacts.length - nextDynamicFacts.length)

    // Skip the write entirely when nothing on this profile referenced the
    // deleted documents.
    if (removedFacts === 0) {
      continue
    }

    try {
      await profileRepository.updateFacts(profile.containerTag, nextStaticFacts, nextDynamicFacts)
      profileFactsDeleted += removedFacts
    } catch (profileError) {
      const message = profileError instanceof Error ? profileError.message : 'Unknown profile cleanup error'
      errors.push(`Failed to remove profile facts for container ${profile.containerTag}: ${message}`)
    }
  }

  // Finally delete the documents themselves.
  const deletedDocuments = await state.db
    .delete(documents)
    .where(inArray(documents.id, documentIds))
    .returning({ id: documents.id })

  const documentsDeleted = deletedDocuments.length
  const memoriesDeleted = deletedMemories.length
  const deletedCount = documentsDeleted
  const hasErrors = errors.length > 0
  // Success requires both real progress and a clean run of every cleanup step.
  const success = documentsDeleted > 0 && !hasErrors

  const message = success
    ? `Deleted ${documentsDeleted} document(s), ${memoriesDeleted} derived memory row(s), and ${profileFactsDeleted} profile fact(s)`
    : `Deleted ${documentsDeleted} document(s) with ${errors.length} cleanup issue(s)`

  return {
    success,
    documentsDeleted,
    memoriesDeleted,
    vectorsDeleted,
    profileFactsDeleted,
    deletedCount,
    message,
    // Only attach the errors array when something actually went wrong.
    ...(hasErrors ? { errors } : {}),
  }
}
807
+
808
+ async function handleRemember(state: ServerState, args: unknown): Promise<RememberResult> {
809
+ const input = RememberInputSchema.parse(args)
810
+ const containerTag = input.containerTag ?? 'default'
811
+
812
+ const factId = generateId()
813
+ const now = new Date()
814
+
815
+ const fact = {
816
+ id: factId,
817
+ content: input.fact,
818
+ type: input.type ?? 'static',
819
+ category: input.category,
820
+ confidence: 0.95,
821
+ extractedAt: now,
822
+ lastAccessedAt: now,
823
+ reinforcementCount: 0,
824
+ expiresAt:
825
+ input.type === 'dynamic' && input.expirationHours
826
+ ? new Date(now.getTime() + input.expirationHours * 60 * 60 * 1000)
827
+ : undefined,
828
+ }
829
+
830
+ await state.profileService.updateProfile(containerTag, [fact as import('../services/profile.types.js').ProfileFact])
831
+
832
+ return {
833
+ success: true,
834
+ factId,
835
+ message: `Remembered: "${input.fact.substring(0, 50)}${input.fact.length > 50 ? '...' : ''}"`,
836
+ }
837
+ }
838
+
839
/**
 * Recall profile facts matching a free-text query.
 *
 * Scoring pipeline:
 *   1. Collect candidate facts from the container's profile (static and/or
 *      dynamic, each individually toggleable via includeStatic/includeDynamic).
 *   2. Rank all candidates by keyword score, breaking near-ties (< 0.001) by
 *      fact confidence.
 *   3. When a real embedding backend is available (not the local fallback),
 *      re-score a keyword-ranked shortlist semantically and blend
 *      0.7 * cosine similarity + 0.3 * keyword score. Embeddings are memoized
 *      in `recallEmbeddingCache` keyed by `${fact.id}:${fact.content}` so a
 *      content change invalidates the cached vector.
 *   4. Keep facts scoring >= RECALL_SEMANTIC_THRESHOLD or containing the raw
 *      query as a substring, sort by score then confidence, and return up to
 *      `limit` (default 10) with the internal similarity field stripped.
 *
 * Any embedding failure downgrades gracefully to keyword-only recall.
 */
async function handleRecall(state: ServerState, args: unknown): Promise<RecallResult> {
  const input = RecallInputSchema.parse(args)
  const containerTag = input.containerTag ?? 'default'
  // Missing profile => treat as an empty profile rather than erroring.
  const profile =
    (await profileRepository.findByContainerTag(containerTag)) ?? {
      containerTag,
      staticFacts: [],
      dynamicFacts: [],
      createdAt: new Date(),
      updatedAt: new Date(),
      version: 1,
    }
  const queryLower = input.query.toLowerCase()
  const queryTokens = getRecallQueryTokens(queryLower)
  const limit = input.limit ?? 10

  // Internal shape carrying the blended score alongside the fact payload;
  // `similarity` is stripped before returning to the caller.
  interface ScoredFact {
    id: string
    content: string
    type: 'static' | 'dynamic'
    category?: string
    confidence: number
    createdAt: string
    similarity: number
  }

  // Candidate pool: both fact kinds are included unless explicitly disabled.
  const factCandidates = [
    ...(input.includeStatic !== false
      ? profile.staticFacts.map((fact) => ({ fact, type: 'static' as const }))
      : []),
    ...(input.includeDynamic !== false
      ? profile.dynamicFacts.map((fact) => ({ fact, type: 'dynamic' as const }))
      : []),
  ]

  const keywordCandidates = factCandidates.map(({ fact, type }) => ({
    fact,
    type,
    keywordScore: getKeywordRecallScore(fact.content, queryLower, queryTokens),
  }))

  // Rank by keyword score; near-ties fall back to fact confidence.
  keywordCandidates.sort((a, b) => {
    const keywordDiff = b.keywordScore - a.keywordScore
    if (Math.abs(keywordDiff) > 0.001) return keywordDiff
    return b.fact.confidence - a.fact.confidence
  })

  // Semantic re-scoring is skipped entirely when only the local embedding
  // fallback is available.
  const semanticEnabled = !state.embeddingService.isUsingLocalFallback()
  // Shortlist size scales with the requested limit, clamped between the
  // configured min/max and the number of candidates actually available.
  const shortlistSize = Math.min(
    Math.max(limit * RECALL_SEMANTIC_SHORTLIST_MULTIPLIER, RECALL_SEMANTIC_MIN_SHORTLIST),
    RECALL_SEMANTIC_MAX_SHORTLIST,
    keywordCandidates.length
  )

  let semanticScores = new Map<string, number>()
  if (semanticEnabled && shortlistSize > 0) {
    try {
      const queryEmbedding = await state.embeddingService.generateEmbedding(input.query)
      const semanticShortlist = keywordCandidates.slice(0, shortlistSize)
      // Only embed facts whose vectors are not already cached.
      const missingEmbeddings = semanticShortlist
        .map(({ fact }) => ({
          cacheKey: `${fact.id}:${fact.content}`,
          fact,
        }))
        .filter(({ cacheKey }) => !recallEmbeddingCache.has(cacheKey))

      if (missingEmbeddings.length > 0) {
        const embeddings = await state.embeddingService.batchEmbed(missingEmbeddings.map(({ fact }) => fact.content))
        // Cache by position: batchEmbed is assumed to return embeddings in
        // input order — TODO confirm against the embedding service contract.
        missingEmbeddings.forEach(({ cacheKey }, index) => {
          const embedding = embeddings[index]
          if (embedding && embedding.length > 0) {
            recallEmbeddingCache.set(cacheKey, embedding)
          }
        })
      }

      // Facts without a usable embedding simply get no semantic score and keep
      // their keyword score.
      semanticScores = new Map(
        semanticShortlist.flatMap(({ fact }) => {
          const embedding = recallEmbeddingCache.get(`${fact.id}:${fact.content}`)
          return embedding ? [[fact.id, cosineSimilarity(queryEmbedding, embedding)] as const] : []
        })
      )
    } catch (error) {
      // Degrade to keyword-only recall instead of failing the whole request.
      logger.warn(
        'Failed to generate recall semantic scores, falling back to keyword-only recall',
        { query: input.query, containerTag },
        error instanceof Error ? error : undefined
      )
    }
  }

  const scoredFacts: ScoredFact[] = keywordCandidates
    .map(({ fact, type, keywordScore }) => ({
      id: fact.id,
      content: fact.content,
      type,
      category: fact.category,
      confidence: fact.confidence,
      createdAt: fact.extractedAt.toISOString(),
      // Blend semantic and keyword scores 70/30 when a semantic score exists.
      similarity:
        semanticScores.size > 0 && semanticScores.has(fact.id)
          ? semanticScores.get(fact.id)! * 0.7 + keywordScore * 0.3
          : keywordScore,
    }))
    // Keep anything above the threshold, plus exact substring matches that
    // would otherwise be filtered out.
    .filter((fact) => fact.similarity >= RECALL_SEMANTIC_THRESHOLD || fact.content.toLowerCase().includes(queryLower))

  scoredFacts.sort((a, b) => {
    const simDiff = b.similarity - a.similarity
    if (Math.abs(simDiff) > 0.01) return simDiff
    return b.confidence - a.confidence
  })

  const limited = scoredFacts.slice(0, limit)

  return {
    // Drop the internal similarity field from the returned facts.
    facts: limited.map(({ similarity: _similarity, ...rest }) => rest),
    query: input.query,
    totalFound: scoredFacts.length,
  }
}
959
+
960
+ // ============================================================================
961
+ // Resource Handlers
962
+ // ============================================================================
963
+
964
/**
 * Resolve a resource URI to its pretty-printed JSON representation.
 *
 * Supported resource types (as produced by parseResourceUri): `profile`,
 * `document`, `search`, `facts`, and `stats`.
 *
 * @param state shared server state (db handle, profile/search services)
 * @param uri   the resource URI requested by the client
 * @returns JSON string (2-space indented) for the resolved resource
 * @throws McpError InvalidParams when a required URI parameter is missing;
 *         InvalidRequest for an unknown document id or resource type
 */
async function handleReadResource(state: ServerState, uri: string): Promise<string> {
  const parsed = parseResourceUri(uri)

  switch (parsed.type) {
    // Full profile (static + dynamic facts) for one container.
    case 'profile': {
      const containerTag = parsed.params.containerTag
      if (!containerTag) {
        throw new McpError(ErrorCode.InvalidParams, 'Container tag required')
      }
      const profile = await state.profileService.getProfile(containerTag)
      const resource: ProfileResource = {
        uri,
        containerTag: profile.containerTag,
        staticFacts: profile.staticFacts.map((f) => ({
          id: f.id,
          content: f.content,
          category: f.category,
          confidence: f.confidence,
          extractedAt: f.extractedAt.toISOString(),
        })),
        dynamicFacts: profile.dynamicFacts.map((f) => ({
          id: f.id,
          content: f.content,
          category: f.category,
          expiresAt: f.expiresAt?.toISOString(),
          extractedAt: f.extractedAt.toISOString(),
        })),
        createdAt: profile.createdAt.toISOString(),
        updatedAt: profile.updatedAt.toISOString(),
        version: profile.version,
      }
      return JSON.stringify(resource, null, 2)
    }

    // A single stored document, with full content and metadata.
    case 'document': {
      const id = parsed.params.id
      if (!id) {
        throw new McpError(ErrorCode.InvalidParams, 'Document ID required')
      }
      const [doc] = await state.db
        .select({
          id: documents.id,
          content: documents.content,
          contentType: documents.contentType,
          containerTag: documents.containerTag,
          metadata: documents.metadata,
          createdAt: documents.createdAt,
          updatedAt: documents.updatedAt,
        })
        .from(documents)
        .where(eq(documents.id, id))
        .limit(1)

      if (!doc) {
        throw new McpError(ErrorCode.InvalidRequest, `Document not found: ${id}`)
      }
      // Metadata is stored loosely; fall back to an empty record when it is
      // not a plain object.
      const metadata = isRecord(doc.metadata) ? doc.metadata : {}
      // Timestamps may arrive as Date objects or strings depending on the
      // driver; normalize to Date before serializing.
      const createdAt = doc.createdAt instanceof Date ? doc.createdAt : new Date(doc.createdAt)
      const updatedAt = doc.updatedAt instanceof Date ? doc.updatedAt : new Date(doc.updatedAt)
      const resource: DocumentResource = {
        uri,
        id: doc.id,
        title: extractTitle(metadata),
        content: doc.content,
        contentType: mapDbContentTypeToMcp(doc.contentType),
        containerTag: doc.containerTag,
        sourceUrl: extractSourceUrl(metadata),
        metadata,
        createdAt: createdAt.toISOString(),
        updatedAt: updatedAt.toISOString(),
      }
      return JSON.stringify(resource, null, 2)
    }

    // Ad-hoc search exposed as a resource. Accepts q/query, container(Tag),
    // limit (default 10) and mode (default hybrid) as URI parameters.
    case 'search': {
      const query = parsed.params.q ?? parsed.params.query ?? ''
      const containerTag = parsed.params.container ?? parsed.params.containerTag
      const limit = parseInt(parsed.params.limit ?? '10', 10)
      const mode = (parsed.params.mode ?? 'hybrid') as 'vector' | 'memory' | 'hybrid'

      if (!query) {
        throw new McpError(ErrorCode.InvalidParams, 'Query parameter (q) required')
      }

      const response = await state.searchService.hybridSearch(query, containerTag, {
        limit,
        searchMode: mode,
      })

      const resource: SearchResource = {
        uri,
        query,
        results: response.results.map((r) => ({
          id: r.id,
          // Prefer memory content; fall back to chunk content, then empty.
          content: r.memory?.content ?? r.chunk?.content ?? '',
          similarity: r.similarity,
          containerTag: r.memory?.containerTag,
          metadata: r.metadata,
        })),
        totalCount: response.totalCount,
        searchTimeMs: response.searchTimeMs,
      }
      return JSON.stringify(resource, null, 2)
    }

    // Flattened fact list (static + dynamic, tagged by type) for a container.
    case 'facts': {
      const containerTag = parsed.params.containerTag
      if (!containerTag) {
        throw new McpError(ErrorCode.InvalidParams, 'Container tag required')
      }
      const profile = await state.profileService.getProfile(containerTag)
      const allFacts = [
        ...profile.staticFacts.map((f) => ({ ...f, type: 'static' as const })),
        ...profile.dynamicFacts.map((f) => ({ ...f, type: 'dynamic' as const })),
      ]
      const resource: FactsResource = {
        uri,
        containerTag,
        facts: allFacts.map((f) => ({
          id: f.id,
          content: f.content,
          type: f.type,
          category: f.category,
          confidence: f.confidence,
          createdAt: f.extractedAt.toISOString(),
          // Only dynamic facts carry expiresAt; guard with `in`.
          expiresAt: 'expiresAt' in f ? f.expiresAt?.toISOString() : undefined,
        })),
        totalCount: allFacts.length,
      }
      return JSON.stringify(resource, null, 2)
    }

    // Aggregate counts across the whole store.
    case 'stats': {
      const stats = await state.searchService.getStats()
      const [countRow] = await state.db.select({ count: sql<number>`count(*)` }).from(documents)
      const totalDocuments = Number(countRow?.count ?? 0)
      const tagRows = await state.db
        .select({ containerTag: documents.containerTag })
        .from(documents)
        .groupBy(documents.containerTag)
      const containerTags = tagRows.map((row) => row.containerTag)

      // Aggregate facts across all container tags
      let totalFacts = 0
      for (const tag of containerTags) {
        try {
          const profile = await state.profileService.getProfile(tag)
          totalFacts += profile.staticFacts.length + profile.dynamicFacts.length
        } catch {
          // Profile may not exist for this tag yet
        }
      }

      const resource: StatsResource = {
        uri,
        totalDocuments,
        totalMemories: stats.memoryCount,
        totalFacts,
        containerTags,
        indexedVectors: stats.vectorCount,
        lastUpdated: new Date().toISOString(),
      }
      return JSON.stringify(resource, null, 2)
    }

    default:
      throw new McpError(ErrorCode.InvalidRequest, `Unknown resource type: ${uri}`)
  }
}
1133
+
1134
+ // ============================================================================
1135
+ // Helper Functions
1136
+ // ============================================================================
1137
+
1138
+ /**
1139
+ * Extract containerTag from tool arguments for rate limiting
1140
+ * Falls back to 'default' if not found
1141
+ */
1142
+ function extractContainerTag(args: unknown): string {
1143
+ if (args && typeof args === 'object' && args !== null) {
1144
+ const argsObj = args as Record<string, unknown>
1145
+ if (typeof argsObj.containerTag === 'string' && argsObj.containerTag) {
1146
+ return argsObj.containerTag
1147
+ }
1148
+ }
1149
+ return 'default'
1150
+ }
1151
+
1152
+ function buildToolResponse(toolName: string, result: unknown) {
1153
+ if (result && typeof result === 'object' && !Array.isArray(result)) {
1154
+ const typedResult = result as Record<string, unknown>
1155
+ const hasSuccessFlag = typeof typedResult.success === 'boolean'
1156
+ const ok = hasSuccessFlag ? Boolean(typedResult.success) : true
1157
+ const rawErrors = Array.isArray(typedResult.errors)
1158
+ ? typedResult.errors.filter((value): value is string => typeof value === 'string')
1159
+ : []
1160
+ const partial =
1161
+ rawErrors.length > 0 &&
1162
+ (typedResult.documentId !== undefined ||
1163
+ (typeof typedResult.deletedCount === 'number' && typedResult.deletedCount > 0) ||
1164
+ (typeof typedResult.documentsDeleted === 'number' && typedResult.documentsDeleted > 0))
1165
+
1166
+ return createToolResponse({
1167
+ tool: toolName,
1168
+ ok: ok && rawErrors.length === 0,
1169
+ data: result,
1170
+ errors:
1171
+ rawErrors.length > 0
1172
+ ? rawErrors.map((message) => createMcpEnvelopeError('PARTIAL_FAILURE', message))
1173
+ : !ok && typeof typedResult.message === 'string'
1174
+ ? [createMcpEnvelopeError('TOOL_OPERATION_FAILED', typedResult.message)]
1175
+ : [],
1176
+ partial,
1177
+ })
1178
+ }
1179
+
1180
+ return createToolResponse({
1181
+ tool: toolName,
1182
+ ok: true,
1183
+ data: result,
1184
+ })
1185
+ }
1186
+
1187
+ // ============================================================================
1188
+ // Server Setup
1189
+ // ============================================================================
1190
+
1191
/**
 * Entry point: validate the environment, build shared server state, register
 * MCP tool and resource handlers, install shutdown hooks, and start serving
 * over stdio.
 */
async function main() {
  await initializeAndValidate()

  const state = createServerState()
  // Migrate any legacy on-disk MCP state into the current store before serving.
  await migrateLegacyMcpState(state)

  const server = new Server(
    {
      name: 'supermemory',
      version: '1.0.0',
    },
    {
      capabilities: {
        tools: {},
        resources: {},
      },
    }
  )

  // Register tool handlers
  server.setRequestHandler(ListToolsRequestSchema, async () => {
    return {
      tools: TOOL_DEFINITIONS,
    }
  })

  server.setRequestHandler(CallToolRequestSchema, async (request) => {
    const { name, arguments: args } = request.params

    // Extract containerTag from arguments for rate limiting
    // Different tools use containerTag in different argument positions
    const containerTag = extractContainerTag(args)

    // Check rate limit before processing
    const rateLimiter = getMCPRateLimiter()
    const rateLimitResult = await rateLimiter.checkLimit(containerTag, name)

    if (!rateLimitResult.allowed) {
      logger.warn('Rate limit exceeded', {
        tool: name,
        containerTag,
        limitType: rateLimitResult.limitType,
        resetIn: rateLimitResult.resetIn,
      })
      return createRateLimitErrorResponse(rateLimitResult, name)
    }

    try {
      let result: unknown

      // Dispatch to the handler matching the requested tool name.
      switch (name) {
        case 'supermemory_add':
          result = await handleAddContent(state, args)
          break
        case 'supermemory_search':
          result = await handleSearch(state, args)
          break
        case 'supermemory_profile':
          result = await handleProfile(state, args)
          break
        case 'supermemory_list':
          result = await handleListDocuments(state, args)
          break
        case 'supermemory_delete':
          result = await handleDelete(state, args)
          break
        case 'supermemory_remember':
          result = await handleRemember(state, args)
          break
        case 'supermemory_recall':
          result = await handleRecall(state, args)
          break
        default:
          throw new McpError(ErrorCode.MethodNotFound, `Unknown tool: ${name}`)
      }

      return buildToolResponse(name, result)
    } catch (error) {
      // Normalize any thrown error into a proper MCP protocol error.
      throw mapErrorToMcpError(error)
    }
  })

  // Register resource handlers
  server.setRequestHandler(ListResourceTemplatesRequestSchema, async () => {
    return {
      resourceTemplates: RESOURCE_TEMPLATES.map((t) => ({
        uriTemplate: t.uriTemplate,
        name: t.name,
        description: t.description,
        mimeType: t.mimeType,
      })),
    }
  })

  server.setRequestHandler(ListResourcesRequestSchema, async () => {
    // Advertise resources for the 10 most recently updated documents and
    // their (deduplicated) container tags.
    const rows = await state.db
      .select({ id: documents.id, containerTag: documents.containerTag })
      .from(documents)
      .orderBy(desc(documents.updatedAt))
      .limit(10)
    const documentIds = rows.map((row) => row.id)
    const containerTags = Array.from(new Set(rows.map((row) => row.containerTag)))
    const resources = generateResourceList(containerTags, documentIds)

    return {
      resources: resources.map((r) => ({
        uri: r.uri,
        name: r.name,
        description: r.description,
        mimeType: r.mimeType,
      })),
    }
  })

  server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
    const { uri } = request.params

    try {
      const content = await handleReadResource(state, uri)
      return {
        contents: [
          {
            uri,
            mimeType: 'application/json',
            text: content,
          },
        ],
      }
    } catch (error) {
      throw mapErrorToMcpError(error)
    }
  })

  // Error handling
  server.onerror = (error) => {
    logger.error('MCP server error', {}, error instanceof Error ? error : undefined)
  }

  // Idempotent shutdown: the first caller wins and later callers await the
  // same in-flight promise, so concurrent signals cannot double-close.
  let shutdownPromise: Promise<void> | null = null
  const shutdown = async (reason: string, exitCode: number, error?: unknown) => {
    if (shutdownPromise) {
      return shutdownPromise
    }

    shutdownPromise = (async () => {
      if (error) {
        logger.error(
          `Shutting down after ${reason}`,
          {},
          error instanceof Error ? error : new Error(String(error))
        )
      } else {
        logger.info(`Shutting down (${reason})`)
      }

      // Best-effort teardown: allSettled so one failing close does not block
      // the others; exit unconditionally afterwards.
      await Promise.allSettled([server.close(), state.searchService.close(), closePostgresDatabase()])
      process.exit(exitCode)
    })()

    return shutdownPromise
  }

  // Graceful exit on termination signals; fatal exit (code 1) on crashes.
  process.on('SIGINT', () => {
    void shutdown('SIGINT', 0)
  })

  process.on('SIGTERM', () => {
    void shutdown('SIGTERM', 0)
  })

  process.on('uncaughtException', (error) => {
    void shutdown('uncaughtException', 1, error)
  })

  process.on('unhandledRejection', (reason) => {
    void shutdown('unhandledRejection', 1, reason)
  })

  // Start server
  const transport = new StdioServerTransport()
  await server.connect(transport)

  logger.info('Supermemory MCP server started on stdio')
}
1375
+
1376
+ // Run main
1377
+ main().catch((error) => {
1378
+ logger.error('Fatal error', {}, error instanceof Error ? error : undefined)
1379
+ process.exit(1)
1380
+ })