@twelvehart/supermemory-runtime 1.0.0-next.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (156)
  1. package/.env.example +57 -0
  2. package/README.md +374 -0
  3. package/dist/index.js +189 -0
  4. package/dist/mcp/index.js +1132 -0
  5. package/docker-compose.prod.yml +91 -0
  6. package/docker-compose.yml +358 -0
  7. package/drizzle/0000_dapper_the_professor.sql +159 -0
  8. package/drizzle/0001_api_keys.sql +51 -0
  9. package/drizzle/meta/0000_snapshot.json +1532 -0
  10. package/drizzle/meta/_journal.json +13 -0
  11. package/drizzle.config.ts +20 -0
  12. package/package.json +114 -0
  13. package/scripts/add-extraction-job.ts +122 -0
  14. package/scripts/benchmark-pgvector.ts +122 -0
  15. package/scripts/bootstrap.sh +209 -0
  16. package/scripts/check-runtime-pack.ts +111 -0
  17. package/scripts/claude-mcp-config.ts +336 -0
  18. package/scripts/docker-entrypoint.sh +183 -0
  19. package/scripts/doctor.ts +377 -0
  20. package/scripts/init-db.sql +33 -0
  21. package/scripts/install.sh +1110 -0
  22. package/scripts/mcp-setup.ts +271 -0
  23. package/scripts/migrations/001_create_pgvector_extension.sql +31 -0
  24. package/scripts/migrations/002_create_memory_embeddings_table.sql +75 -0
  25. package/scripts/migrations/003_create_hnsw_index.sql +94 -0
  26. package/scripts/migrations/004_create_memory_embeddings_standalone.sql +70 -0
  27. package/scripts/migrations/005_create_chunks_table.sql +95 -0
  28. package/scripts/migrations/006_create_processing_queue.sql +45 -0
  29. package/scripts/migrations/generate_test_data.sql +42 -0
  30. package/scripts/migrations/phase1_comprehensive_test.sql +204 -0
  31. package/scripts/migrations/run_migrations.sh +286 -0
  32. package/scripts/migrations/test_hnsw_index.sql +255 -0
  33. package/scripts/pre-commit-secrets +282 -0
  34. package/scripts/run-extraction-worker.ts +46 -0
  35. package/scripts/run-phase1-tests.sh +291 -0
  36. package/scripts/setup.ts +222 -0
  37. package/scripts/smoke-install.sh +12 -0
  38. package/scripts/test-health-endpoint.sh +328 -0
  39. package/src/api/index.ts +2 -0
  40. package/src/api/middleware/auth.ts +80 -0
  41. package/src/api/middleware/csrf.ts +308 -0
  42. package/src/api/middleware/errorHandler.ts +166 -0
  43. package/src/api/middleware/rateLimit.ts +360 -0
  44. package/src/api/middleware/validation.ts +514 -0
  45. package/src/api/routes/documents.ts +286 -0
  46. package/src/api/routes/profiles.ts +237 -0
  47. package/src/api/routes/search.ts +71 -0
  48. package/src/api/stores/index.ts +58 -0
  49. package/src/config/bootstrap-env.ts +3 -0
  50. package/src/config/env.ts +71 -0
  51. package/src/config/feature-flags.ts +25 -0
  52. package/src/config/index.ts +140 -0
  53. package/src/config/secrets.config.ts +291 -0
  54. package/src/db/client.ts +92 -0
  55. package/src/db/index.ts +73 -0
  56. package/src/db/postgres.ts +72 -0
  57. package/src/db/schema/chunks.schema.ts +31 -0
  58. package/src/db/schema/containers.schema.ts +46 -0
  59. package/src/db/schema/documents.schema.ts +49 -0
  60. package/src/db/schema/embeddings.schema.ts +32 -0
  61. package/src/db/schema/index.ts +11 -0
  62. package/src/db/schema/memories.schema.ts +72 -0
  63. package/src/db/schema/profiles.schema.ts +34 -0
  64. package/src/db/schema/queue.schema.ts +59 -0
  65. package/src/db/schema/relationships.schema.ts +42 -0
  66. package/src/db/schema.ts +223 -0
  67. package/src/db/worker-connection.ts +47 -0
  68. package/src/index.ts +235 -0
  69. package/src/mcp/CLAUDE.md +1 -0
  70. package/src/mcp/index.ts +1380 -0
  71. package/src/mcp/legacyState.ts +22 -0
  72. package/src/mcp/rateLimit.ts +358 -0
  73. package/src/mcp/resources.ts +309 -0
  74. package/src/mcp/results.ts +104 -0
  75. package/src/mcp/tools.ts +401 -0
  76. package/src/queues/config.ts +119 -0
  77. package/src/queues/index.ts +289 -0
  78. package/src/sdk/client.ts +225 -0
  79. package/src/sdk/errors.ts +266 -0
  80. package/src/sdk/http.ts +560 -0
  81. package/src/sdk/index.ts +244 -0
  82. package/src/sdk/resources/base.ts +65 -0
  83. package/src/sdk/resources/connections.ts +204 -0
  84. package/src/sdk/resources/documents.ts +163 -0
  85. package/src/sdk/resources/index.ts +10 -0
  86. package/src/sdk/resources/memories.ts +150 -0
  87. package/src/sdk/resources/search.ts +60 -0
  88. package/src/sdk/resources/settings.ts +36 -0
  89. package/src/sdk/types.ts +674 -0
  90. package/src/services/chunking/index.ts +451 -0
  91. package/src/services/chunking.service.ts +650 -0
  92. package/src/services/csrf.service.ts +252 -0
  93. package/src/services/documents.repository.ts +219 -0
  94. package/src/services/documents.service.ts +191 -0
  95. package/src/services/embedding.service.ts +404 -0
  96. package/src/services/extraction.service.ts +300 -0
  97. package/src/services/extractors/code.extractor.ts +451 -0
  98. package/src/services/extractors/index.ts +9 -0
  99. package/src/services/extractors/markdown.extractor.ts +461 -0
  100. package/src/services/extractors/pdf.extractor.ts +315 -0
  101. package/src/services/extractors/text.extractor.ts +118 -0
  102. package/src/services/extractors/url.extractor.ts +243 -0
  103. package/src/services/index.ts +235 -0
  104. package/src/services/ingestion.service.ts +177 -0
  105. package/src/services/llm/anthropic.ts +400 -0
  106. package/src/services/llm/base.ts +460 -0
  107. package/src/services/llm/contradiction-detector.service.ts +526 -0
  108. package/src/services/llm/heuristics.ts +148 -0
  109. package/src/services/llm/index.ts +309 -0
  110. package/src/services/llm/memory-classifier.service.ts +383 -0
  111. package/src/services/llm/memory-extension-detector.service.ts +523 -0
  112. package/src/services/llm/mock.ts +470 -0
  113. package/src/services/llm/openai.ts +398 -0
  114. package/src/services/llm/prompts.ts +438 -0
  115. package/src/services/llm/types.ts +373 -0
  116. package/src/services/memory.repository.ts +1769 -0
  117. package/src/services/memory.service.ts +1338 -0
  118. package/src/services/memory.types.ts +234 -0
  119. package/src/services/persistence/index.ts +295 -0
  120. package/src/services/pipeline.service.ts +509 -0
  121. package/src/services/profile.repository.ts +436 -0
  122. package/src/services/profile.service.ts +560 -0
  123. package/src/services/profile.types.ts +270 -0
  124. package/src/services/relationships/detector.ts +1128 -0
  125. package/src/services/relationships/index.ts +268 -0
  126. package/src/services/relationships/memory-integration.ts +459 -0
  127. package/src/services/relationships/strategies.ts +132 -0
  128. package/src/services/relationships/types.ts +370 -0
  129. package/src/services/search.service.ts +761 -0
  130. package/src/services/search.types.ts +220 -0
  131. package/src/services/secrets.service.ts +384 -0
  132. package/src/services/vectorstore/base.ts +327 -0
  133. package/src/services/vectorstore/index.ts +444 -0
  134. package/src/services/vectorstore/memory.ts +286 -0
  135. package/src/services/vectorstore/migration.ts +295 -0
  136. package/src/services/vectorstore/mock.ts +403 -0
  137. package/src/services/vectorstore/pgvector.ts +695 -0
  138. package/src/services/vectorstore/types.ts +247 -0
  139. package/src/startup.ts +389 -0
  140. package/src/types/api.types.ts +193 -0
  141. package/src/types/document.types.ts +103 -0
  142. package/src/types/index.ts +241 -0
  143. package/src/types/profile.base.ts +133 -0
  144. package/src/utils/errors.ts +447 -0
  145. package/src/utils/id.ts +15 -0
  146. package/src/utils/index.ts +101 -0
  147. package/src/utils/logger.ts +313 -0
  148. package/src/utils/sanitization.ts +501 -0
  149. package/src/utils/secret-validation.ts +273 -0
  150. package/src/utils/synonyms.ts +188 -0
  151. package/src/utils/validation.ts +581 -0
  152. package/src/workers/chunking.worker.ts +242 -0
  153. package/src/workers/embedding.worker.ts +358 -0
  154. package/src/workers/extraction.worker.ts +346 -0
  155. package/src/workers/indexing.worker.ts +505 -0
  156. package/tsconfig.json +38 -0
@@ -0,0 +1,13 @@
1
+ {
2
+ "version": "7",
3
+ "dialect": "postgresql",
4
+ "entries": [
5
+ {
6
+ "idx": 0,
7
+ "version": "7",
8
+ "when": 1770055278660,
9
+ "tag": "0000_dapper_the_professor",
10
+ "breakpoints": true
11
+ }
12
+ ]
13
+ }
@@ -0,0 +1,20 @@
1
+ import { defineConfig } from 'drizzle-kit';
2
+
3
+ // Determine database type from DATABASE_URL
4
+ const databaseUrl = process.env.DATABASE_URL ?? './data/supermemory.db';
5
+ const isPostgres = databaseUrl.startsWith('postgresql://') || databaseUrl.startsWith('postgres://');
6
+
7
+ export default defineConfig({
8
+ schema: './src/db/schema/index.ts',
9
+ out: './drizzle',
10
+ dialect: isPostgres ? 'postgresql' : 'sqlite',
11
+ dbCredentials: isPostgres
12
+ ? {
13
+ url: databaseUrl,
14
+ }
15
+ : {
16
+ url: databaseUrl,
17
+ },
18
+ verbose: true,
19
+ strict: true,
20
+ });
package/package.json ADDED
@@ -0,0 +1,114 @@
1
+ {
2
+ "name": "@twelvehart/supermemory-runtime",
3
+ "version": "1.0.0-next.0",
4
+ "description": "A personal AI memory assistant - supermemory.ai clone",
5
+ "main": "dist/index.js",
6
+ "types": "dist/index.d.ts",
7
+ "type": "module",
8
+ "files": [
9
+ "README.md",
10
+ ".env.example",
11
+ "package-lock.json",
12
+ "src",
13
+ "scripts",
14
+ "drizzle",
15
+ "docker-compose.yml",
16
+ "docker-compose.prod.yml",
17
+ "drizzle.config.ts",
18
+ "tsconfig.json"
19
+ ],
20
+ "bin": {
21
+ "supermemory-mcp": "dist/mcp/index.js"
22
+ },
23
+ "scripts": {
24
+ "dev": "tsx watch src/index.ts",
25
+ "build": "tsc",
26
+ "build:install": "npm --prefix packages/install run build",
27
+ "start": "node dist/index.js",
28
+ "mcp": "node dist/mcp/index.js",
29
+ "mcp:dev": "tsx src/mcp/index.ts",
30
+ "mcp:setup": "tsx scripts/mcp-setup.ts",
31
+ "setup": "tsx scripts/setup.ts",
32
+ "setup:turnkey": "bash scripts/install.sh",
33
+ "stack:up": "docker compose -f docker-compose.yml -f docker-compose.prod.yml --profile production up -d api postgres redis",
34
+ "stack:down": "docker compose -f docker-compose.yml -f docker-compose.prod.yml down",
35
+ "stack:logs": "docker compose -f docker-compose.yml -f docker-compose.prod.yml logs -f api",
36
+ "doctor": "tsx scripts/doctor.ts",
37
+ "test": "vitest",
38
+ "test:run": "vitest run",
39
+ "test:coverage": "vitest run --coverage",
40
+ "test:ui": "vitest --ui",
41
+ "test:watch": "vitest --watch",
42
+ "db:generate": "drizzle-kit generate",
43
+ "db:migrate": "drizzle-kit migrate",
44
+ "db:studio": "drizzle-kit studio",
45
+ "db:push": "drizzle-kit push",
46
+ "db:test:phase1": "bash scripts/run-phase1-tests.sh",
47
+ "db:test:phase1:keep": "bash scripts/run-phase1-tests.sh --keep-db",
48
+ "db:test:phase1:verbose": "bash scripts/run-phase1-tests.sh --verbose",
49
+ "lint": "eslint src tests --ext .ts",
50
+ "lint:fix": "eslint src tests --ext .ts --fix",
51
+ "format": "prettier --write \"src/**/*.ts\" \"tests/**/*.ts\"",
52
+ "format:check": "prettier --check \"src/**/*.ts\" \"tests/**/*.ts\"",
53
+ "typecheck": "tsc --noEmit",
54
+ "typecheck:install": "node node_modules/typescript/bin/tsc -p packages/install/tsconfig.json --noEmit",
55
+ "validate": "npm run typecheck && npm run lint && npm run format:check && npm run test:run",
56
+ "pack:check:runtime": "tsx scripts/check-runtime-pack.ts",
57
+ "test:install": "vitest run tests/install/*.test.ts",
58
+ "clean": "rm -rf dist coverage",
59
+ "prepublishOnly": "npm run build"
60
+ },
61
+ "keywords": [
62
+ "ai",
63
+ "memory",
64
+ "rag",
65
+ "embeddings",
66
+ "vector-search"
67
+ ],
68
+ "author": "",
69
+ "license": "MIT",
70
+ "dependencies": {
71
+ "@hono/node-server": "^1.13.7",
72
+ "@modelcontextprotocol/sdk": "^1.25.3",
73
+ "bcrypt": "^5.1.1",
74
+ "better-sqlite3": "^11.6.0",
75
+ "bullmq": "^5.67.2",
76
+ "dotenv": "^16.4.7",
77
+ "drizzle-orm": "^0.45.1",
78
+ "hono": "^4.6.14",
79
+ "ioredis": "^5.9.2",
80
+ "isomorphic-dompurify": "^2.35.0",
81
+ "js-yaml": "^4.1.1",
82
+ "openai": "^4.77.0",
83
+ "pdf-parse": "^1.1.1",
84
+ "pg": "^8.18.0",
85
+ "uuid": "^10.0.0",
86
+ "zod": "^3.24.1"
87
+ },
88
+ "devDependencies": {
89
+ "@types/bcrypt": "^5.0.2",
90
+ "@types/better-sqlite3": "^7.6.12",
91
+ "@types/dompurify": "^3.0.5",
92
+ "@types/ioredis": "^4.28.10",
93
+ "@types/js-yaml": "^4.0.9",
94
+ "@types/node": "^22.10.5",
95
+ "@types/pdf-parse": "^1.1.4",
96
+ "@types/pg": "^8.16.0",
97
+ "@types/uuid": "^10.0.0",
98
+ "@typescript-eslint/eslint-plugin": "^8.54.0",
99
+ "@typescript-eslint/parser": "^8.54.0",
100
+ "@vitest/coverage-v8": "^2.1.8",
101
+ "@vitest/ui": "^2.1.8",
102
+ "drizzle-kit": "^0.30.1",
103
+ "eslint": "^9.39.2",
104
+ "eslint-config-prettier": "^10.1.8",
105
+ "globals": "^17.3.0",
106
+ "prettier": "^3.8.1",
107
+ "tsx": "^4.19.2",
108
+ "typescript": "^5.7.2",
109
+ "vitest": "^2.1.8"
110
+ },
111
+ "engines": {
112
+ "node": ">=20.0.0"
113
+ }
114
+ }
@@ -0,0 +1,122 @@
1
#!/usr/bin/env tsx
/**
 * Add Extraction Job
 *
 * Example script to add a job to the extraction queue.
 * Run with: npx tsx scripts/add-extraction-job.ts
 */

import { createExtractionQueue } from '../src/workers/extraction.worker.js';
import { getDatabase } from '../src/db/index.js';
import { documents, processingQueue } from '../src/db/schema/index.js';
import IORedis from 'ioredis';
import dotenv from 'dotenv';
import { v4 as uuidv4 } from 'uuid';

// Load environment variables
dotenv.config();

// Defaults to the local SQLite file when DATABASE_URL is not configured.
const DATABASE_URL = process.env.DATABASE_URL || './data/supermemory.db';
const db = getDatabase(DATABASE_URL);

// Redis connection
// maxRetriesPerRequest: null is required by BullMQ for connections it manages.
const connection = new IORedis({
  host: process.env.REDIS_HOST || 'localhost',
  port: parseInt(process.env.REDIS_PORT || '6379', 10),
  maxRetriesPerRequest: null,
});

/**
 * Inserts a sample document and a matching processing-queue row, enqueues an
 * extraction job for it, then waits (up to 60s) for the job to complete
 * before closing the queue and the Redis connection.
 */
async function main() {
  console.log('[AddJob] Creating test document...');

  // Create test document
  const documentId = uuidv4();
  const containerTag = 'test-user';

  // NOTE(review): the content is markdown but contentType is 'text/plain' —
  // presumably intentional so the plain-text extractor is exercised; confirm.
  await db.insert(documents).values({
    id: documentId,
    content: `# Test Document

This is a test document for the extraction worker.

## Features

- Text extraction
- URL extraction
- PDF extraction
- Markdown extraction
- Code extraction

## Example Code

\`\`\`javascript
function hello() {
  console.log('Hello, World!');
}
\`\`\`

Visit [our website](https://example.com) for more information.
`,
    contentType: 'text/plain',
    status: 'pending',
    containerTag,
    metadata: {
      source: 'test-script',
      createdBy: 'add-extraction-job.ts',
    },
  });

  console.log(`[AddJob] Document created: ${documentId}`);

  // Create processing queue entry
  await db.insert(processingQueue).values({
    documentId,
    stage: 'extraction',
    status: 'pending',
    priority: 5,
  });

  console.log('[AddJob] Processing queue entry created');

  // Add job to extraction queue
  const queue = createExtractionQueue(connection);

  // jobId is derived from the document id so re-running for the same document
  // deduplicates rather than enqueueing twice.
  const job = await queue.add(
    'extract',
    {
      documentId,
      sourceType: 'text',
      containerTag,
    },
    {
      priority: 5,
      jobId: `extraction-${documentId}`,
    }
  );

  console.log(`[AddJob] Job added to queue: ${job.id}`);
  console.log('[AddJob] Waiting for job to complete...');

  // Wait for completion (with timeout)
  // NOTE(review): BullMQ's Job#waitUntilFinished expects a QueueEvents
  // instance; this assumes createExtractionQueue attaches one at
  // `queue.events` — confirm against extraction.worker.ts.
  try {
    const result = await job.waitUntilFinished(queue.events, 60000); // 60 second timeout
    console.log('[AddJob] Job completed successfully!');
    console.log(`[AddJob] Extracted ${result.extractedContent.length} characters`);
    console.log(`[AddJob] Content type: ${result.contentType}`);
    console.log(`[AddJob] Processing time: ${result.processingTimeMs}ms`);
  } catch (error) {
    // Failure or timeout is logged but does not change the exit code: this is
    // a demo script, and cleanup below should still run.
    console.error('[AddJob] Job failed or timed out:', error);
  }

  // Cleanup
  await queue.close();
  await connection.quit();

  console.log('[AddJob] Done!');
  process.exit(0);
}

main().catch((error) => {
  console.error('[AddJob] Error:', error);
  process.exit(1);
});
@@ -0,0 +1,122 @@
1
+ #!/usr/bin/env tsx
2
+ /**
3
+ * PgVectorStore Performance Benchmark
4
+ *
5
+ * Measures insert and search performance with various dataset sizes
6
+ */
7
+
8
+ import { createPgVectorStore } from '../src/services/vectorstore/pgvector.js';
9
+ import { VectorEntry } from '../src/services/vectorstore/types.js';
10
+
11
+ const POSTGRES_URL = process.env.TEST_POSTGRES_URL ?? 'postgresql://supermemory:supermemory_secret@localhost:5432/supermemory';
12
+ const DIMENSIONS = 1536;
13
+
14
// Timing summary for a single benchmark run.
interface BenchmarkResult {
  operation: string; // human-readable label, e.g. "Batch Insert"
  itemCount: number; // items inserted, or number of search iterations
  totalTimeMs: number; // wall-clock duration of the whole run
  avgTimePerItemMs: number; // totalTimeMs / itemCount
  opsPerSecond: number; // derived throughput
}
21
+
22
// Pretty-print all benchmark results to stdout inside a banner box.
// NOTE(review): internal spacing of the banner strings was reconstructed from
// an extraction that collapsed whitespace — verify alignment against the
// published package before editing these literals.
function formatResults(results: BenchmarkResult[]) {
  console.log('\n╔════════════════════════════════════════════════════════════════════╗');
  console.log('║ PgVectorStore Performance Benchmark Results ║');
  console.log('╚════════════════════════════════════════════════════════════════════╝\n');

  for (const result of results) {
    console.log(`Operation: ${result.operation}`);
    console.log(` Items: ${result.itemCount}`);
    console.log(` Total Time: ${result.totalTimeMs.toFixed(2)}ms`);
    console.log(` Avg Time/Item: ${result.avgTimePerItemMs.toFixed(4)}ms`);
    console.log(` Ops/Second: ${result.opsPerSecond.toFixed(2)}`);
    console.log('');
  }
}
36
+
37
+ async function benchmarkInsert(store: any, count: number): Promise<BenchmarkResult> {
38
+ const entries: VectorEntry[] = Array.from({ length: count }, (_, i) => ({
39
+ id: `bench-insert-${i}`,
40
+ embedding: new Array(DIMENSIONS).fill(0).map(() => Math.random()),
41
+ metadata: { index: i, benchmark: 'insert' },
42
+ }));
43
+
44
+ const start = performance.now();
45
+ await store.addBatch(entries);
46
+ const totalTime = performance.now() - start;
47
+
48
+ return {
49
+ operation: 'Batch Insert',
50
+ itemCount: count,
51
+ totalTimeMs: totalTime,
52
+ avgTimePerItemMs: totalTime / count,
53
+ opsPerSecond: (count / totalTime) * 1000,
54
+ };
55
+ }
56
+
57
+ async function benchmarkSearch(store: any, vectorCount: number, searchCount: number): Promise<BenchmarkResult> {
58
+ const queryVector = new Array(DIMENSIONS).fill(0.5);
59
+ const start = performance.now();
60
+
61
+ for (let i = 0; i < searchCount; i++) {
62
+ await store.search(queryVector, { limit: 10 });
63
+ }
64
+
65
+ const totalTime = performance.now() - start;
66
+
67
+ return {
68
+ operation: `Search (${vectorCount} vectors)`,
69
+ itemCount: searchCount,
70
+ totalTimeMs: totalTime,
71
+ avgTimePerItemMs: totalTime / searchCount,
72
+ opsPerSecond: (searchCount / totalTime) * 1000,
73
+ };
74
+ }
75
+
76
+ async function main() {
77
+ console.log('Starting PgVectorStore performance benchmarks...\n');
78
+
79
+ const store = createPgVectorStore(POSTGRES_URL, DIMENSIONS, {
80
+ tableName: 'benchmark_vectors',
81
+ hnswConfig: {
82
+ M: 16,
83
+ efConstruction: 64,
84
+ },
85
+ });
86
+
87
+ await store.initialize();
88
+ await store.clear();
89
+
90
+ const results: BenchmarkResult[] = [];
91
+
92
+ // Benchmark 1: Insert 1,000 vectors
93
+ console.log('Running benchmark: Insert 1,000 vectors...');
94
+ results.push(await benchmarkInsert(store, 1000));
95
+
96
+ // Benchmark 2: Search with 1,000 vectors
97
+ console.log('Running benchmark: Search (1,000 vectors)...');
98
+ results.push(await benchmarkSearch(store, 1000, 100));
99
+
100
+ // Benchmark 3: Insert 10,000 vectors
101
+ console.log('Running benchmark: Insert 10,000 vectors...');
102
+ await store.clear();
103
+ results.push(await benchmarkInsert(store, 10000));
104
+
105
+ // Benchmark 4: Search with 10,000 vectors
106
+ console.log('Running benchmark: Search (10,000 vectors)...');
107
+ results.push(await benchmarkSearch(store, 10000, 100));
108
+
109
+ // Clean up
110
+ await store.clear();
111
+ await store.close();
112
+
113
+ formatResults(results);
114
+
115
+ // Check performance targets
116
+ console.log('Performance Target Validation:');
117
+ console.log(' Insert < 10ms per item:', results[0]!.avgTimePerItemMs < 10 ? '✓ PASS' : '✗ FAIL');
118
+ console.log(' Search < 100ms (10K vectors):', results[3]!.avgTimePerItemMs < 100 ? '✓ PASS' : '✗ FAIL');
119
+ console.log(' Batch insert < 500ms (100 items):', results[0]!.totalTimeMs < 500 ? '✓ PASS' : '✗ FAIL');
120
+ }
121
+
122
+ main().catch(console.error);
@@ -0,0 +1,209 @@
1
+ #!/usr/bin/env bash
2
+ set -euo pipefail
3
+
4
# Print a level-tagged message, e.g. "[INFO] message text".
log() {
  local lvl=$1
  shift
  printf '[%s] %s\n' "$lvl" "$*"
}

# Log at FAIL level and abort the script.
fail() {
  log "FAIL" "$*"
  exit 1
}

# True iff the named executable is resolvable on PATH.
command_exists() {
  command -v "$1" >/dev/null 2>&1
}

# Abort unless the named executable is available.
require_command() {
  local name=$1
  command_exists "$name" || fail "Missing required command: $name"
}
25
+
26
# Print CLI help text to stdout. The quoted heredoc delimiter ('USAGE')
# prevents any expansion inside the help body.
usage() {
  cat <<'USAGE'
Usage: bootstrap.sh [options] [-- <install.sh args>]

Options:
  --repo-url URL        Git repository URL
  --ref REF             Git branch/tag/sha to checkout
  --dir DIR             Install directory (default: supermemory-clone)
  --update-if-exists    Update an existing compatible checkout in place
  -h, --help            Show this help

Examples:
  bootstrap.sh
  bootstrap.sh --repo-url https://github.com/acme/supermemory-clone.git
  bootstrap.sh --dir ./supermemory -- --non-interactive --skip-api-keys
  bootstrap.sh --dir ./supermemory --update-if-exists -- --non-interactive --skip-mcp
USAGE
}
44
+
45
# Configuration, overridable via environment variables of the same intent.
repo_url="${REPO_URL:-https://github.com/ASRagab/supermemory-clone.git}"  # source repository
install_ref="${INSTALL_REF:-}"                   # optional branch/tag/sha; empty = default branch
install_dir="${INSTALL_DIR:-supermemory-clone}"  # target checkout directory
update_if_exists=0                               # set to 1 by --update-if-exists
temp_clone_dir=""                                # set by clone_into_temp_dir; removed by the EXIT trap
declare -a install_args=()                       # args after `--`, forwarded to scripts/install.sh
51
+
52
# Remove the temporary clone directory, if one was created and still exists.
# Safe to call repeatedly; registered as the EXIT trap below.
cleanup() {
  [[ -n "$temp_clone_dir" && -d "$temp_clone_dir" ]] || return 0
  rm -rf "$temp_clone_dir"
}
57
+
58
+ trap cleanup EXIT
59
+
60
# Run the checkout's installer from inside it, forwarding any extra args.
# The subshell keeps the caller's working directory unchanged.
run_installer() {
  local checkout=$1
  shift || true
  (
    cd "$checkout"
    bash ./scripts/install.sh "$@"
  )
}
68
+
69
# Fetch and check out the globally requested $install_ref inside the given
# checkout. No-op when no ref was requested; otherwise leaves the checkout
# on a detached HEAD at the fetched ref.
checkout_requested_ref() {
  local checkout_dir=$1

  [[ -n "$install_ref" ]] || return 0

  log "INFO" "Fetching requested ref: $install_ref"
  git -C "$checkout_dir" fetch --depth 1 origin "$install_ref"
  log "INFO" "Checking out ref: $install_ref"
  git -C "$checkout_dir" checkout FETCH_HEAD
}
81
+
82
# Clone $repo_url into a fresh hidden temp directory under parent_dir and
# check out the requested ref (if any). Sets the global temp_clone_dir so the
# EXIT trap removes the partial clone on failure.
clone_into_temp_dir() {
  local parent_dir="$1"
  mkdir -p "$parent_dir"
  # The temp dir lives inside parent_dir so the later `mv` into the final
  # install path stays on the same filesystem (atomic rename, no copy).
  temp_clone_dir="$(mktemp -d "${parent_dir%/}/.supermemory-bootstrap.XXXXXX")"

  log "INFO" "Cloning repository: $repo_url"
  git clone --depth 1 "$repo_url" "$temp_clone_dir"
  checkout_requested_ref "$temp_clone_dir"
}
91
+
92
# Validate that target_dir is a checkout we can safely update in place:
# it must be a git work tree, contain scripts/install.sh, and carry no
# uncommitted tracked changes. Untracked files are tolerated.
ensure_compatible_checkout() {
  local target_dir="$1"

  if ! git -C "$target_dir" rev-parse --is-inside-work-tree >/dev/null 2>&1; then
    fail "--update-if-exists requires an existing git checkout: $target_dir"
  fi

  if [[ ! -f "$target_dir/scripts/install.sh" ]]; then
    fail "--update-if-exists requires a compatible checkout with scripts/install.sh: $target_dir"
  fi

  # `diff --quiet` detects unstaged edits; `diff --cached --quiet` detects
  # staged-but-uncommitted edits. Either one blocks the in-place update.
  if ! git -C "$target_dir" diff --quiet --ignore-submodules -- || ! git -C "$target_dir" diff --cached --quiet --ignore-submodules --; then
    fail "--update-if-exists cannot run on a checkout with uncommitted tracked changes: $target_dir"
  fi
}
107
+
108
# Update an existing checkout in place, then run the installer against it.
# With --ref the ref is fetched and checked out (detached HEAD); otherwise
# the current branch is fast-forwarded from origin. The user's forwarded
# install.sh args are normalized so the installer always performs an
# update-style action.
update_existing_checkout() {
  local target_dir="$1"
  local current_branch
  local -a effective_args=()

  ensure_compatible_checkout "$target_dir"

  if [[ -n "$install_ref" ]]; then
    checkout_requested_ref "$target_dir"
  else
    # `|| true` keeps set -e from aborting on detached HEAD; we handle the
    # empty result explicitly just below.
    current_branch="$(git -C "$target_dir" symbolic-ref --quiet --short HEAD || true)"
    if [[ -z "$current_branch" ]]; then
      fail "--update-if-exists on a detached HEAD requires --ref"
    fi

    log "INFO" "Fast-forwarding existing checkout"
    git -C "$target_dir" pull --ff-only origin "$current_branch"
  fi

  # Normalize forwarded args onto an update invocation:
  #   (none)                     -> update
  #   explicit subcommand        -> pass through unchanged
  #   bare mode (agent/api/full) -> update --mode <mode> <rest>
  #   anything else              -> update <args>
  if [[ "${#install_args[@]}" -eq 0 ]]; then
    effective_args=(update)
  else
    case "${install_args[0]}" in
      install|update|uninstall)
        effective_args=("${install_args[@]}")
        ;;
      agent|api|full)
        effective_args=(update --mode "${install_args[0]}" "${install_args[@]:1}")
        ;;
      *)
        effective_args=(update "${install_args[@]}")
        ;;
    esac
  fi

  log "INFO" "Running installer against existing checkout"
  run_installer "$target_dir" "${effective_args[@]}"
}
146
+
147
# Parse CLI options into the globals above. Everything after `--` is
# collected verbatim into install_args and forwarded to scripts/install.sh.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --repo-url)
      [[ $# -lt 2 ]] && fail "--repo-url requires a value"
      repo_url="$2"
      shift 2
      ;;
    --ref)
      [[ $# -lt 2 ]] && fail "--ref requires a value"
      install_ref="$2"
      shift 2
      ;;
    --dir)
      [[ $# -lt 2 ]] && fail "--dir requires a value"
      install_dir="$2"
      shift 2
      ;;
    --update-if-exists)
      update_if_exists=1
      shift
      ;;
    --)
      # Remaining args belong to install.sh, not bootstrap.sh.
      shift
      install_args=("$@")
      break
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      fail "Unknown option: $1"
      ;;
  esac
done
182
+
183
require_command git
require_command bash

# Validate the install directory against the chosen mode up front:
# fresh install needs the path to be absent; in-place update needs it present.
if [[ -e "$install_dir" ]]; then
  if [[ "$update_if_exists" -ne 1 ]]; then
    fail "Install directory already exists: $install_dir (use --update-if-exists to reuse a compatible checkout)"
  fi
else
  if [[ "$update_if_exists" -eq 1 ]]; then
    fail "--update-if-exists requires an existing install directory: $install_dir"
  fi
fi

if [[ "$update_if_exists" -eq 1 ]]; then
  update_existing_checkout "$install_dir"
else
  clone_into_temp_dir "$(dirname "$install_dir")"

  log "INFO" "Moving installed checkout into place: $install_dir"
  mv "$temp_clone_dir" "$install_dir"
  # Clear the global so the EXIT trap does not delete the finished install.
  temp_clone_dir=""

  log "INFO" "Running turnkey installer"
  # Fix: only expand install_args when non-empty. Under `set -u`, bash < 4.4
  # (e.g. macOS /bin/bash 3.2) treats "${install_args[@]}" on an empty array
  # as an unbound-variable error, which aborted a plain no-args bootstrap.
  if [[ "${#install_args[@]}" -gt 0 ]]; then
    run_installer "$install_dir" "${install_args[@]}"
  else
    run_installer "$install_dir"
  fi
fi

log "OK" "Bootstrap complete"