@twelvehart/supermemory-runtime 1.0.0-next.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +57 -0
- package/README.md +374 -0
- package/dist/index.js +189 -0
- package/dist/mcp/index.js +1132 -0
- package/docker-compose.prod.yml +91 -0
- package/docker-compose.yml +358 -0
- package/drizzle/0000_dapper_the_professor.sql +159 -0
- package/drizzle/0001_api_keys.sql +51 -0
- package/drizzle/meta/0000_snapshot.json +1532 -0
- package/drizzle/meta/_journal.json +13 -0
- package/drizzle.config.ts +20 -0
- package/package.json +114 -0
- package/scripts/add-extraction-job.ts +122 -0
- package/scripts/benchmark-pgvector.ts +122 -0
- package/scripts/bootstrap.sh +209 -0
- package/scripts/check-runtime-pack.ts +111 -0
- package/scripts/claude-mcp-config.ts +336 -0
- package/scripts/docker-entrypoint.sh +183 -0
- package/scripts/doctor.ts +377 -0
- package/scripts/init-db.sql +33 -0
- package/scripts/install.sh +1110 -0
- package/scripts/mcp-setup.ts +271 -0
- package/scripts/migrations/001_create_pgvector_extension.sql +31 -0
- package/scripts/migrations/002_create_memory_embeddings_table.sql +75 -0
- package/scripts/migrations/003_create_hnsw_index.sql +94 -0
- package/scripts/migrations/004_create_memory_embeddings_standalone.sql +70 -0
- package/scripts/migrations/005_create_chunks_table.sql +95 -0
- package/scripts/migrations/006_create_processing_queue.sql +45 -0
- package/scripts/migrations/generate_test_data.sql +42 -0
- package/scripts/migrations/phase1_comprehensive_test.sql +204 -0
- package/scripts/migrations/run_migrations.sh +286 -0
- package/scripts/migrations/test_hnsw_index.sql +255 -0
- package/scripts/pre-commit-secrets +282 -0
- package/scripts/run-extraction-worker.ts +46 -0
- package/scripts/run-phase1-tests.sh +291 -0
- package/scripts/setup.ts +222 -0
- package/scripts/smoke-install.sh +12 -0
- package/scripts/test-health-endpoint.sh +328 -0
- package/src/api/index.ts +2 -0
- package/src/api/middleware/auth.ts +80 -0
- package/src/api/middleware/csrf.ts +308 -0
- package/src/api/middleware/errorHandler.ts +166 -0
- package/src/api/middleware/rateLimit.ts +360 -0
- package/src/api/middleware/validation.ts +514 -0
- package/src/api/routes/documents.ts +286 -0
- package/src/api/routes/profiles.ts +237 -0
- package/src/api/routes/search.ts +71 -0
- package/src/api/stores/index.ts +58 -0
- package/src/config/bootstrap-env.ts +3 -0
- package/src/config/env.ts +71 -0
- package/src/config/feature-flags.ts +25 -0
- package/src/config/index.ts +140 -0
- package/src/config/secrets.config.ts +291 -0
- package/src/db/client.ts +92 -0
- package/src/db/index.ts +73 -0
- package/src/db/postgres.ts +72 -0
- package/src/db/schema/chunks.schema.ts +31 -0
- package/src/db/schema/containers.schema.ts +46 -0
- package/src/db/schema/documents.schema.ts +49 -0
- package/src/db/schema/embeddings.schema.ts +32 -0
- package/src/db/schema/index.ts +11 -0
- package/src/db/schema/memories.schema.ts +72 -0
- package/src/db/schema/profiles.schema.ts +34 -0
- package/src/db/schema/queue.schema.ts +59 -0
- package/src/db/schema/relationships.schema.ts +42 -0
- package/src/db/schema.ts +223 -0
- package/src/db/worker-connection.ts +47 -0
- package/src/index.ts +235 -0
- package/src/mcp/CLAUDE.md +1 -0
- package/src/mcp/index.ts +1380 -0
- package/src/mcp/legacyState.ts +22 -0
- package/src/mcp/rateLimit.ts +358 -0
- package/src/mcp/resources.ts +309 -0
- package/src/mcp/results.ts +104 -0
- package/src/mcp/tools.ts +401 -0
- package/src/queues/config.ts +119 -0
- package/src/queues/index.ts +289 -0
- package/src/sdk/client.ts +225 -0
- package/src/sdk/errors.ts +266 -0
- package/src/sdk/http.ts +560 -0
- package/src/sdk/index.ts +244 -0
- package/src/sdk/resources/base.ts +65 -0
- package/src/sdk/resources/connections.ts +204 -0
- package/src/sdk/resources/documents.ts +163 -0
- package/src/sdk/resources/index.ts +10 -0
- package/src/sdk/resources/memories.ts +150 -0
- package/src/sdk/resources/search.ts +60 -0
- package/src/sdk/resources/settings.ts +36 -0
- package/src/sdk/types.ts +674 -0
- package/src/services/chunking/index.ts +451 -0
- package/src/services/chunking.service.ts +650 -0
- package/src/services/csrf.service.ts +252 -0
- package/src/services/documents.repository.ts +219 -0
- package/src/services/documents.service.ts +191 -0
- package/src/services/embedding.service.ts +404 -0
- package/src/services/extraction.service.ts +300 -0
- package/src/services/extractors/code.extractor.ts +451 -0
- package/src/services/extractors/index.ts +9 -0
- package/src/services/extractors/markdown.extractor.ts +461 -0
- package/src/services/extractors/pdf.extractor.ts +315 -0
- package/src/services/extractors/text.extractor.ts +118 -0
- package/src/services/extractors/url.extractor.ts +243 -0
- package/src/services/index.ts +235 -0
- package/src/services/ingestion.service.ts +177 -0
- package/src/services/llm/anthropic.ts +400 -0
- package/src/services/llm/base.ts +460 -0
- package/src/services/llm/contradiction-detector.service.ts +526 -0
- package/src/services/llm/heuristics.ts +148 -0
- package/src/services/llm/index.ts +309 -0
- package/src/services/llm/memory-classifier.service.ts +383 -0
- package/src/services/llm/memory-extension-detector.service.ts +523 -0
- package/src/services/llm/mock.ts +470 -0
- package/src/services/llm/openai.ts +398 -0
- package/src/services/llm/prompts.ts +438 -0
- package/src/services/llm/types.ts +373 -0
- package/src/services/memory.repository.ts +1769 -0
- package/src/services/memory.service.ts +1338 -0
- package/src/services/memory.types.ts +234 -0
- package/src/services/persistence/index.ts +295 -0
- package/src/services/pipeline.service.ts +509 -0
- package/src/services/profile.repository.ts +436 -0
- package/src/services/profile.service.ts +560 -0
- package/src/services/profile.types.ts +270 -0
- package/src/services/relationships/detector.ts +1128 -0
- package/src/services/relationships/index.ts +268 -0
- package/src/services/relationships/memory-integration.ts +459 -0
- package/src/services/relationships/strategies.ts +132 -0
- package/src/services/relationships/types.ts +370 -0
- package/src/services/search.service.ts +761 -0
- package/src/services/search.types.ts +220 -0
- package/src/services/secrets.service.ts +384 -0
- package/src/services/vectorstore/base.ts +327 -0
- package/src/services/vectorstore/index.ts +444 -0
- package/src/services/vectorstore/memory.ts +286 -0
- package/src/services/vectorstore/migration.ts +295 -0
- package/src/services/vectorstore/mock.ts +403 -0
- package/src/services/vectorstore/pgvector.ts +695 -0
- package/src/services/vectorstore/types.ts +247 -0
- package/src/startup.ts +389 -0
- package/src/types/api.types.ts +193 -0
- package/src/types/document.types.ts +103 -0
- package/src/types/index.ts +241 -0
- package/src/types/profile.base.ts +133 -0
- package/src/utils/errors.ts +447 -0
- package/src/utils/id.ts +15 -0
- package/src/utils/index.ts +101 -0
- package/src/utils/logger.ts +313 -0
- package/src/utils/sanitization.ts +501 -0
- package/src/utils/secret-validation.ts +273 -0
- package/src/utils/synonyms.ts +188 -0
- package/src/utils/validation.ts +581 -0
- package/src/workers/chunking.worker.ts +242 -0
- package/src/workers/embedding.worker.ts +358 -0
- package/src/workers/extraction.worker.ts +346 -0
- package/src/workers/indexing.worker.ts +505 -0
- package/tsconfig.json +38 -0
|
@@ -0,0 +1,271 @@
|
|
|
1
|
+
#!/usr/bin/env tsx
|
|
2
|
+
import { execSync } from 'node:child_process';
|
|
3
|
+
import { existsSync, readFileSync } from 'node:fs';
|
|
4
|
+
import { createInterface } from 'node:readline';
|
|
5
|
+
import { resolve } from 'node:path';
|
|
6
|
+
import pkg from 'pg';
|
|
7
|
+
import { loadEnvFile } from '../src/config/env.js';
|
|
8
|
+
import {
|
|
9
|
+
checkClaudeMcpRegistration,
|
|
10
|
+
type ClaudeMcpScope,
|
|
11
|
+
} from './claude-mcp-config.js';
|
|
12
|
+
|
|
13
|
+
const { Client } = pkg;
|
|
14
|
+
|
|
15
|
+
function parseEnv(raw: string): Record<string, string> {
|
|
16
|
+
const env: Record<string, string> = {};
|
|
17
|
+
for (const line of raw.split('\n')) {
|
|
18
|
+
const trimmed = line.trim();
|
|
19
|
+
if (!trimmed || trimmed.startsWith('#')) continue;
|
|
20
|
+
const idx = trimmed.indexOf('=');
|
|
21
|
+
if (idx <= 0) continue;
|
|
22
|
+
const key = trimmed.slice(0, idx).trim();
|
|
23
|
+
const value = trimmed.slice(idx + 1).trim();
|
|
24
|
+
env[key] = value;
|
|
25
|
+
}
|
|
26
|
+
return env;
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
function ask(question: string): Promise<string> {
|
|
30
|
+
const rl = createInterface({ input: process.stdin, output: process.stdout });
|
|
31
|
+
return new Promise((resolve) => {
|
|
32
|
+
rl.question(question, (answer) => {
|
|
33
|
+
rl.close();
|
|
34
|
+
resolve(answer.trim());
|
|
35
|
+
});
|
|
36
|
+
});
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
function validateScope(scope: string): ClaudeMcpScope {
|
|
40
|
+
if (scope === 'user' || scope === 'project' || scope === 'local') {
|
|
41
|
+
return scope;
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
throw new Error(`Invalid scope: ${scope} (expected: user, project, or local)`);
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
function formatRegistrationCommand(scope: ClaudeMcpScope, entryPoint: string): string {
|
|
48
|
+
return `claude mcp add supermemory --scope ${scope} -- node ${JSON.stringify(entryPoint)}`;
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
function formatRemovalCommand(scope: ClaudeMcpScope): string {
|
|
52
|
+
return `claude mcp remove --scope ${scope} supermemory`;
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
function commandExists(name: string): boolean {
|
|
56
|
+
try {
|
|
57
|
+
execSync(`command -v ${name}`, { stdio: 'ignore', shell: '/bin/zsh' });
|
|
58
|
+
return true;
|
|
59
|
+
} catch {
|
|
60
|
+
return false;
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
function askScope(): Promise<ClaudeMcpScope> {
|
|
65
|
+
return ask('\nRegister for "user", "project", or "local" scope? [user] ').then((answer) => {
|
|
66
|
+
const normalized = answer.toLowerCase();
|
|
67
|
+
if (!normalized) return 'user';
|
|
68
|
+
return validateScope(normalized);
|
|
69
|
+
});
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
function parseArgs(): {
|
|
73
|
+
envFile?: string;
|
|
74
|
+
nonInteractive: boolean;
|
|
75
|
+
registerMcp: boolean;
|
|
76
|
+
scope?: ClaudeMcpScope;
|
|
77
|
+
skipMcp: boolean;
|
|
78
|
+
} {
|
|
79
|
+
const args = process.argv.slice(2);
|
|
80
|
+
let envFile: string | undefined;
|
|
81
|
+
let nonInteractive = false;
|
|
82
|
+
let registerMcp = false;
|
|
83
|
+
let scope: ClaudeMcpScope | undefined;
|
|
84
|
+
let skipMcp = false;
|
|
85
|
+
|
|
86
|
+
for (let index = 0; index < args.length; index += 1) {
|
|
87
|
+
const arg = args[index];
|
|
88
|
+
|
|
89
|
+
if (arg === '--env-file') {
|
|
90
|
+
const value = args[index + 1];
|
|
91
|
+
if (!value) {
|
|
92
|
+
throw new Error('--env-file requires a value');
|
|
93
|
+
}
|
|
94
|
+
envFile = value;
|
|
95
|
+
index += 1;
|
|
96
|
+
continue;
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
if (arg.startsWith('--env-file=')) {
|
|
100
|
+
envFile = arg.slice('--env-file='.length);
|
|
101
|
+
continue;
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
if (arg === '--scope') {
|
|
105
|
+
const value = args[index + 1];
|
|
106
|
+
if (!value) {
|
|
107
|
+
throw new Error('--scope requires a value');
|
|
108
|
+
}
|
|
109
|
+
scope = validateScope(value.toLowerCase());
|
|
110
|
+
index += 1;
|
|
111
|
+
continue;
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
if (arg.startsWith('--scope=')) {
|
|
115
|
+
scope = validateScope(arg.slice('--scope='.length).toLowerCase());
|
|
116
|
+
continue;
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
if (arg === '--register-mcp') {
|
|
120
|
+
registerMcp = true;
|
|
121
|
+
continue;
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
if (arg === '--skip-mcp' || arg === '--skip-claude') {
|
|
125
|
+
skipMcp = true;
|
|
126
|
+
continue;
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
if (arg === '--non-interactive') {
|
|
130
|
+
nonInteractive = true;
|
|
131
|
+
continue;
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
throw new Error(`Unknown argument: ${arg}`);
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
return { envFile, scope, registerMcp, skipMcp, nonInteractive };
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
async function run(): Promise<void> {
|
|
141
|
+
const { envFile, nonInteractive, registerMcp, scope, skipMcp } = parseArgs();
|
|
142
|
+
console.log('Supermemory MCP Setup\n');
|
|
143
|
+
|
|
144
|
+
// Load .env if present
|
|
145
|
+
let env: Record<string, string> = {};
|
|
146
|
+
const envResolution = loadEnvFile({ cliEnvFile: envFile });
|
|
147
|
+
if (envResolution.exists && existsSync(envResolution.path)) {
|
|
148
|
+
env = parseEnv(readFileSync(envResolution.path, 'utf-8'));
|
|
149
|
+
for (const [k, v] of Object.entries(env)) {
|
|
150
|
+
if (!process.env[k]) process.env[k] = v;
|
|
151
|
+
}
|
|
152
|
+
console.log(`[OK] Using env file: ${envResolution.path}`);
|
|
153
|
+
} else if (envResolution.explicit) {
|
|
154
|
+
console.log(`[WARN] Env file not found at ${envResolution.path}; falling back to current process environment`);
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
// Step 1: Check for built MCP entry point
|
|
158
|
+
const entryPoint = resolve('dist/mcp/index.js');
|
|
159
|
+
if (!existsSync(entryPoint)) {
|
|
160
|
+
console.log(`Build output not found at ${entryPoint}`);
|
|
161
|
+
const answer = await ask('Run "npm run build" now? [Y/n] ');
|
|
162
|
+
if (answer === '' || answer.toLowerCase() === 'y') {
|
|
163
|
+
console.log('Building...');
|
|
164
|
+
try {
|
|
165
|
+
execSync('npm run build', { stdio: 'inherit' });
|
|
166
|
+
} catch {
|
|
167
|
+
console.error('Build failed. Fix errors and try again.');
|
|
168
|
+
process.exit(1);
|
|
169
|
+
}
|
|
170
|
+
} else {
|
|
171
|
+
console.log('Skipping build. The MCP server may not work without a build.');
|
|
172
|
+
}
|
|
173
|
+
} else {
|
|
174
|
+
console.log('[OK] Build output found');
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
// Step 2: Quick Postgres connectivity check
|
|
178
|
+
const databaseUrl = env.DATABASE_URL || process.env.DATABASE_URL || '';
|
|
179
|
+
if (databaseUrl) {
|
|
180
|
+
const client = new Client({ connectionString: databaseUrl });
|
|
181
|
+
try {
|
|
182
|
+
await client.connect();
|
|
183
|
+
await client.query('SELECT 1');
|
|
184
|
+
console.log('[OK] PostgreSQL connection successful');
|
|
185
|
+
} catch (error) {
|
|
186
|
+
console.log(
|
|
187
|
+
`[WARN] PostgreSQL connection failed: ${error instanceof Error ? error.message : String(error)}`
|
|
188
|
+
);
|
|
189
|
+
console.log(' The MCP server needs Postgres at runtime. Check DATABASE_URL.');
|
|
190
|
+
} finally {
|
|
191
|
+
await client.end().catch(() => undefined);
|
|
192
|
+
}
|
|
193
|
+
} else {
|
|
194
|
+
console.log('[WARN] DATABASE_URL not set. The MCP server will need it at runtime.');
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
if (!commandExists('claude')) {
|
|
198
|
+
console.error(
|
|
199
|
+
'\nCould not find the "claude" CLI.\nInstall Claude Code first: https://docs.anthropic.com/en/docs/claude-code'
|
|
200
|
+
);
|
|
201
|
+
process.exit(1);
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
if (skipMcp) {
|
|
205
|
+
console.log('[WARN] Skipping MCP registration by request');
|
|
206
|
+
return;
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
let selectedScope = scope;
|
|
210
|
+
if (!selectedScope && !nonInteractive) {
|
|
211
|
+
selectedScope = await askScope();
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
if (!selectedScope) {
|
|
215
|
+
console.log('[WARN] Non-interactive mode requires --scope or --register-mcp to perform Claude MCP registration');
|
|
216
|
+
return;
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
if (nonInteractive && !registerMcp && !scope) {
|
|
220
|
+
console.log('[WARN] Non-interactive mode skipped Claude MCP registration because no explicit scope or --register-mcp flag was provided');
|
|
221
|
+
return;
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
const registrationCheck = checkClaudeMcpRegistration({
|
|
225
|
+
scope: selectedScope,
|
|
226
|
+
name: 'supermemory',
|
|
227
|
+
expectedCommand: 'node',
|
|
228
|
+
expectedArgs: [entryPoint],
|
|
229
|
+
});
|
|
230
|
+
|
|
231
|
+
if (registrationCheck.status === 'match') {
|
|
232
|
+
console.log(`[OK] Supermemory is already registered in ${selectedScope} scope with the expected command path`);
|
|
233
|
+
return;
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
const cmd = formatRegistrationCommand(selectedScope, entryPoint);
|
|
237
|
+
if (registrationCheck.status === 'mismatch') {
|
|
238
|
+
const removeCmd = formatRemovalCommand(selectedScope);
|
|
239
|
+
console.log(`[INFO] Existing ${selectedScope} scope registration does not match the current build output; repairing with: ${removeCmd} && ${cmd}`);
|
|
240
|
+
try {
|
|
241
|
+
execSync(removeCmd, { stdio: 'inherit', shell: '/bin/zsh' });
|
|
242
|
+
} catch (error) {
|
|
243
|
+
const msg = error instanceof Error ? error.message : String(error);
|
|
244
|
+
console.error(`\nCould not remove the existing ${selectedScope} scope registration: ${msg}`);
|
|
245
|
+
process.exit(1);
|
|
246
|
+
}
|
|
247
|
+
} else {
|
|
248
|
+
console.log(`[INFO] No ${selectedScope} scope registration found; registering with: ${cmd}`);
|
|
249
|
+
}
|
|
250
|
+
|
|
251
|
+
try {
|
|
252
|
+
execSync(`claude mcp add supermemory --scope ${selectedScope} -- node ${JSON.stringify(entryPoint)}`, {
|
|
253
|
+
stdio: 'inherit',
|
|
254
|
+
shell: '/bin/zsh',
|
|
255
|
+
});
|
|
256
|
+
} catch (error) {
|
|
257
|
+
const msg = error instanceof Error ? error.message : String(error);
|
|
258
|
+
console.error(`\nRegistration failed: ${msg}`);
|
|
259
|
+
process.exit(1);
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
// Step 5: Success
|
|
263
|
+
console.log('\nSupermemory MCP server registered successfully!');
|
|
264
|
+
console.log(`Scope: ${selectedScope}`);
|
|
265
|
+
console.log('\nVerify with: claude mcp get supermemory');
|
|
266
|
+
}
|
|
267
|
+
|
|
268
|
+
run().catch((error) => {
|
|
269
|
+
console.error('Setup failed:', error);
|
|
270
|
+
process.exit(1);
|
|
271
|
+
});
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
-- Migration: 001_create_pgvector_extension.sql
-- Description: Enable pgvector extension for PostgreSQL
-- Dependencies: None (requires PostgreSQL 12+ with pgvector installed)
-- Created: 2026-02-02

-- Enable pgvector extension (no-op if already installed).
CREATE EXTENSION IF NOT EXISTS vector;

-- Fail the migration loudly if the extension did not register.
DO $$
BEGIN
    IF NOT EXISTS (SELECT 1 FROM pg_extension WHERE extname = 'vector') THEN
        RAISE EXCEPTION 'pgvector extension failed to install';
    END IF;
END $$;

-- Smoke-test the distance operators the application relies on; either cast
-- or operator failing aborts the migration here rather than at query time.
DO $$
DECLARE
    probe FLOAT;
BEGIN
    SELECT '[1,2,3]'::vector <-> '[4,5,6]'::vector INTO probe;  -- euclidean distance
    SELECT '[1,2,3]'::vector <=> '[4,5,6]'::vector INTO probe;  -- cosine distance
    RAISE NOTICE 'pgvector extension verified successfully';
END $$;
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
-- SUPERSEDED: This migration is superseded by the Drizzle ORM migration
-- (drizzle/0000_dapper_the_professor.sql). The canonical memory_embeddings
-- schema uses memory_id as the sole primary key and has no id, chunk_id,
-- dimensions, or updated_at columns. Do NOT run this migration.
--
-- Migration: 002_create_memory_embeddings_table.sql
-- Description: Create memory_embeddings table with vector support
-- Dependencies: 001_create_pgvector_extension.sql
-- Created: 2026-02-02
-- Related: TASK-002 from BACKLOG.md

-- Create memory_embeddings table
-- This table stores vector embeddings for semantic search
-- Based on the architecture research and SQLite schema

CREATE TABLE IF NOT EXISTS memory_embeddings (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    chunk_id UUID NOT NULL,
    memory_id UUID NOT NULL,

    -- Vector embedding (default dimensions: 1536 for text-embedding-3-small)
    -- Adjust dimensions based on your embedding model
    embedding vector(1536) NOT NULL,

    -- Metadata
    model VARCHAR(255) NOT NULL,
    dimensions INTEGER NOT NULL CHECK (dimensions > 0),

    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    -- Constraints
    CONSTRAINT fk_chunk FOREIGN KEY (chunk_id)
        REFERENCES chunks(id)
        ON DELETE CASCADE,
    CONSTRAINT fk_memory FOREIGN KEY (memory_id)
        REFERENCES memories(id)
        ON DELETE CASCADE,
    -- Keep the recorded dimension count consistent with the stored vector.
    CONSTRAINT check_dimensions_match
        CHECK (dimensions = vector_dims(embedding))
);

-- Create standard indexes for foreign keys and lookups
CREATE INDEX IF NOT EXISTS idx_memory_embeddings_chunk_id
    ON memory_embeddings(chunk_id);

CREATE INDEX IF NOT EXISTS idx_memory_embeddings_memory_id
    ON memory_embeddings(memory_id);

CREATE INDEX IF NOT EXISTS idx_memory_embeddings_model
    ON memory_embeddings(model);

CREATE INDEX IF NOT EXISTS idx_memory_embeddings_created_at
    ON memory_embeddings(created_at DESC);

-- Add trigger for updated_at timestamp
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Fix: CREATE TRIGGER has no IF NOT EXISTS, so re-running this otherwise
-- idempotent migration previously failed with "trigger already exists".
-- Drop any prior copy first to keep the whole file safely re-runnable.
DROP TRIGGER IF EXISTS trg_memory_embeddings_updated_at ON memory_embeddings;
CREATE TRIGGER trg_memory_embeddings_updated_at
    BEFORE UPDATE ON memory_embeddings
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();

-- Add comment for documentation
COMMENT ON TABLE memory_embeddings IS 'Stores vector embeddings for semantic search with pgvector support';
COMMENT ON COLUMN memory_embeddings.embedding IS 'Vector embedding for cosine similarity search (default: 1536 dimensions for text-embedding-3-small)';
COMMENT ON COLUMN memory_embeddings.dimensions IS 'Number of dimensions in the embedding vector';
COMMENT ON COLUMN memory_embeddings.model IS 'Embedding model used (e.g., text-embedding-3-small, text-embedding-ada-002)';
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
-- Migration: 003_create_hnsw_index.sql
-- Description: Create HNSW index for fast vector similarity search
-- Dependencies: 002_create_memory_embeddings_table.sql
-- Created: 2026-02-02
-- Related: TASK-005 from BACKLOG.md

-- HNSW (Hierarchical Navigable Small World) Index Configuration
-- Performance targets from BACKLOG.md:
-- - Query performance < 100ms for 10K vectors
-- - ~99% recall accuracy
-- - Sub-100ms approximate nearest neighbor search

-- Create HNSW index with optimized parameters
-- m=16: Number of bi-directional links per node (higher = better recall, more memory)
-- ef_construction=64: Size of dynamic candidate list during construction (higher = better quality, slower build)
CREATE INDEX IF NOT EXISTS idx_memory_embeddings_hnsw
ON memory_embeddings
USING hnsw (embedding vector_cosine_ops)
WITH (
    m = 16,
    ef_construction = 64
);

-- Set search-time tuning parameter
-- ef_search controls the size of the dynamic candidate list during search
-- Higher values = better recall but slower search
-- Default: 40, Recommended for 99% recall: 100
-- This can be adjusted per-session based on performance requirements

-- Global setting (applies to all new sessions on this database).
-- Fix: "ALTER DATABASE CURRENT SET ..." is not valid PostgreSQL syntax; the
-- target database name must be spelled out, so resolve it at runtime.
DO $$
BEGIN
    EXECUTE format('ALTER DATABASE %I SET hnsw.ef_search = 100', current_database());
END $$;

-- Session-level setting (can be adjusted dynamically)
-- SET hnsw.ef_search = 100;

-- Add statistics for query planning
ANALYZE memory_embeddings;

-- Create a helper function for optimal search configuration
CREATE OR REPLACE FUNCTION set_hnsw_search_quality(quality_level TEXT DEFAULT 'balanced')
RETURNS TEXT AS $$
BEGIN
    CASE quality_level
        WHEN 'fast' THEN
            -- Fast but lower recall (~95%)
            EXECUTE 'SET hnsw.ef_search = 40';
            RETURN 'HNSW search quality set to FAST (ef_search=40, ~95% recall)';
        WHEN 'balanced' THEN
            -- Balanced performance and recall (~99%)
            EXECUTE 'SET hnsw.ef_search = 100';
            RETURN 'HNSW search quality set to BALANCED (ef_search=100, ~99% recall)';
        WHEN 'accurate' THEN
            -- Highest recall (~99.5%+) but slower
            EXECUTE 'SET hnsw.ef_search = 200';
            RETURN 'HNSW search quality set to ACCURATE (ef_search=200, ~99.5%+ recall)';
        ELSE
            RAISE EXCEPTION 'Invalid quality_level. Use: fast, balanced, or accurate';
    END CASE;
END;
$$ LANGUAGE plpgsql;

-- Add comments for documentation
COMMENT ON INDEX idx_memory_embeddings_hnsw IS 'HNSW index for fast approximate nearest neighbor search using cosine similarity';
COMMENT ON FUNCTION set_hnsw_search_quality IS 'Helper function to adjust HNSW search quality (fast/balanced/accurate)';

-- Performance validation query
-- This query should use the HNSW index for sub-100ms performance
-- NOTE(review): execution_time_ms is computed per returned row from
-- clock_timestamp(), so it measures elapsed time up to that row, not total
-- query time — adequate as a rough probe, kept as-is.
CREATE OR REPLACE FUNCTION validate_hnsw_performance(
    query_embedding vector(1536),
    result_limit INTEGER DEFAULT 10
)
RETURNS TABLE (
    memory_id UUID,
    similarity FLOAT,
    execution_time_ms NUMERIC
) AS $$
DECLARE
    start_time TIMESTAMPTZ;
    end_time TIMESTAMPTZ;
BEGIN
    start_time := clock_timestamp();

    RETURN QUERY
    SELECT
        me.memory_id,
        1 - (me.embedding <=> query_embedding) AS similarity,
        EXTRACT(MILLISECONDS FROM (clock_timestamp() - start_time)) AS execution_time_ms
    FROM memory_embeddings me
    ORDER BY me.embedding <=> query_embedding
    LIMIT result_limit;
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION validate_hnsw_performance IS 'Validation function to measure HNSW index performance (target: <100ms for 10K vectors)';
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
-- SUPERSEDED: This migration is superseded by the Drizzle ORM migration
-- (drizzle/0000_dapper_the_professor.sql). The canonical memory_embeddings
-- schema uses memory_id as the sole primary key and has no id, chunk_id,
-- dimensions, or updated_at columns. Do NOT run this migration.
--
-- Migration: 004_create_memory_embeddings_standalone.sql
-- Description: Standalone memory_embeddings table for HNSW testing (no FK dependencies)
-- Created: 2026-02-02
-- Purpose: TASK-005 HNSW Phase 1 Testing

-- Drop existing table if it has FK constraints
-- WARNING: destructive — CASCADE removes dependent objects as well.
DROP TABLE IF EXISTS memory_embeddings CASCADE;

-- Create memory_embeddings table WITHOUT foreign key dependencies
CREATE TABLE memory_embeddings (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    chunk_id UUID NOT NULL,
    memory_id UUID NOT NULL,

    -- Vector embedding (1536 dimensions for text-embedding-3-small)
    embedding vector(1536) NOT NULL,

    -- Metadata
    model VARCHAR(255) NOT NULL DEFAULT 'text-embedding-3-small',
    dimensions INTEGER NOT NULL DEFAULT 1536 CHECK (dimensions > 0),

    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    -- Dimension validation
    CONSTRAINT check_dimensions_match
        CHECK (dimensions = vector_dims(embedding))
);

-- Create standard indexes
CREATE INDEX IF NOT EXISTS idx_memory_embeddings_chunk_id
    ON memory_embeddings(chunk_id);

CREATE INDEX IF NOT EXISTS idx_memory_embeddings_memory_id
    ON memory_embeddings(memory_id);

CREATE INDEX IF NOT EXISTS idx_memory_embeddings_model
    ON memory_embeddings(model);

CREATE INDEX IF NOT EXISTS idx_memory_embeddings_created_at
    ON memory_embeddings(created_at DESC);

-- Create HNSW index with optimized parameters
CREATE INDEX IF NOT EXISTS idx_memory_embeddings_hnsw
ON memory_embeddings
USING hnsw (embedding vector_cosine_ops)
WITH (
    m = 16,
    ef_construction = 64
);

-- Set search-time tuning parameter
-- Fix: the database name was hard-coded as "supermemory", which fails on any
-- deployment using a different database name. Apply the setting to whichever
-- database this migration is running against.
DO $$
BEGIN
    EXECUTE format('ALTER DATABASE %I SET hnsw.ef_search = 100', current_database());
END $$;

-- Add statistics for query planning
ANALYZE memory_embeddings;

-- Add comments
COMMENT ON TABLE memory_embeddings IS 'Stores vector embeddings for semantic search with pgvector HNSW support';
COMMENT ON COLUMN memory_embeddings.embedding IS 'Vector embedding for cosine similarity search (1536 dimensions)';
COMMENT ON INDEX idx_memory_embeddings_hnsw IS 'HNSW index for fast approximate nearest neighbor search';

-- Confirm creation.
-- Fix: "\echo" is a psql meta-command and is a syntax error when this file is
-- executed through a driver or migration runner; emit a server-side NOTICE
-- instead so the message works from any client.
DO $$
BEGIN
    RAISE NOTICE 'Memory embeddings table and HNSW index created successfully';
END $$;
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
-- Migration 005: Create Chunks Table
-- Purpose: Store text chunks with token tracking and metadata for embedding generation
-- Dependencies: memories table (from Phase 1 schema)
-- Created: February 2, 2026

-- Each row is one slice of a parent memory's content, ordered by chunk_index.
CREATE TABLE IF NOT EXISTS chunks (
    id           UUID        PRIMARY KEY DEFAULT gen_random_uuid(),
    memory_id    UUID        NOT NULL REFERENCES memories(id) ON DELETE CASCADE,
    content      TEXT        NOT NULL,
    chunk_index  INTEGER     NOT NULL,
    start_offset INTEGER,
    end_offset   INTEGER,
    token_count  INTEGER,
    metadata     JSONB       DEFAULT '{}'::jsonb,
    created_at   TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    -- A memory may not contain two chunks at the same position.
    CONSTRAINT unique_chunk_per_memory UNIQUE (memory_id, chunk_index),

    -- chunk_index is 0-based, so it can never be negative.
    CONSTRAINT non_negative_chunk_index CHECK (chunk_index >= 0),

    -- Offsets are either both absent or form a strictly increasing range.
    CONSTRAINT valid_offsets CHECK (
        (start_offset IS NULL AND end_offset IS NULL) OR
        (start_offset IS NOT NULL AND end_offset IS NOT NULL AND start_offset < end_offset)
    ),

    -- NULL means "not measured"; a measured chunk has at least one token.
    CONSTRAINT positive_token_count CHECK (token_count IS NULL OR token_count > 0)
);
|
|
33
|
+
|
|
34
|
+
-- Indexes supporting the common chunk access patterns.

-- Fetch every chunk belonging to one memory.
CREATE INDEX IF NOT EXISTS idx_chunks_memory_id ON chunks(memory_id);

-- Walk a memory's chunks in order.
CREATE INDEX IF NOT EXISTS idx_chunks_chunk_index ON chunks(memory_id, chunk_index);

-- Range/equality scans on token_count, e.g. batching work by token budget.
CREATE INDEX IF NOT EXISTS idx_chunks_token_count ON chunks(token_count);

-- GIN with jsonb_path_ops: smaller and faster than the default jsonb_ops,
-- but only accelerates containment (@>) queries on metadata.
CREATE INDEX IF NOT EXISTS idx_chunks_metadata ON chunks USING gin(metadata jsonb_path_ops);
|
|
47
|
+
|
|
48
|
+
-- Add helpful comments (stored in the catalog; visible via \d+ in psql)
COMMENT ON TABLE chunks IS 'Stores text chunks with positional and token information for embedding generation';
COMMENT ON COLUMN chunks.id IS 'Unique identifier for the chunk';
COMMENT ON COLUMN chunks.memory_id IS 'Reference to the parent memory this chunk belongs to';
COMMENT ON COLUMN chunks.content IS 'The actual text content of the chunk';
COMMENT ON COLUMN chunks.chunk_index IS 'Sequential index of this chunk within its parent memory (0-based)';
COMMENT ON COLUMN chunks.start_offset IS 'Character offset where this chunk starts in the original content';
COMMENT ON COLUMN chunks.end_offset IS 'Character offset where this chunk ends in the original content';
COMMENT ON COLUMN chunks.token_count IS 'Number of tokens in this chunk (for rate limiting and cost estimation)';
COMMENT ON COLUMN chunks.metadata IS 'Additional metadata about the chunk (e.g., chunking strategy used, overlap info)';
COMMENT ON COLUMN chunks.created_at IS 'Timestamp when this chunk was created';
|
|
59
|
+
|
|
60
|
+
-- Verification queries: confirm the table and all expected indexes exist.
DO $$
DECLARE
    index_count INTEGER;
BEGIN
    -- Verify table exists
    IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'chunks') THEN
        RAISE NOTICE 'Table chunks created successfully';
    ELSE
        RAISE EXCEPTION 'Failed to create chunks table';
    END IF;

    -- Verify indexes exist.
    -- Count the expected indexes instead of using EXISTS: EXISTS is true as
    -- soon as ANY one of the four indexes is present, so the original check
    -- reported success even when index creation had partially failed.
    SELECT COUNT(*) INTO index_count
    FROM pg_indexes
    WHERE tablename = 'chunks'
      AND indexname IN ('idx_chunks_memory_id', 'idx_chunks_chunk_index', 'idx_chunks_token_count', 'idx_chunks_metadata');

    IF index_count = 4 THEN
        RAISE NOTICE 'All indexes created successfully';
    ELSE
        RAISE WARNING 'Only % of 4 expected indexes exist on chunks', index_count;
    END IF;

    -- Count total indexes (includes the PK and UNIQUE constraint indexes)
    RAISE NOTICE 'Total indexes on chunks table: %', (
        SELECT COUNT(*) FROM pg_indexes WHERE tablename = 'chunks'
    );
END $$;
|
|
84
|
+
|
|
85
|
+
-- Sample usage (commented out - for documentation only)
|
|
86
|
+
-- INSERT INTO chunks (memory_id, content, chunk_index, start_offset, end_offset, token_count, metadata)
|
|
87
|
+
-- VALUES (
|
|
88
|
+
-- 'memory-uuid-here',
|
|
89
|
+
-- 'This is a sample chunk of text.',
|
|
90
|
+
-- 0,
|
|
91
|
+
-- 0,
|
|
92
|
+
-- 32,
|
|
93
|
+
-- 8,
|
|
94
|
+
-- '{"strategy": "fixed-size", "overlap": 50}'::jsonb
|
|
95
|
+
-- );
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
-- Create processing_queue table for async job management
-- TASK-008: Chunking Worker Implementation
-- This table manages the processing pipeline for documents

CREATE TABLE IF NOT EXISTS processing_queue (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    document_id UUID NOT NULL REFERENCES documents(id) ON DELETE CASCADE,
    stage VARCHAR(30) NOT NULL DEFAULT 'extraction',
    status VARCHAR(20) NOT NULL DEFAULT 'pending',
    -- NOT NULL so the priority DESC dequeue index never has to order NULLs.
    priority INTEGER NOT NULL DEFAULT 0,
    error TEXT,
    error_code VARCHAR(50),
    -- NOT NULL on both counters: with a NULL on either side, the
    -- attempts <= max_attempts CHECK evaluates to UNKNOWN, which Postgres
    -- treats as passing — silently disabling the retry limit.
    attempts INTEGER NOT NULL DEFAULT 0,
    max_attempts INTEGER NOT NULL DEFAULT 3,
    worker_id VARCHAR(100),
    metadata JSONB DEFAULT '{}'::jsonb,
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    started_at TIMESTAMP WITH TIME ZONE,
    completed_at TIMESTAMP WITH TIME ZONE,
    scheduled_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),

    -- Constraints
    CONSTRAINT processing_queue_stage_check
        CHECK (stage IN ('extraction', 'embedding', 'deduplication', 'relationship', 'profile_update', 'cleanup')),
    CONSTRAINT processing_queue_status_check
        CHECK (status IN ('pending', 'processing', 'completed', 'failed', 'cancelled', 'retry')),
    -- Attempts can never go negative, and may never exceed the limit.
    CONSTRAINT processing_queue_attempts_check
        CHECK (attempts >= 0 AND attempts <= max_attempts)
);
|
|
30
|
+
|
|
31
|
+
-- Indexes for performance
-- Look up all queue entries for a given document.
CREATE INDEX IF NOT EXISTS idx_processing_queue_document ON processing_queue(document_id);
-- Partial index: polling scans only rows that are still claimable.
CREATE INDEX IF NOT EXISTS idx_processing_queue_status ON processing_queue(status) WHERE status IN ('pending', 'retry');
CREATE INDEX IF NOT EXISTS idx_processing_queue_stage ON processing_queue(stage);
-- Find jobs held by a specific worker; rows with NULL worker_id are excluded.
CREATE INDEX IF NOT EXISTS idx_processing_queue_worker ON processing_queue(worker_id) WHERE worker_id IS NOT NULL;
-- Dequeue ordering: highest priority first, then oldest scheduled time.
CREATE INDEX IF NOT EXISTS idx_processing_queue_priority ON processing_queue(priority DESC, scheduled_at ASC) WHERE status IN ('pending', 'retry');
-- Detect stale 'processing' jobs (e.g. crashed workers) via started_at.
CREATE INDEX IF NOT EXISTS idx_processing_queue_stale ON processing_queue(started_at) WHERE status = 'processing';
-- Composite partial index matching the worker's job-selection query shape.
CREATE INDEX IF NOT EXISTS idx_processing_queue_worker_select ON processing_queue(status, stage, priority, scheduled_at) WHERE status IN ('pending', 'retry');

-- Catalog comments documenting the queue's vocabulary.
COMMENT ON TABLE processing_queue IS 'Manages async processing pipeline for documents';
COMMENT ON COLUMN processing_queue.stage IS 'Processing stage: extraction, embedding, deduplication, relationship, profile_update, cleanup';
COMMENT ON COLUMN processing_queue.status IS 'Job status: pending, processing, completed, failed, cancelled, retry';
COMMENT ON COLUMN processing_queue.priority IS 'Higher values = higher priority';
COMMENT ON COLUMN processing_queue.attempts IS 'Number of processing attempts';
COMMENT ON COLUMN processing_queue.max_attempts IS 'Maximum allowed attempts before marking as failed';
|