rag-memory-pg-mcp 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,78 @@
1
/**
 * Check Supabase schema and data.
 *
 * Reports the row count of every expected RAG table, then samples one row
 * from rag_chunks and rag_entity_embeddings to list their columns.
 */

import { createClient } from '@supabase/supabase-js';

// SECURITY FIX: the service-role key grants unrestricted database access and
// must never be committed to source control. The previously hard-coded key is
// public and has to be rotated; supply credentials via the environment.
const SUPABASE_URL = process.env.SUPABASE_URL ?? 'https://qystmdysjemiqlqmhfbh.supabase.co';
const SERVICE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY;

if (!SERVICE_KEY) {
  console.error('āŒ SUPABASE_SERVICE_ROLE_KEY environment variable is required');
  process.exit(1);
}

const supabase = createClient(SUPABASE_URL, SERVICE_KEY);

console.log('šŸ” Checking Supabase Database Schema\n');
console.log('='.repeat(70) + '\n');

// Every table the RAG memory server expects to exist.
const tables = [
  'rag_entities',
  'rag_relationships',
  'rag_documents',
  'rag_chunks',
  'rag_entity_embeddings',
  'rag_chunk_entities',
  'rag_stats',
];

for (const table of tables) {
  try {
    // head:true fetches no rows; count:'exact' still returns the row count.
    const { error, count } = await supabase
      .from(table)
      .select('*', { count: 'exact', head: true });

    if (error) {
      console.log(`āŒ Table "${table}": ERROR - ${error.message}`);
    } else {
      console.log(`āœ… Table "${table}": ${count || 0} rows`);
    }
  } catch (err) {
    // supabase-js reports most failures via `error` above; reaching this
    // catch means an unexpected/network failure, so show the real message
    // instead of the previous guess ("DOES NOT EXIST or ACCESS DENIED").
    console.log(`āŒ Table "${table}": ${err.message}`);
  }
}

console.log('\n' + '='.repeat(70));
console.log('\nšŸ“Š Checking table structures...\n');

/**
 * Print the column names of `table` by sampling a single row.
 * (The two structure checks below were previously copy-pasted; they also
 * silently reported query errors as "Empty table" — now surfaced.)
 */
async function showColumns(table) {
  try {
    const { data, error } = await supabase.from(table).select('*').limit(1);
    if (error) throw new Error(error.message);

    if (data && data.length > 0) {
      console.log(`${table} columns:`, Object.keys(data[0]));
    } else {
      console.log(`${table}: Empty table`);
    }
  } catch (err) {
    console.log(`āŒ Cannot access ${table}:`, err.message);
  }
}

await showColumns('rag_chunks');
await showColumns('rag_entity_embeddings');

console.log('\n' + '='.repeat(70));
console.log('\nāœ… Schema check complete\n');
@@ -0,0 +1,70 @@
1
/**
 * Discover the actual columns of rag_chunks and rag_entity_embeddings:
 * first via information_schema (if an `exec_sql` RPC exists), then by
 * attempting representative inserts and reading the resulting errors.
 */

import { createClient } from '@supabase/supabase-js';

// SECURITY FIX: never hard-code the service-role key (the previously
// committed key is public and must be rotated). Read it from the environment.
const SUPABASE_URL = process.env.SUPABASE_URL ?? 'https://qystmdysjemiqlqmhfbh.supabase.co';
const SERVICE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY;

if (!SERVICE_KEY) {
  console.error('āŒ SUPABASE_SERVICE_ROLE_KEY environment variable is required');
  process.exit(1);
}

const supabase = createClient(SUPABASE_URL, SERVICE_KEY);

console.log('šŸ” Getting table schemas from PostgreSQL\n');

// Query information_schema to get actual columns.
// NOTE: `table` is interpolated into SQL below. The values come from this
// fixed literal list so it is not injectable as written — do not extend it
// to user-supplied input without parameterizing the query.
const tables = ['rag_chunks', 'rag_entity_embeddings'];

for (const table of tables) {
  const { data, error } = await supabase.rpc('exec_sql', {
    query: `
      SELECT column_name, data_type, is_nullable
      FROM information_schema.columns
      WHERE table_name = '${table}'
      ORDER BY ordinal_position;
    `,
  });

  if (error) {
    // Expected on most projects: no exec_sql RPC is defined.
    console.log(`\nšŸ“‹ ${table}:`);
    console.log('   Cannot query schema directly, trying insert test...\n');
  } else if (data) {
    // BUG FIX: the RPC result was previously fetched but never displayed.
    console.log(`\nšŸ“‹ ${table}:`, data);
  }
}

// Alternative: Try inserting with all possible columns
console.log('šŸ“¦ Testing rag_chunks columns:\n');
const chunkTest = {
  document_id: 'test_doc',
  chunk_index: 0,
  content: 'test content',
  start_pos: 0,
  end_pos: 10,
  embedding: null,
};

const { error: chunkErr } = await supabase
  .from('rag_chunks')
  .insert(chunkTest);

console.log('   Result:', chunkErr ? chunkErr.message : 'āœ… All columns accepted');

// Clean up the test row so repeated runs don't hit duplicate-key errors.
if (!chunkErr) {
  await supabase.from('rag_chunks').delete().eq('document_id', 'test_doc');
}

console.log('\nšŸ”® Testing rag_entity_embeddings columns:\n');
const embeddingTest = {
  entity_id: 'test_entity',
  embedding: new Array(384).fill(0.1),
  embedding_text: 'test text',
};

const { error: embErr } = await supabase
  .from('rag_entity_embeddings')
  .insert(embeddingTest);

console.log('   Result:', embErr ? embErr.message : 'āœ… All columns accepted');

// Clean up the test row.
if (!embErr) {
  await supabase.from('rag_entity_embeddings').delete().eq('entity_id', 'test_entity');
}

console.log('\nāœ… Schema test complete');
@@ -0,0 +1,271 @@
1
#!/usr/bin/env node

/**
 * Complete Migration Script: Chunk & Embed All Data
 *
 * This script will:
 * 1. Chunk all documents in the database
 * 2. Generate embeddings for all chunks
 * 3. Generate embeddings for all entities
 *
 * Run this AFTER the MCP server is updated with all 20 tools.
 */

import { createClient } from '@supabase/supabase-js';
import { pipeline } from '@huggingface/transformers';

// SECURITY FIX: the service-role key must come from the environment; the
// previously committed key is public and has to be rotated.
const SUPABASE_URL = process.env.SUPABASE_URL ?? 'https://qystmdysjemiqlqmhfbh.supabase.co';
const SERVICE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY;

if (!SERVICE_KEY) {
  console.error('āŒ SUPABASE_SERVICE_ROLE_KEY environment variable is required');
  process.exit(1);
}

const supabase = createClient(SUPABASE_URL, SERVICE_KEY);

// Configuration
const BATCH_SIZE = 10; // Process documents in batches — NOTE(review): currently unused
const MAX_CHUNK_SIZE = 500; // characters per chunk
const CHUNK_OVERLAP = 50; // characters shared between consecutive chunks

console.log('šŸš€ Starting Complete Data Migration\n');
console.log('='.repeat(70) + '\n');

// Sentence-transformer pipeline, lazily loaded by initEmbeddingModel().
let embeddingModel = null;
32
+
33
/**
 * Load the all-MiniLM-L6-v2 feature-extraction pipeline into the
 * module-level `embeddingModel`.
 *
 * @returns {Promise<boolean>} true if the model loaded, false otherwise
 */
async function initEmbeddingModel() {
  console.log('šŸ¤– Loading sentence transformer model...');
  try {
    embeddingModel = await pipeline(
      'feature-extraction',
      'Xenova/all-MiniLM-L6-v2',
      // BUG FIX: `{ quantized: true }` is the transformers.js v2 option; this
      // package depends on @huggingface/transformers ^3.0.0, where the option
      // is `dtype` and 'q8' selects the 8-bit quantized weights.
      { dtype: 'q8' }
    );
    console.log('āœ… Model loaded successfully\n');
    return true;
  } catch (error) {
    console.error('āŒ Failed to load model:', error.message);
    return false;
  }
}
48
+
49
/**
 * Embed a piece of text with the loaded sentence-transformer.
 *
 * @param {string} text - text to embed
 * @returns {Promise<number[]>} mean-pooled, L2-normalized embedding vector
 * @throws {Error} if initEmbeddingModel() has not run successfully yet
 */
async function generateEmbedding(text) {
  if (embeddingModel === null) {
    throw new Error('Embedding model not initialized');
  }

  const result = await embeddingModel(text, {
    pooling: 'mean',
    normalize: true,
  });

  return Array.from(result.data);
}
61
+
62
// Step 1: Chunk all documents
/**
 * Split every document in rag_documents into overlapping fixed-size chunks
 * and insert them into rag_chunks.
 *
 * BUG FIX: the original loop emitted a final chunk that was entirely
 * contained in the previous one whenever the last window already reached the
 * end of the text (e.g. a 500-char document produced chunks [0,500) AND
 * [450,500)). We now stop as soon as a chunk reaches the end of the text.
 *
 * NOTE(review): this step is not idempotent — re-running it inserts duplicate
 * chunks. Clear rag_chunks first if you need to re-chunk.
 *
 * @returns {Promise<number>} total number of chunks inserted
 */
async function chunkAllDocuments() {
  console.log('šŸ“¦ Step 1: Chunking all documents...\n');

  const { data: documents, error } = await supabase
    .from('rag_documents')
    .select('id, content');

  if (error) {
    throw new Error(`Failed to fetch documents: ${error.message}`);
  }

  console.log(`Found ${documents.length} documents to chunk\n`);

  let totalChunks = 0;

  for (let i = 0; i < documents.length; i++) {
    const doc = documents[i];

    try {
      // Guard against null content so the loop below cannot throw.
      const text = doc.content ?? '';
      let startPos = 0;
      let chunkIndex = 0;
      const chunks = [];

      while (startPos < text.length) {
        const endPos = Math.min(startPos + MAX_CHUNK_SIZE, text.length);

        chunks.push({
          document_id: doc.id,
          chunk_index: chunkIndex,
          content: text.substring(startPos, endPos),
          start_pos: startPos,
          end_pos: endPos,
        });

        // Stop once the end of the document is covered (see BUG FIX above).
        if (endPos === text.length) break;

        startPos += MAX_CHUNK_SIZE - CHUNK_OVERLAP;
        chunkIndex++;
      }

      // Insert chunks
      if (chunks.length > 0) {
        const { error: insertError } = await supabase
          .from('rag_chunks')
          .insert(chunks);

        if (!insertError) {
          totalChunks += chunks.length;
          console.log(`   āœ… [${i + 1}/${documents.length}] ${doc.id}: ${chunks.length} chunks`);
        } else {
          console.log(`   āŒ [${i + 1}/${documents.length}] ${doc.id}: ${insertError.message}`);
        }
      }
    } catch (error) {
      console.log(`   āŒ [${i + 1}/${documents.length}] ${doc.id}: ${error.message}`);
    }
  }

  console.log(`\nāœ… Chunking complete: ${totalChunks} total chunks created\n`);
  return totalChunks;
}
125
+
126
// Step 2: Embed all chunks
/**
 * Generate and store an embedding for every chunk that does not have one.
 * Failures on individual chunks are logged and skipped.
 *
 * @returns {Promise<number>} number of chunks successfully embedded
 */
async function embedAllChunks() {
  console.log('šŸ”® Step 2: Generating embeddings for all chunks...\n');

  const { data: chunks, error } = await supabase
    .from('rag_chunks')
    .select('id, content')
    .is('embedding', null);

  if (error) {
    throw new Error(`Failed to fetch chunks: ${error.message}`);
  }

  console.log(`Found ${chunks.length} chunks to embed\n`);

  let embeddedCount = 0;
  const lastIndex = chunks.length - 1;

  for (const [idx, chunk] of chunks.entries()) {
    try {
      const vector = await generateEmbedding(chunk.content);

      const { error: updateError } = await supabase
        .from('rag_chunks')
        .update({ embedding: vector })
        .eq('id', chunk.id);

      if (updateError) {
        console.log(`   āŒ Chunk ${chunk.id}: ${updateError.message}`);
        continue;
      }

      embeddedCount++;
      // Log every 10th chunk plus the final one to keep output readable.
      if ((idx + 1) % 10 === 0 || idx === lastIndex) {
        console.log(`   āœ… Progress: ${idx + 1}/${chunks.length} chunks embedded`);
      }
    } catch (error) {
      console.log(`   āŒ Chunk ${chunk.id}: ${error.message}`);
    }
  }

  console.log(`\nāœ… Chunk embedding complete: ${embeddedCount}/${chunks.length} embedded\n`);
  return embeddedCount;
}
170
+
171
// Step 3: Embed all entities
/**
 * Build a textual description for each entity (name, type, observations)
 * and upsert its embedding into rag_entity_embeddings.
 *
 * @returns {Promise<number>} number of entities successfully embedded
 */
async function embedAllEntities() {
  console.log('šŸ”® Step 3: Generating embeddings for all entities...\n');

  const { data: entities, error } = await supabase
    .from('rag_entities')
    .select('id, name, entity_type, observations');

  if (error) {
    throw new Error(`Failed to fetch entities: ${error.message}`);
  }

  console.log(`Found ${entities.length} entities to embed\n`);

  let embeddedCount = 0;
  const lastIndex = entities.length - 1;

  for (const [idx, entity] of entities.entries()) {
    try {
      // Describe the entity as one text block: name, type, and (when
      // present) its observations joined into a single line.
      let embeddingText = `Entity: ${entity.name}\nType: ${entity.entity_type}`;
      if (entity.observations && entity.observations.length > 0) {
        embeddingText += `\nObservations: ${entity.observations.join('. ')}`;
      }

      const embedding = await generateEmbedding(embeddingText);

      // Upsert so a re-run refreshes the row instead of duplicating it.
      const { error: insertError } = await supabase
        .from('rag_entity_embeddings')
        .upsert({
          entity_id: entity.id,
          embedding,
          embedding_text: embeddingText,
        });

      if (insertError) {
        console.log(`   āŒ Entity ${entity.name}: ${insertError.message}`);
        continue;
      }

      embeddedCount++;
      // Entities are short to embed; report every 50th plus the final one.
      if ((idx + 1) % 50 === 0 || idx === lastIndex) {
        console.log(`   āœ… Progress: ${idx + 1}/${entities.length} entities embedded`);
      }
    } catch (error) {
      console.log(`   āŒ Entity ${entity.name}: ${error.message}`);
    }
  }

  console.log(`\nāœ… Entity embedding complete: ${embeddedCount}/${entities.length} embedded\n`);
  return embeddedCount;
}
229
+
230
// Main migration
/**
 * Orchestrate the three migration steps and print a summary.
 * Exits with code 1 if the model fails to load or any step throws.
 */
async function main() {
  try {
    // Initialize model
    const modelReady = await initEmbeddingModel();
    if (!modelReady) {
      console.error('āŒ Cannot proceed without embedding model');
      process.exit(1);
    }

    // Step 1: Chunk documents
    const totalChunks = await chunkAllDocuments();

    // Step 2: Embed chunks
    const embeddedChunks = await embedAllChunks();

    // Step 3: Embed entities
    const embeddedEntities = await embedAllEntities();

    // BUG FIX: the summary previously printed a hard-coded "277" for the
    // number of documents; query the real count instead.
    const { count: documentCount } = await supabase
      .from('rag_documents')
      .select('*', { count: 'exact', head: true });

    // Summary
    console.log('='.repeat(70));
    console.log('\nšŸŽ‰ Migration Complete!\n');
    console.log('šŸ“Š Summary:');
    console.log(`   - Documents chunked: ${documentCount ?? 'unknown'}`);
    console.log(`   - Total chunks created: ${totalChunks}`);
    console.log(`   - Chunks embedded: ${embeddedChunks}`);
    console.log(`   - Entities embedded: ${embeddedEntities}`);
    console.log('\nāœ… Your RAG Memory PostgreSQL server is now fully operational!\n');
    console.log('šŸš€ Next steps:');
    console.log('   1. Restart Cursor to load the updated MCP server');
    console.log('   2. Test semantic search with searchNodes');
    console.log('   3. Test hybrid search with hybridSearch');
    console.log('   4. Test detailed context with getDetailedContext\n');

  } catch (error) {
    console.error('\nāŒ Migration failed:', error.message);
    console.error(error.stack);
    process.exit(1);
  }
}

main();
@@ -0,0 +1,51 @@
1
/**
 * Migrate chunks and embeddings from SQLite to PostgreSQL.
 *
 * Since the old SQLite database doesn't have chunks yet, we need to:
 * 1. Chunk all existing documents in PostgreSQL
 * 2. Generate embeddings for all chunks
 * 3. Generate embeddings for all entities
 *
 * This script only prints the plan and sample documents; the actual work is
 * done through the MCP server tools or migrate-all-data.js.
 */

import { createClient } from '@supabase/supabase-js';

// SECURITY FIX: read the service-role key from the environment; the
// previously committed key is public and must be rotated.
const SUPABASE_URL = process.env.SUPABASE_URL ?? 'https://qystmdysjemiqlqmhfbh.supabase.co';
const SERVICE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY;

if (!SERVICE_KEY) {
  console.error('āŒ SUPABASE_SERVICE_ROLE_KEY environment variable is required');
  process.exit(1);
}

const supabase = createClient(SUPABASE_URL, SERVICE_KEY);

console.log('šŸš€ Migration Plan: Chunk & Embed All Documents\n');
console.log('='.repeat(70) + '\n');

// Get all documents
const { data: documents, error } = await supabase
  .from('rag_documents')
  .select('id, metadata')
  .order('created_at', { ascending: true });

if (error) {
  console.error('āŒ Error fetching documents:', error.message);
  process.exit(1);
}

console.log(`šŸ“Š Found ${documents.length} documents in PostgreSQL\n`);
console.log('šŸ“‹ Migration Steps:\n');
// BUG FIX: the step list previously hard-coded stale counts ("277 documents",
// "555 entities"); use the live document count and drop the entity guess.
console.log(`   1. Chunk all ${documents.length} documents (using chunkDocument tool)`);
console.log('   2. Generate embeddings for all chunks (using embedChunks tool)');
console.log('   3. Generate embeddings for all entities (using embedAllEntities tool)');
console.log('   4. Optionally: Link entities to documents (using linkEntitiesToDocument tool)');
console.log('\n' + '='.repeat(70));
console.log('\nšŸ’” Recommendation:\n');
console.log('   Run these operations through the MCP server after restart:');
console.log('   1. First, test with a few documents');
console.log('   2. Then batch process all documents');
console.log('   3. Finally, run embedAllEntities once');
console.log('\nšŸ“ Sample documents to test with:');

// Show first 5 documents
documents.slice(0, 5).forEach((doc, i) => {
  console.log(`   ${i + 1}. ${doc.id} (${doc.metadata?.type || 'unknown'})`);
});

console.log('\nāœ… Schema is ready for migration!\n');
package/package.json ADDED
@@ -0,0 +1,28 @@
1
+ {
2
+ "name": "rag-memory-pg-mcp",
3
+ "version": "1.0.0",
4
+ "description": "PostgreSQL-based RAG Memory MCP Server with Supabase",
5
+ "type": "module",
6
+ "main": "src/index.js",
7
+ "bin": {
8
+ "rag-memory-pg-mcp": "src/index.js"
9
+ },
10
+ "scripts": {
11
+ "start": "node src/index.js"
12
+ },
13
+ "keywords": ["mcp", "rag", "memory", "postgresql", "supabase", "knowledge-graph"],
14
+ "author": "",
15
+ "license": "MIT",
16
+ "repository": {
17
+ "type": "git",
18
+ "url": ""
19
+ },
20
+ "dependencies": {
21
+ "@modelcontextprotocol/sdk": "^1.0.4",
22
+ "@supabase/supabase-js": "^2.39.0",
23
+ "@huggingface/transformers": "^3.0.0"
24
+ },
25
+ "engines": {
26
+ "node": ">=18.0.0"
27
+ }
28
+ }
@@ -0,0 +1,136 @@
1
#!/usr/bin/env node

/**
 * Run schema migration using Supabase client.
 *
 * The Supabase JS client cannot execute DDL, so this script mostly prints
 * the SQL to paste into the dashboard and then probes the current schema.
 */

import { createClient } from '@supabase/supabase-js';

// SECURITY FIX: the service-role key must come from the environment; the
// previously committed key is public and has to be rotated.
const SUPABASE_URL = process.env.SUPABASE_URL ?? 'https://qystmdysjemiqlqmhfbh.supabase.co';
const SERVICE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY;

if (!SERVICE_KEY) {
  console.error('āŒ SUPABASE_SERVICE_ROLE_KEY environment variable is required');
  process.exit(1);
}

const supabase = createClient(SUPABASE_URL, SERVICE_KEY);

console.log('šŸ”§ Running Schema Migration via Supabase Client\n');
console.log('='.repeat(70) + '\n');

// Try to use the PostgreSQL REST API to run SQL
// Note: This requires the service role key and proper RLS policies
19
+
20
/**
 * Attempt to execute raw SQL through an `exec_sql` RPC.
 *
 * NOTE(review): not invoked anywhere in this script — kept for manual use.
 *
 * @param {string} sql - statement to run
 * @param {string} description - label printed before the attempt
 * @returns {Promise<boolean>} true on success, false otherwise (DDL is
 *   expected to fail because the Supabase client cannot run it directly)
 */
async function runSQL(sql, description) {
  console.log(`šŸ“ ${description}...`);

  try {
    // Supabase client doesn't directly support ALTER TABLE; this only works
    // if an exec_sql stored procedure has been created in the database.
    const { error } = await supabase.rpc('exec_sql', { query: sql });

    if (!error) {
      console.log(`   āœ… Success`);
      return true;
    }

    console.log(`   āŒ Error: ${error.message}`);
    console.log(`   šŸ’” This is expected - Supabase client can't run DDL directly`);
    return false;
  } catch (err) {
    console.log(`   āŒ Exception: ${err.message}`);
    return false;
  }
}
43
+
44
console.log('āš ļø  Note: Supabase client cannot run ALTER TABLE commands directly.\n');
console.log('šŸ“‹ You have 2 options:\n');

console.log('Option 1: Use Supabase Dashboard (Recommended)');
console.log('   1. Go to: https://supabase.com/dashboard/project/qystmdysjemiqlqmhfbh');
console.log('   2. Click: SQL Editor');
console.log('   3. Run this SQL:\n');

const sql = `-- Add missing columns
ALTER TABLE rag_chunks
ADD COLUMN IF NOT EXISTS start_pos INTEGER,
ADD COLUMN IF NOT EXISTS end_pos INTEGER;

ALTER TABLE rag_entity_embeddings
ADD COLUMN IF NOT EXISTS embedding_text TEXT;`;

console.log(sql);

console.log('\n\nOption 2: Use psql command line');
console.log('   (Requires direct PostgreSQL connection string)\n');

console.log('='.repeat(70));
console.log('\nšŸ’” After running the SQL, execute:\n');
console.log('   node migrate-all-data.js\n');

// Try to verify current schema
console.log('šŸ” Checking current schema...\n');

/**
 * Probe which columns of `table` exist by attempting inserts with
 * progressively more columns, reporting the first conclusive result and
 * deleting any row that was actually created.
 *
 * REFACTOR: the rag_chunks and rag_entity_embeddings probes below were
 * previously two copy-pasted loops; they now share this helper.
 *
 * @param {string} table - table to probe
 * @param {object[]} candidates - insert payloads, fewest columns first
 * @param {string} cleanupColumn - column used to delete the test row
 */
async function probeColumns(table, candidates, cleanupColumn) {
  for (const candidate of candidates) {
    const cols = Object.keys(candidate).join(', ');
    const { error } = await supabase.from(table).insert(candidate);

    if (!error) {
      console.log(`   āœ… All columns exist: ${cols}`);
      await supabase.from(table).delete().eq(cleanupColumn, candidate[cleanupColumn]);
      return;
    }
    if (error.message.includes('Could not find')) {
      // PostgREST reports unknown columns as "Could not find the 'x' column".
      const missing = error.message.match(/'([^']+)' column/)?.[1];
      console.log(`   āŒ Missing column: ${missing}`);
      return;
    }
    if (error.message.includes('duplicate')) {
      // A duplicate-key error still proves the columns were accepted.
      console.log(`   āœ… Columns work: ${cols}`);
      await supabase.from(table).delete().eq(cleanupColumn, candidate[cleanupColumn]);
      return;
    }
    // Inconclusive error: report it and try the next candidate payload.
    console.log(`   āš ļø  ${error.message}`);
  }
}

console.log('šŸ“¦ Testing rag_chunks columns:');
await probeColumns(
  'rag_chunks',
  [
    { document_id: 'test', chunk_index: 0, content: 'test' },
    { document_id: 'test', chunk_index: 0, content: 'test', start_pos: 0 },
    { document_id: 'test', chunk_index: 0, content: 'test', start_pos: 0, end_pos: 10 },
  ],
  'document_id'
);

console.log('\nšŸ”® Testing rag_entity_embeddings columns:');
await probeColumns(
  'rag_entity_embeddings',
  [
    { entity_id: 'test', embedding: new Array(384).fill(0.1) },
    { entity_id: 'test', embedding: new Array(384).fill(0.1), embedding_text: 'test' },
  ],
  'entity_id'
);

console.log('\n' + '='.repeat(70));
console.log('\nšŸ“ Summary:');
console.log('   - Schema migration SQL is ready');
console.log('   - Run it in Supabase SQL Editor');
console.log('   - Then run: node migrate-all-data.js\n');