@iflow-mcp/joleyline-mcp-memory-libsql 0.0.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +160 -0
- package/dist/config/index.js +89 -0
- package/dist/db/core.js +234 -0
- package/dist/db/index.js +45 -0
- package/dist/db/migrations/fix-vector-dimension.js +187 -0
- package/dist/db/migrations/regenerate-embeddings-after-migration.js +81 -0
- package/dist/db/migrations/run.js +30 -0
- package/dist/db/migrations/schema.js +34 -0
- package/dist/db/migrations/update-vector-dimension.js +98 -0
- package/dist/db/regenerate-embeddings.js +83 -0
- package/dist/db/test-embeddings.js +80 -0
- package/dist/db/test.js +168 -0
- package/dist/db/types.js +1 -0
- package/dist/index.js +460 -0
- package/dist/models/index.js +1 -0
- package/dist/services/database-service.js +159 -0
- package/dist/services/embedding-service.js +180 -0
- package/dist/services/entity-service.js +217 -0
- package/dist/services/graph-service.js +161 -0
- package/dist/services/relation-service.js +119 -0
- package/dist/services/vector-service.js +121 -0
- package/dist/transports/sse-transport.js +146 -0
- package/dist/transports/transport-adapter.js +89 -0
- package/dist/transports/transport.js +1 -0
- package/dist/types/database.js +1 -0
- package/dist/types/index.js +1 -0
- package/dist/types/server-config.js +4 -0
- package/dist/utils/errors.js +103 -0
- package/dist/utils/logger.js +115 -0
- package/package.json +1 -0
|
@@ -0,0 +1,187 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { DatabaseManager } from '../core.js';
|
|
3
|
+
import { databaseConfig } from '../../config/index.js';
|
|
4
|
+
import { logger } from '../../utils/logger.js';
|
|
5
|
+
import { EMBEDDING_DIMENSION } from '../../services/embedding-service.js';
|
|
6
|
+
/**
|
|
7
|
+
* Migration script to fix the database schema to support the correct vector dimensions
|
|
8
|
+
* This script will:
|
|
9
|
+
* 1. Disable foreign key constraints
|
|
10
|
+
* 2. Create backups of all tables (entities, observations, relations)
|
|
11
|
+
* 3. Drop the existing tables in the correct order
|
|
12
|
+
* 4. Create new tables with the updated vector dimension
|
|
13
|
+
* 5. Restore the data from the backup tables (excluding incompatible embeddings)
|
|
14
|
+
* 6. Re-enable foreign key constraints
|
|
15
|
+
*/
|
|
16
|
+
/**
 * Migration script to fix the database schema to support the correct vector dimensions.
 *
 * Steps:
 *  1. Disable foreign key constraints (on the connection, BEFORE the transaction)
 *  2. Create backups of all tables (entities, observations, relations)
 *  3. Drop the existing tables in the correct order
 *  4. Create new tables with the updated vector dimension
 *  5. Restore the data from the backup tables (excluding incompatible embeddings)
 *  6. Re-enable foreign key constraints
 *
 * @returns {Promise<void>} resolves once the migration transaction has committed
 * @throws {Error} wrapping the underlying failure when any migration step fails
 */
async function fixVectorDimension() {
    logger.info('Starting vector dimension fix...');
    // Get database connection
    const config = databaseConfig;
    const dbManager = await DatabaseManager.getInstance(config);
    const client = dbManager.getClient();
    try {
        // 1. Disable foreign key constraints.
        // BUG FIX: in SQLite, `PRAGMA foreign_keys` is a no-op while a transaction
        // is open, so it must be issued on the connection *before* the transaction
        // starts (the original issued it inside the transaction, where it had no effect).
        logger.info('Disabling foreign key constraints...');
        await client.execute(`PRAGMA foreign_keys = OFF;`);
        // Start transaction
        const txn = await client.transaction('write');
        try {
            // 2. Drop any stale backup tables left behind by a previous failed run
            logger.info('Checking for existing backup tables...');
            await txn.execute({
                sql: `DROP TABLE IF EXISTS entities_backup`
            });
            await txn.execute({
                sql: `DROP TABLE IF EXISTS observations_backup`
            });
            await txn.execute({
                sql: `DROP TABLE IF EXISTS relations_backup`
            });
            // 3. Create backups of all tables
            logger.info('Creating backup of entities table...');
            await txn.execute({
                sql: `CREATE TABLE entities_backup AS SELECT * FROM entities`
            });
            logger.info('Creating backup of observations table...');
            await txn.execute({
                sql: `CREATE TABLE observations_backup AS SELECT * FROM observations`
            });
            logger.info('Creating backup of relations table...');
            await txn.execute({
                sql: `CREATE TABLE relations_backup AS SELECT * FROM relations`
            });
            // 4. Drop existing tables, children first (observations and relations
            // reference entities via foreign keys)
            logger.info('Dropping existing tables...');
            await txn.execute({
                sql: `DROP TABLE IF EXISTS observations`
            });
            await txn.execute({
                sql: `DROP TABLE IF EXISTS relations`
            });
            await txn.execute({
                sql: `DROP TABLE IF EXISTS entities`
            });
            // 5. Create new tables with updated vector dimension
            logger.info(`Creating new entities table with ${EMBEDDING_DIMENSION} dimensions...`);
            await txn.execute({
                sql: `
          CREATE TABLE entities (
            name TEXT PRIMARY KEY,
            entity_type TEXT NOT NULL,
            embedding F32_BLOB(${EMBEDDING_DIMENSION}), -- Updated dimension
            created_at DATETIME DEFAULT CURRENT_TIMESTAMP
          )
        `
            });
            logger.info('Creating new observations table...');
            await txn.execute({
                sql: `
          CREATE TABLE observations (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            entity_name TEXT NOT NULL,
            content TEXT NOT NULL,
            created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (entity_name) REFERENCES entities(name)
          )
        `
            });
            logger.info('Creating new relations table...');
            await txn.execute({
                sql: `
          CREATE TABLE relations (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            source TEXT NOT NULL,
            target TEXT NOT NULL,
            relation_type TEXT NOT NULL,
            created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (source) REFERENCES entities(name),
            FOREIGN KEY (target) REFERENCES entities(name)
          )
        `
            });
            // 6. Restore data from backups. Embeddings are deliberately omitted:
            // blobs of the old dimension are incompatible with the new column type.
            logger.info('Restoring entities data from backup (without embeddings)...');
            await txn.execute({
                sql: `
          INSERT INTO entities (name, entity_type, created_at)
          SELECT name, entity_type, created_at FROM entities_backup
        `
            });
            logger.info('Restoring observations data from backup...');
            await txn.execute({
                sql: `
          INSERT INTO observations (entity_name, content, created_at)
          SELECT entity_name, content, created_at FROM observations_backup
        `
            });
            logger.info('Restoring relations data from backup...');
            await txn.execute({
                sql: `
          INSERT INTO relations (source, target, relation_type, created_at)
          SELECT source, target, relation_type, created_at FROM relations_backup
        `
            });
            // 7. Recreate indexes
            logger.info('Recreating indexes...');
            await txn.execute({
                sql: 'CREATE INDEX IF NOT EXISTS idx_entities_name ON entities(name)'
            });
            await txn.execute({
                sql: 'CREATE INDEX IF NOT EXISTS idx_observations_entity ON observations(entity_name)'
            });
            await txn.execute({
                sql: 'CREATE INDEX IF NOT EXISTS idx_relations_source ON relations(source)'
            });
            await txn.execute({
                sql: 'CREATE INDEX IF NOT EXISTS idx_relations_target ON relations(target)'
            });
            await txn.execute({
                sql: 'CREATE INDEX IF NOT EXISTS idx_entities_embedding ON entities(libsql_vector_idx(embedding))'
            });
            // 8. Drop backup tables now that the data has been restored
            logger.info('Dropping backup tables...');
            await txn.execute({
                sql: `DROP TABLE entities_backup`
            });
            await txn.execute({
                sql: `DROP TABLE observations_backup`
            });
            await txn.execute({
                sql: `DROP TABLE relations_backup`
            });
            // Commit transaction
            await txn.commit();
            logger.info('Vector dimension fix completed successfully!');
        }
        catch (error) {
            // Rollback transaction on error
            await txn.rollback();
            throw error;
        }
    }
    catch (error) {
        logger.error('Migration failed:', error);
        throw new Error(`Vector dimension fix failed: ${error instanceof Error ? error.message : String(error)}`);
    }
    finally {
        // Best-effort: restore foreign key enforcement even when the migration
        // failed (also outside any transaction, for the same SQLite reason as above),
        // then close the connection.
        try {
            logger.info('Re-enabling foreign key constraints...');
            await client.execute(`PRAGMA foreign_keys = ON;`);
        }
        catch (pragmaError) {
            logger.error('Failed to re-enable foreign key constraints:', pragmaError);
        }
        // Close database connection
        await dbManager.close();
    }
}
|
|
178
|
+
// Entry point: run the migration and map the outcome onto the conventional
// process exit codes (0 = success, 1 = failure).
(async () => {
    try {
        await fixVectorDimension();
        logger.info('Migration completed successfully');
        process.exit(0);
    }
    catch (error) {
        logger.error('Migration failed:', error);
        process.exit(1);
    }
})();
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { DatabaseManager } from '../index.js';
|
|
3
|
+
import { databaseConfig } from '../../config/index.js';
|
|
4
|
+
import { logger } from '../../utils/logger.js';
|
|
5
|
+
import { embeddingService } from '../../services/embedding-service.js';
|
|
6
|
+
/**
|
|
7
|
+
* Regenerates embeddings for all entities in the database after migration
|
|
8
|
+
* This script should be run after the fix-vector-dimension.ts migration
|
|
9
|
+
* to ensure all entities have embeddings with the correct dimension
|
|
10
|
+
*/
|
|
11
|
+
/**
 * Regenerates embeddings for all entities in the database after migration.
 * This script should be run after the fix-vector-dimension.ts migration
 * to ensure all entities have embeddings with the correct dimension.
 *
 * @returns {Promise<void>} resolves when every retrieved entity has been processed
 * @throws rethrows any error raised outside the per-entity loop
 */
async function regenerateEmbeddingsAfterMigration() {
    logger.info('Starting embedding regeneration after migration...');
    // Get database connection
    const config = databaseConfig;
    const dbManager = await DatabaseManager.get_instance(config);
    try {
        // Get all entities from the database.
        // NOTE(review): a fixed limit of 1000 silently truncates larger graphs —
        // confirm whether get_recent_entities can page, or raise the limit.
        logger.info('Retrieving all entities from the database...');
        const entities = await dbManager.get_recent_entities(1000); // Set a high limit to get all entities
        const totalEntities = entities.length;
        logger.info(`Found ${totalEntities} entities to process`);
        // Per-run tallies reported in the final summary
        let successCount = 0;
        let errorCount = 0;
        let skippedCount = 0;
        for (let i = 0; i < totalEntities; i++) {
            const entity = entities[i];
            const entityName = entity.name;
            const entityType = entity.entityType;
            const observations = entity.observations;
            if (!observations || observations.length === 0) {
                // BUG FIX: skipped entities are now counted, and no longer bypass
                // the periodic progress log below (the original used `continue`).
                logger.warn(`Entity "${entityName}" has no observations, skipping embedding generation`);
                skippedCount++;
            }
            else {
                try {
                    logger.info(`Processing entity ${i + 1}/${totalEntities}: ${entityName} (${entityType})`);
                    // Generate embedding from the entity's concatenated observations
                    logger.info(`Generating embedding for entity "${entityName}" using ${embeddingService.getModelName()}`);
                    const text = observations.join(' ');
                    const embedding = await embeddingService.generateEmbedding(text);
                    logger.info(`Successfully generated embedding with dimension: ${embedding.length}`);
                    // Update entity with new embedding (create_entities is used as an upsert here —
                    // presumably it overwrites the existing row; verify against DatabaseManager)
                    await dbManager.create_entities([{
                        name: entityName,
                        entityType: entityType,
                        observations: observations,
                        embedding: embedding
                    }]);
                    logger.info(`Successfully updated embedding for entity "${entityName}"`);
                    successCount++;
                }
                catch (error) {
                    // Per-entity failures are recorded but do not abort the run
                    logger.error(`Error processing entity "${entityName}":`, error);
                    errorCount++;
                }
            }
            // Log progress every 10 entities (and always for the final one)
            if ((i + 1) % 10 === 0 || i === totalEntities - 1) {
                logger.info(`Progress: ${i + 1}/${totalEntities} entities processed (${successCount} succeeded, ${skippedCount} skipped, ${errorCount} failed)`);
            }
        }
        logger.info(`Embedding regeneration complete. ${successCount} entities updated successfully, ${skippedCount} skipped, ${errorCount} entities failed.`);
    }
    catch (error) {
        logger.error('Error during embedding regeneration:', error);
        throw error;
    }
    finally {
        // Close database connection
        await dbManager.close();
    }
}
|
|
72
|
+
// Entry point: run the regeneration and translate success/failure into
// process exit codes (0 = success, 1 = failure).
(async () => {
    try {
        await regenerateEmbeddingsAfterMigration();
        logger.info('Embedding regeneration process completed successfully');
        process.exit(0);
    }
    catch (error) {
        logger.error('Embedding regeneration process failed:', error);
        process.exit(1);
    }
})();
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { pathToFileURL } from 'node:url';
import { DatabaseManager } from '../index.js';
import { databaseConfig } from '../../config/index.js';
import { schema } from './schema.js';
|
|
4
|
+
/**
 * Applies every DDL statement in the schema migration list, in order,
 * against the configured database.
 *
 * @returns {Promise<void>} resolves once every statement has executed
 * @throws rethrows the first statement error after logging it
 */
async function run_migrations() {
    const manager = await DatabaseManager.get_instance(databaseConfig);
    const client = manager.get_client();
    try {
        console.log('Starting migrations...');
        // Statements must run sequentially: later indexes depend on earlier tables.
        for (const statement of schema) {
            console.log(`Executing: ${statement.slice(0, 50)}...`);
            await client.execute(statement);
        }
        console.log('Migrations completed successfully');
    }
    catch (error) {
        console.error('Error running migrations:', error);
        throw error;
    }
}
|
|
21
|
+
// Run migrations if this file is executed directly
|
|
22
|
+
if (require.main === module) {
|
|
23
|
+
run_migrations()
|
|
24
|
+
.then(() => process.exit(0))
|
|
25
|
+
.catch((error) => {
|
|
26
|
+
console.error(error);
|
|
27
|
+
process.exit(1);
|
|
28
|
+
});
|
|
29
|
+
}
|
|
30
|
+
export { run_migrations };
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
/**
 * Ordered DDL statements that establish the knowledge-graph schema:
 * the entities table (with a 384-dimension F32 embedding column),
 * observations, relations, and the supporting indexes — including the
 * libSQL vector index used for similarity search.
 *
 * Statements are executed sequentially by run_migrations(), so tables
 * appear before the indexes that reference them.
 *
 * NOTE(review): the 384 here should match EMBEDDING_DIMENSION from
 * embedding-service.js (the migration scripts interpolate that constant) —
 * confirm they agree.
 */
export const schema = [
    // Create entities table
    `CREATE TABLE IF NOT EXISTS entities (
    name TEXT PRIMARY KEY,
    entity_type TEXT NOT NULL,
    embedding F32_BLOB(384), -- 384-dimension embedding vector
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  )`,
    // Create observations table
    `CREATE TABLE IF NOT EXISTS observations (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    entity_name TEXT NOT NULL,
    content TEXT NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (entity_name) REFERENCES entities(name)
  )`,
    // Create relations table
    `CREATE TABLE IF NOT EXISTS relations (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    source TEXT NOT NULL,
    target TEXT NOT NULL,
    relation_type TEXT NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (source) REFERENCES entities(name),
    FOREIGN KEY (target) REFERENCES entities(name)
  )`,
    // Create indexes
    `CREATE INDEX IF NOT EXISTS idx_entities_name ON entities(name)`,
    `CREATE INDEX IF NOT EXISTS idx_observations_entity ON observations(entity_name)`,
    `CREATE INDEX IF NOT EXISTS idx_relations_source ON relations(source)`,
    `CREATE INDEX IF NOT EXISTS idx_relations_target ON relations(target)`,
    // Create vector index for similarity search
    `CREATE INDEX IF NOT EXISTS idx_entities_embedding ON entities(libsql_vector_idx(embedding))`,
];
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { DatabaseManager } from '../core.js';
|
|
3
|
+
import { databaseConfig } from '../../config/index.js';
|
|
4
|
+
import { logger } from '../../utils/logger.js';
|
|
5
|
+
import { EMBEDDING_DIMENSION } from '../../services/embedding-service.js';
|
|
6
|
+
/**
|
|
7
|
+
* Migration script to update the database schema to support larger vector dimensions
|
|
8
|
+
* This script will:
|
|
9
|
+
* 1. Create a backup of the entities table
|
|
10
|
+
* 2. Drop the existing entities table
|
|
11
|
+
* 3. Create a new entities table with the updated vector dimension
|
|
12
|
+
* 4. Restore the data from the backup table
|
|
13
|
+
* 5. Drop the backup table
|
|
14
|
+
*/
|
|
15
|
+
/**
 * Migration script to update the database schema to support larger vector dimensions.
 * This script will:
 * 1. Create a backup of the entities table
 * 2. Drop the existing entities table
 * 3. Create a new entities table with the updated vector dimension
 * 4. Restore the data from the backup table (embeddings are dropped, since blobs
 *    of the old dimension are incompatible with the new column type)
 * 5. Drop the backup table
 *
 * All DDL/DML runs inside a single write transaction, rolled back on any failure.
 *
 * NOTE(review): unlike fix-vector-dimension.js, this script never disables
 * foreign key enforcement before dropping `entities`, even though observations
 * and relations reference it. SQLite only enforces FKs when the connection has
 * run `PRAGMA foreign_keys = ON` — confirm that is not the case here, otherwise
 * the DROP TABLE below may fail.
 *
 * @returns {Promise<void>} resolves once the transaction has been committed
 * @throws {Error} wrapping the underlying cause when any step fails
 */
async function migrateVectorDimension() {
    logger.info('Starting vector dimension migration...');
    // Get database connection
    const config = databaseConfig;
    const dbManager = await DatabaseManager.getInstance(config);
    const client = dbManager.getClient();
    try {
        // Start transaction
        const txn = await client.transaction('write');
        try {
            // 1. Check if backup table exists and drop it if it does
            // (a stale backup can remain from a previous failed run)
            logger.info('Checking for existing backup table...');
            await txn.execute({
                sql: `DROP TABLE IF EXISTS entities_backup`
            });
            // 2. Create backup of entities table
            logger.info('Creating backup of entities table...');
            await txn.execute({
                sql: `CREATE TABLE entities_backup AS SELECT * FROM entities`
            });
            // 3. Drop existing entities table
            logger.info('Dropping existing entities table...');
            await txn.execute({
                sql: `DROP TABLE entities`
            });
            // 4. Create new entities table with updated vector dimension
            // (EMBEDDING_DIMENSION is interpolated into the F32_BLOB column type)
            logger.info(`Creating new entities table with ${EMBEDDING_DIMENSION} dimensions...`);
            await txn.execute({
                sql: `
        CREATE TABLE entities (
          name TEXT PRIMARY KEY,
          entity_type TEXT NOT NULL,
          embedding F32_BLOB(${EMBEDDING_DIMENSION}), -- Updated dimension
          created_at DATETIME DEFAULT CURRENT_TIMESTAMP
        )
      `
            });
            // 5. Restore data from backup (excluding embeddings as they're incompatible)
            logger.info('Restoring data from backup (without embeddings)...');
            await txn.execute({
                sql: `
        INSERT INTO entities (name, entity_type, created_at)
        SELECT name, entity_type, created_at FROM entities_backup
      `
            });
            // 6. Recreate index
            // NOTE(review): only the vector index is recreated here; any plain
            // index on entities (e.g. idx_entities_name) is not — confirm intended.
            logger.info('Recreating vector index...');
            await txn.execute({
                sql: 'CREATE INDEX IF NOT EXISTS idx_entities_embedding ON entities(libsql_vector_idx(embedding))'
            });
            // 7. Drop backup table
            logger.info('Dropping backup table...');
            await txn.execute({
                sql: `DROP TABLE entities_backup`
            });
            // Commit transaction
            await txn.commit();
            logger.info('Vector dimension migration completed successfully!');
        }
        catch (error) {
            // Rollback transaction on error
            await txn.rollback();
            throw error;
        }
    }
    catch (error) {
        logger.error('Migration failed:', error);
        throw new Error(`Vector dimension migration failed: ${error instanceof Error ? error.message : String(error)}`);
    }
    finally {
        // Close database connection
        await dbManager.close();
    }
}
|
|
89
|
+
// Entry point: run the migration and map the outcome onto the conventional
// process exit codes (0 = success, 1 = failure).
(async () => {
    try {
        await migrateVectorDimension();
        logger.info('Migration completed successfully');
        process.exit(0);
    }
    catch (error) {
        logger.error('Migration failed:', error);
        process.exit(1);
    }
})();
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { DatabaseManager } from './index.js';
|
|
3
|
+
import { databaseConfig } from '../config/index.js';
|
|
4
|
+
import { logger } from '../utils/logger.js';
|
|
5
|
+
import { embeddingService } from '../services/embedding-service.js';
|
|
6
|
+
/**
|
|
7
|
+
* Regenerates embeddings for all entities in the database
|
|
8
|
+
* This is useful when:
|
|
9
|
+
* 1. Migrating from a previous version without embeddings
|
|
10
|
+
* 2. Changing the embedding model
|
|
11
|
+
* 3. Fixing corrupted embeddings
|
|
12
|
+
*/
|
|
13
|
+
/**
 * Regenerates embeddings for all entities in the database.
 * This is useful when:
 * 1. Migrating from a previous version without embeddings
 * 2. Changing the embedding model
 * 3. Fixing corrupted embeddings
 *
 * @returns {Promise<void>} resolves when every retrieved entity has been processed
 * @throws rethrows any error raised outside the per-entity loop
 */
async function regenerateAllEmbeddings() {
    logger.info('Starting embedding regeneration for all entities...');
    // Get database connection
    const config = databaseConfig;
    const dbManager = await DatabaseManager.get_instance(config);
    try {
        // Get all entities from the database using get_recent_entities with a high limit.
        // NOTE(review): a fixed limit of 1000 silently truncates larger graphs —
        // confirm whether get_recent_entities can page, or raise the limit.
        logger.info('Retrieving all entities from the database...');
        const entities = await dbManager.get_recent_entities(1000); // Set a high limit to get all entities
        const totalEntities = entities.length;
        logger.info(`Found ${totalEntities} entities to process`);
        // Per-run tallies reported in the final summary
        let successCount = 0;
        let errorCount = 0;
        let skippedCount = 0;
        for (let i = 0; i < totalEntities; i++) {
            const entity = entities[i];
            const entityName = entity.name;
            const entityType = entity.entityType;
            const observations = entity.observations;
            if (!observations || observations.length === 0) {
                // BUG FIX: skipped entities are now counted, and no longer bypass
                // the periodic progress log below (the original used `continue`).
                logger.warn(`Entity "${entityName}" has no observations, skipping embedding generation`);
                skippedCount++;
            }
            else {
                try {
                    logger.info(`Processing entity ${i + 1}/${totalEntities}: ${entityName} (${entityType})`);
                    // Generate embedding from the entity's concatenated observations
                    logger.info(`Generating embedding for entity "${entityName}" using ${embeddingService.getModelName()}`);
                    const text = observations.join(' ');
                    const embedding = await embeddingService.generateEmbedding(text);
                    logger.info(`Successfully generated embedding with dimension: ${embedding.length}`);
                    // Update entity with new embedding (create_entities is used as an upsert here —
                    // presumably it overwrites the existing row; verify against DatabaseManager)
                    await dbManager.create_entities([{
                        name: entityName,
                        entityType: entityType,
                        observations: observations,
                        embedding: embedding
                    }]);
                    logger.info(`Successfully updated embedding for entity "${entityName}"`);
                    successCount++;
                }
                catch (error) {
                    // Per-entity failures are recorded but do not abort the run
                    logger.error(`Error processing entity "${entityName}":`, error);
                    errorCount++;
                }
            }
            // Log progress every 10 entities (and always for the final one)
            if ((i + 1) % 10 === 0 || i === totalEntities - 1) {
                logger.info(`Progress: ${i + 1}/${totalEntities} entities processed (${successCount} succeeded, ${skippedCount} skipped, ${errorCount} failed)`);
            }
        }
        logger.info(`Embedding regeneration complete. ${successCount} entities updated successfully, ${skippedCount} skipped, ${errorCount} entities failed.`);
    }
    catch (error) {
        logger.error('Error during embedding regeneration:', error);
        throw error;
    }
    finally {
        // Close database connection
        await dbManager.close();
    }
}
|
|
74
|
+
// Entry point: run the regeneration and translate success/failure into
// process exit codes (0 = success, 1 = failure).
(async () => {
    try {
        await regenerateAllEmbeddings();
        logger.info('Embedding regeneration process completed successfully');
        process.exit(0);
    }
    catch (error) {
        logger.error('Embedding regeneration process failed:', error);
        process.exit(1);
    }
})();
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { DatabaseManager } from './index.js';
|
|
3
|
+
import { databaseConfig } from '../config/index.js';
|
|
4
|
+
import { logger } from '../utils/logger.js';
|
|
5
|
+
import { embeddingService } from '../services/embedding-service.js';
|
|
6
|
+
/**
|
|
7
|
+
* Test script to verify the embedding functionality
|
|
8
|
+
*/
|
|
9
|
+
/**
 * Test script to verify the embedding functionality end to end:
 * embedding generation, entity creation (automatic and explicit embeddings),
 * text and vector search, embedding retrieval, and cleanup.
 *
 * @returns {Promise<void>} resolves when every test step has passed
 * @throws rethrows the first failing test step after logging it
 */
async function testEmbeddings() {
    logger.info('Starting embedding test...');
    // Get database connection
    const config = databaseConfig;
    const dbManager = await DatabaseManager.get_instance(config);
    try {
        // Test 1: Generate an embedding
        logger.info('Test 1: Generating embedding for test text...');
        const testText = "This is a test of the embedding functionality";
        const embedding = await embeddingService.generateEmbedding(testText);
        logger.info(`Successfully generated embedding with dimension: ${embedding.length}`);
        // Test 2: Create an entity with automatic embedding
        logger.info('Test 2: Creating entity with automatic embedding...');
        await dbManager.create_entities([
            {
                name: "TestEntityAuto",
                entityType: "test",
                observations: ["This is a test entity with automatic embedding generation"]
            }
        ]);
        logger.info('Successfully created entity with automatic embedding');
        // Test 3: Create an entity with explicit embedding
        logger.info('Test 3: Creating entity with explicit embedding...');
        await dbManager.create_entities([
            {
                name: "TestEntityExplicit",
                entityType: "test",
                observations: ["This is a test entity with explicit embedding"],
                embedding: embedding
            }
        ]);
        logger.info('Successfully created entity with explicit embedding');
        // Test 4: Search for entities using text query
        logger.info('Test 4: Searching for entities using text query...');
        const textSearchResults = await dbManager.search_nodes("test entity");
        logger.info(`Text search found ${textSearchResults.entities.length} entities`);
        // Test 5: Search for entities using vector query
        logger.info('Test 5: Searching for entities using vector query...');
        const vectorSearchResults = await dbManager.search_nodes(embedding);
        logger.info(`Vector search found ${vectorSearchResults.entities.length} entities`);
        // Test 6: Retrieve and verify entity embeddings
        logger.info('Test 6: Retrieving and verifying entity embeddings...');
        const entity1 = await dbManager.get_entity("TestEntityAuto");
        const entity2 = await dbManager.get_entity("TestEntityExplicit");
        logger.info(`TestEntityAuto embedding dimension: ${entity1.embedding?.length || 'undefined'}`);
        logger.info(`TestEntityExplicit embedding dimension: ${entity2.embedding?.length || 'undefined'}`);
        logger.info('All embedding tests completed successfully!');
    }
    catch (error) {
        logger.error('Embedding test failed:', error);
        throw error;
    }
    finally {
        // Test 7: Clean up test entities.
        // BUG FIX: cleanup now runs in `finally`, so the test fixtures are removed
        // even when an earlier test step throws (the original leaked them on failure).
        try {
            logger.info('Test 7: Cleaning up test entities...');
            await dbManager.delete_entity("TestEntityAuto");
            await dbManager.delete_entity("TestEntityExplicit");
            logger.info('Successfully deleted test entities');
        }
        catch (cleanupError) {
            // Cleanup is best-effort: never mask the original test failure
            logger.warn(`Failed to clean up test entities: ${cleanupError instanceof Error ? cleanupError.message : String(cleanupError)}`);
        }
        // Close database connection
        await dbManager.close();
    }
}
|
|
71
|
+
// Entry point: run the embedding tests and translate the result into
// process exit codes (0 = success, 1 = failure).
(async () => {
    try {
        await testEmbeddings();
        logger.info('Embedding tests completed successfully');
        process.exit(0);
    }
    catch (error) {
        logger.error('Embedding tests failed:', error);
        process.exit(1);
    }
})();
|