@matperez/coderag 0.1.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +154 -0
- package/dist/.tsbuildinfo +1 -0
- package/dist/ast-chunking.d.ts +40 -0
- package/dist/ast-chunking.d.ts.map +1 -0
- package/dist/ast-chunking.js +88 -0
- package/dist/ast-chunking.js.map +1 -0
- package/dist/ast-chunking.test.d.ts +5 -0
- package/dist/ast-chunking.test.d.ts.map +1 -0
- package/dist/ast-chunking.test.js +173 -0
- package/dist/ast-chunking.test.js.map +1 -0
- package/dist/code-tokenizer.d.ts +62 -0
- package/dist/code-tokenizer.d.ts.map +1 -0
- package/dist/code-tokenizer.js +129 -0
- package/dist/code-tokenizer.js.map +1 -0
- package/dist/code-tokenizer.test.d.ts +5 -0
- package/dist/code-tokenizer.test.d.ts.map +1 -0
- package/dist/code-tokenizer.test.js +96 -0
- package/dist/code-tokenizer.test.js.map +1 -0
- package/dist/db/client-pg.d.ts +16 -0
- package/dist/db/client-pg.d.ts.map +1 -0
- package/dist/db/client-pg.js +38 -0
- package/dist/db/client-pg.js.map +1 -0
- package/dist/db/client.d.ts +36 -0
- package/dist/db/client.d.ts.map +1 -0
- package/dist/db/client.js +81 -0
- package/dist/db/client.js.map +1 -0
- package/dist/db/migrations-pg.d.ts +6 -0
- package/dist/db/migrations-pg.d.ts.map +1 -0
- package/dist/db/migrations-pg.js +88 -0
- package/dist/db/migrations-pg.js.map +1 -0
- package/dist/db/migrations.d.ts +9 -0
- package/dist/db/migrations.d.ts.map +1 -0
- package/dist/db/migrations.js +164 -0
- package/dist/db/migrations.js.map +1 -0
- package/dist/db/schema-pg.d.ts +611 -0
- package/dist/db/schema-pg.d.ts.map +1 -0
- package/dist/db/schema-pg.js +66 -0
- package/dist/db/schema-pg.js.map +1 -0
- package/dist/db/schema.d.ts +630 -0
- package/dist/db/schema.d.ts.map +1 -0
- package/dist/db/schema.js +85 -0
- package/dist/db/schema.js.map +1 -0
- package/dist/embeddings.d.ts +92 -0
- package/dist/embeddings.d.ts.map +1 -0
- package/dist/embeddings.js +275 -0
- package/dist/embeddings.js.map +1 -0
- package/dist/embeddings.test.d.ts +5 -0
- package/dist/embeddings.test.d.ts.map +1 -0
- package/dist/embeddings.test.js +255 -0
- package/dist/embeddings.test.js.map +1 -0
- package/dist/hybrid-search.d.ts +47 -0
- package/dist/hybrid-search.d.ts.map +1 -0
- package/dist/hybrid-search.js +215 -0
- package/dist/hybrid-search.js.map +1 -0
- package/dist/hybrid-search.test.d.ts +5 -0
- package/dist/hybrid-search.test.d.ts.map +1 -0
- package/dist/hybrid-search.test.js +252 -0
- package/dist/hybrid-search.test.js.map +1 -0
- package/dist/incremental-tfidf.d.ts +77 -0
- package/dist/incremental-tfidf.d.ts.map +1 -0
- package/dist/incremental-tfidf.js +248 -0
- package/dist/incremental-tfidf.js.map +1 -0
- package/dist/incremental-tfidf.test.d.ts +5 -0
- package/dist/incremental-tfidf.test.d.ts.map +1 -0
- package/dist/incremental-tfidf.test.js +276 -0
- package/dist/incremental-tfidf.test.js.map +1 -0
- package/dist/index.d.ts +18 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +19 -0
- package/dist/index.js.map +1 -0
- package/dist/indexer.d.ts +205 -0
- package/dist/indexer.d.ts.map +1 -0
- package/dist/indexer.js +1331 -0
- package/dist/indexer.js.map +1 -0
- package/dist/indexer.test.d.ts +12 -0
- package/dist/indexer.test.d.ts.map +1 -0
- package/dist/indexer.test.js +471 -0
- package/dist/indexer.test.js.map +1 -0
- package/dist/language-config.d.ts +54 -0
- package/dist/language-config.d.ts.map +1 -0
- package/dist/language-config.js +75 -0
- package/dist/language-config.js.map +1 -0
- package/dist/search-cache.d.ts +63 -0
- package/dist/search-cache.d.ts.map +1 -0
- package/dist/search-cache.js +118 -0
- package/dist/search-cache.js.map +1 -0
- package/dist/search-cache.test.d.ts +5 -0
- package/dist/search-cache.test.d.ts.map +1 -0
- package/dist/search-cache.test.js +194 -0
- package/dist/search-cache.test.js.map +1 -0
- package/dist/storage-factory.d.ts +11 -0
- package/dist/storage-factory.d.ts.map +1 -0
- package/dist/storage-factory.js +17 -0
- package/dist/storage-factory.js.map +1 -0
- package/dist/storage-persistent-pg.d.ts +75 -0
- package/dist/storage-persistent-pg.d.ts.map +1 -0
- package/dist/storage-persistent-pg.js +579 -0
- package/dist/storage-persistent-pg.js.map +1 -0
- package/dist/storage-persistent-pg.test.d.ts +7 -0
- package/dist/storage-persistent-pg.test.d.ts.map +1 -0
- package/dist/storage-persistent-pg.test.js +90 -0
- package/dist/storage-persistent-pg.test.js.map +1 -0
- package/dist/storage-persistent-types.d.ts +110 -0
- package/dist/storage-persistent-types.d.ts.map +1 -0
- package/dist/storage-persistent-types.js +5 -0
- package/dist/storage-persistent-types.js.map +1 -0
- package/dist/storage-persistent.d.ts +231 -0
- package/dist/storage-persistent.d.ts.map +1 -0
- package/dist/storage-persistent.js +897 -0
- package/dist/storage-persistent.js.map +1 -0
- package/dist/storage-persistent.test.d.ts +5 -0
- package/dist/storage-persistent.test.d.ts.map +1 -0
- package/dist/storage-persistent.test.js +325 -0
- package/dist/storage-persistent.test.js.map +1 -0
- package/dist/storage.d.ts +63 -0
- package/dist/storage.d.ts.map +1 -0
- package/dist/storage.js +67 -0
- package/dist/storage.js.map +1 -0
- package/dist/storage.test.d.ts +5 -0
- package/dist/storage.test.d.ts.map +1 -0
- package/dist/storage.test.js +157 -0
- package/dist/storage.test.js.map +1 -0
- package/dist/tfidf.d.ts +97 -0
- package/dist/tfidf.d.ts.map +1 -0
- package/dist/tfidf.js +308 -0
- package/dist/tfidf.js.map +1 -0
- package/dist/tfidf.test.d.ts +5 -0
- package/dist/tfidf.test.d.ts.map +1 -0
- package/dist/tfidf.test.js +181 -0
- package/dist/tfidf.test.js.map +1 -0
- package/dist/utils.d.ts +61 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/utils.js +264 -0
- package/dist/utils.js.map +1 -0
- package/dist/utils.test.d.ts +5 -0
- package/dist/utils.test.d.ts.map +1 -0
- package/dist/utils.test.js +94 -0
- package/dist/utils.test.js.map +1 -0
- package/dist/vector-storage.d.ts +120 -0
- package/dist/vector-storage.d.ts.map +1 -0
- package/dist/vector-storage.js +264 -0
- package/dist/vector-storage.js.map +1 -0
- package/dist/vector-storage.test.d.ts +5 -0
- package/dist/vector-storage.test.d.ts.map +1 -0
- package/dist/vector-storage.test.js +345 -0
- package/dist/vector-storage.test.js.map +1 -0
- package/package.json +85 -0
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* PostgreSQL database client (pg Pool + Drizzle)
|
|
3
|
+
*/
|
|
4
|
+
import type { Pool } from 'pg';
|
|
5
|
+
import { drizzle } from 'drizzle-orm/node-postgres';
|
|
6
|
+
import * as schema from './schema-pg.js';
|
|
7
|
+
import type { PostgresDbConfig } from '../storage-persistent-types.js';
|
|
8
|
+
export interface DbInstancePg {
|
|
9
|
+
db: ReturnType<typeof drizzle<typeof schema>>;
|
|
10
|
+
pool: Pool;
|
|
11
|
+
}
|
|
12
|
+
/**
|
|
13
|
+
* Create Postgres client and run migrations.
|
|
14
|
+
*/
|
|
15
|
+
export declare function createPostgresDb(config: PostgresDbConfig): Promise<DbInstancePg>;
|
|
16
|
+
//# sourceMappingURL=client-pg.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"client-pg.d.ts","sourceRoot":"","sources":["../../src/db/client-pg.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,IAAI,CAAA;AAC9B,OAAO,EAAE,OAAO,EAAE,MAAM,2BAA2B,CAAA;AACnD,OAAO,KAAK,MAAM,MAAM,gBAAgB,CAAA;AACxC,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAA;AAEtE,MAAM,WAAW,YAAY;IAC5B,EAAE,EAAE,UAAU,CAAC,OAAO,OAAO,CAAC,OAAO,MAAM,CAAC,CAAC,CAAA;IAC7C,IAAI,EAAE,IAAI,CAAA;CACV;AA4BD;;GAEG;AACH,wBAAsB,gBAAgB,CAAC,MAAM,EAAE,gBAAgB,GAAG,OAAO,CAAC,YAAY,CAAC,CAWtF"}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* PostgreSQL database client (pg Pool + Drizzle)
|
|
3
|
+
*/
|
|
4
|
+
import { drizzle } from 'drizzle-orm/node-postgres';
|
|
5
|
+
import * as schema from './schema-pg.js';
|
|
6
|
+
/**
 * Resolve a PostgreSQL connection string from the given config.
 *
 * An explicit `connectionString` wins. Otherwise individual fields are
 * taken from the config, falling back to the standard PG* environment
 * variables (PGHOST, PGPORT, PGDATABASE, PGUSER, PGPASSWORD, PGSSLMODE)
 * and finally to built-in defaults (localhost:5432, db "coderag",
 * user "postgres", empty password, no SSL).
 */
function buildConnectionString(config) {
    if ('connectionString' in config && config.connectionString) {
        return config.connectionString;
    }
    const env = process.env;
    const opts = config;
    const hostName = opts.host ?? env.PGHOST ?? 'localhost';
    const portNumber = opts.port ?? (env.PGPORT ? parseInt(env.PGPORT, 10) : 5432);
    const dbName = opts.database ?? env.PGDATABASE ?? 'coderag';
    const userName = opts.user ?? env.PGUSER ?? 'postgres';
    const secret = opts.password ?? env.PGPASSWORD ?? '';
    const useSsl = opts.ssl ?? (env.PGSSLMODE === 'require');
    // Query-string parameters: only sslmode is emitted, and only when needed.
    const query = new URLSearchParams();
    if (useSsl) {
        query.set('sslmode', 'require');
    }
    const queryString = query.toString();
    const suffix = queryString ? `?${queryString}` : '';
    const credentials = `${encodeURIComponent(userName)}:${encodeURIComponent(secret)}`;
    return `postgresql://${credentials}@${hostName}:${portNumber}/${encodeURIComponent(dbName)}${suffix}`;
}
|
|
23
|
+
/**
|
|
24
|
+
* Create Postgres client and run migrations.
|
|
25
|
+
*/
|
|
26
|
+
/**
 * Create a Postgres-backed database handle.
 *
 * Lazily loads the `pg` driver, opens a connection pool (max 10 clients)
 * using the connection string resolved from `config`, wires the pool into
 * Drizzle with the Postgres schema, and runs pending migrations before
 * returning `{ db, pool }` to the caller.
 */
export async function createPostgresDb(config) {
    // `pg` is imported on demand so the package stays usable without it installed.
    const pg = await import('pg');
    const pool = new pg.Pool({
        connectionString: buildConnectionString(config),
        max: 10,
    });
    const db = drizzle(pool, { schema });
    // Ensure the schema exists before handing the instance back.
    const migrations = await import('./migrations-pg.js');
    await migrations.runMigrationsPg(pool);
    return { db, pool };
}
|
|
38
|
+
//# sourceMappingURL=client-pg.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"client-pg.js","sourceRoot":"","sources":["../../src/db/client-pg.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,OAAO,EAAE,OAAO,EAAE,MAAM,2BAA2B,CAAA;AACnD,OAAO,KAAK,MAAM,MAAM,gBAAgB,CAAA;AAiBxC,SAAS,qBAAqB,CAAC,MAAwB;IACtD,IAAI,kBAAkB,IAAI,MAAM,IAAI,MAAM,CAAC,gBAAgB,EAAE,CAAC;QAC7D,OAAO,MAAM,CAAC,gBAAgB,CAAA;IAC/B,CAAC;IACD,MAAM,CAAC,GAAG,MAAiD,CAAA;IAC3D,MAAM,IAAI,GAAG,CAAC,CAAC,IAAI,IAAI,OAAO,CAAC,GAAG,CAAC,MAAM,IAAI,WAAW,CAAA;IACxD,MAAM,IAAI,GAAG,CAAC,CAAC,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAA;IACrF,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,IAAI,SAAS,CAAA;IAClE,MAAM,IAAI,GAAG,CAAC,CAAC,IAAI,IAAI,OAAO,CAAC,GAAG,CAAC,MAAM,IAAI,UAAU,CAAA;IACvD,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,IAAI,EAAE,CAAA;IAC3D,MAAM,GAAG,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,KAAK,SAAS,CAAC,CAAA;IAC1D,MAAM,MAAM,GAAG,IAAI,eAAe,EAAE,CAAA;IACpC,IAAI,GAAG;QAAE,MAAM,CAAC,GAAG,CAAC,SAAS,EAAE,SAAS,CAAC,CAAA;IACzC,MAAM,EAAE,GAAG,MAAM,CAAC,QAAQ,EAAE,CAAA;IAC5B,OAAO,gBAAgB,kBAAkB,CAAC,IAAI,CAAC,IAAI,kBAAkB,CAAC,QAAQ,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,kBAAkB,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,CAAA;AACvJ,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,gBAAgB,CAAC,MAAwB;IAC9D,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,CAAA;IACnC,MAAM,gBAAgB,GAAG,qBAAqB,CAAC,MAAM,CAAC,CAAA;IACtD,MAAM,IAAI,GAAG,IAAI,IAAI,CAAC;QACrB,gBAAgB;QAChB,GAAG,EAAE,EAAE;KACP,CAAC,CAAA;IACF,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,EAAE,EAAE,MAAM,EAAE,CAAC,CAAA;IACpC,MAAM,EAAE,eAAe,EAAE,GAAG,MAAM,MAAM,CAAC,oBAAoB,CAAC,CAAA;IAC9D,MAAM,eAAe,CAAC,IAAI,CAAC,CAAA;IAC3B,OAAO,EAAE,EAAE,EAAE,IAAI,EAAE,CAAA;AACpB,CAAC"}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Database client setup using LibSQL (WASM-compatible)
|
|
3
|
+
*/
|
|
4
|
+
import { type Client } from '@libsql/client';
|
|
5
|
+
import { drizzle } from 'drizzle-orm/libsql';
|
|
6
|
+
import * as schema from './schema.js';
|
|
7
|
+
export interface DbConfig {
|
|
8
|
+
dbPath?: string;
|
|
9
|
+
codebaseRoot?: string;
|
|
10
|
+
/** When true, storeManyChunks inserts in batches. Default false. */
|
|
11
|
+
useBulkInsertChunks?: boolean;
|
|
12
|
+
}
|
|
13
|
+
export interface DbInstance {
|
|
14
|
+
db: ReturnType<typeof drizzle<typeof schema>>;
|
|
15
|
+
client: Client;
|
|
16
|
+
dbPath: string;
|
|
17
|
+
}
|
|
18
|
+
/**
|
|
19
|
+
* Get the global coderag data directory
|
|
20
|
+
* Uses ~/.coderag/projects/<hash>/ for persistent storage
|
|
21
|
+
*/
|
|
22
|
+
export declare function getCoderagDataDir(codebaseRoot: string): string;
|
|
23
|
+
/**
|
|
24
|
+
* Project metadata stored alongside the database
|
|
25
|
+
*/
|
|
26
|
+
export interface ProjectMetadata {
|
|
27
|
+
path: string;
|
|
28
|
+
name: string;
|
|
29
|
+
createdAt: string;
|
|
30
|
+
lastAccessedAt: string;
|
|
31
|
+
}
|
|
32
|
+
/**
|
|
33
|
+
* Create database client (async for LibSQL compatibility)
|
|
34
|
+
*/
|
|
35
|
+
export declare function createDb(config?: DbConfig): Promise<DbInstance>;
|
|
36
|
+
//# sourceMappingURL=client.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../../src/db/client.ts"],"names":[],"mappings":"AAAA;;GAEG;AAMH,OAAO,EAAE,KAAK,MAAM,EAAgB,MAAM,gBAAgB,CAAA;AAC1D,OAAO,EAAE,OAAO,EAAE,MAAM,oBAAoB,CAAA;AAC5C,OAAO,KAAK,MAAM,MAAM,aAAa,CAAA;AAErC,MAAM,WAAW,QAAQ;IACxB,MAAM,CAAC,EAAE,MAAM,CAAA;IACf,YAAY,CAAC,EAAE,MAAM,CAAA;IACrB,oEAAoE;IACpE,mBAAmB,CAAC,EAAE,OAAO,CAAA;CAC7B;AAED,MAAM,WAAW,UAAU;IAC1B,EAAE,EAAE,UAAU,CAAC,OAAO,OAAO,CAAC,OAAO,MAAM,CAAC,CAAC,CAAA;IAC7C,MAAM,EAAE,MAAM,CAAA;IACd,MAAM,EAAE,MAAM,CAAA;CACd;AAED;;;GAGG;AACH,wBAAgB,iBAAiB,CAAC,YAAY,EAAE,MAAM,GAAG,MAAM,CAQ9D;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC/B,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,SAAS,EAAE,MAAM,CAAA;IACjB,cAAc,EAAE,MAAM,CAAA;CACtB;AAmCD;;GAEG;AACH,wBAAsB,QAAQ,CAAC,MAAM,GAAE,QAAa,GAAG,OAAO,CAAC,UAAU,CAAC,CA8BzE"}
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Database client setup using LibSQL (WASM-compatible)
|
|
3
|
+
*/
|
|
4
|
+
import crypto from 'node:crypto';
|
|
5
|
+
import fs from 'node:fs';
|
|
6
|
+
import os from 'node:os';
|
|
7
|
+
import path from 'node:path';
|
|
8
|
+
import { createClient } from '@libsql/client';
|
|
9
|
+
import { drizzle } from 'drizzle-orm/libsql';
|
|
10
|
+
import * as schema from './schema.js';
|
|
11
|
+
/**
|
|
12
|
+
* Get the global coderag data directory
|
|
13
|
+
* Uses ~/.coderag/projects/<hash>/ for persistent storage
|
|
14
|
+
*/
|
|
15
|
+
/**
 * Compute the per-project data directory under ~/.coderag.
 *
 * The project is identified by the first 16 hex characters of the SHA-256
 * digest of its resolved absolute path, so the location is stable across
 * runs and unique per project root.
 */
export function getCoderagDataDir(codebaseRoot) {
    const absoluteRoot = path.resolve(codebaseRoot);
    const digest = crypto.createHash('sha256').update(absoluteRoot).digest('hex');
    const projectKey = digest.slice(0, 16);
    return path.join(os.homedir(), '.coderag', 'projects', projectKey);
}
|
|
23
|
+
/**
|
|
24
|
+
* Write project metadata to help identify which project a database belongs to
|
|
25
|
+
*/
|
|
26
|
+
/**
 * Write project metadata (metadata.json) into the data directory so a
 * database can be traced back to the project it was built for.
 *
 * Preserves the original `createdAt` when a previous metadata file exists
 * and is readable JSON; a missing, unreadable, or corrupt file falls back
 * to "now" instead of throwing out of the caller (previously a truncated
 * metadata.json made JSON.parse throw and crashed createDb).
 */
function writeProjectMetadata(dataDir, codebaseRoot) {
    const metadataPath = path.join(dataDir, 'metadata.json');
    // Keep the original creation time across runs; tolerate corrupt files.
    let createdAt = new Date().toISOString();
    if (fs.existsSync(metadataPath)) {
        try {
            const previous = JSON.parse(fs.readFileSync(metadataPath, 'utf8'));
            if (previous && typeof previous.createdAt === 'string') {
                createdAt = previous.createdAt;
            }
        }
        catch {
            // Corrupt or unreadable previous metadata — start fresh.
        }
    }
    const metadata = {
        path: path.resolve(codebaseRoot),
        name: path.basename(codebaseRoot),
        createdAt,
        lastAccessedAt: new Date().toISOString(),
    };
    fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
}
|
|
38
|
+
/**
|
|
39
|
+
* Clean up old .codebase-search folder from previous versions
|
|
40
|
+
* This folder is no longer used - data is now stored in ~/.coderag/
|
|
41
|
+
*/
|
|
42
|
+
/**
 * Remove the legacy `.codebase-search` directory left behind by older
 * versions; data now lives under ~/.coderag. Deletion failures are logged
 * and otherwise ignored — cleanup is strictly best-effort.
 */
function cleanupOldStorage(codebaseRoot) {
    const legacyDir = path.join(codebaseRoot, '.codebase-search');
    if (!fs.existsSync(legacyDir)) {
        return;
    }
    try {
        fs.rmSync(legacyDir, { recursive: true, force: true });
        console.error(`[INFO] Cleaned up old storage: ${legacyDir}`);
    }
    catch {
        // Best-effort: leave the directory in place if removal fails.
        console.error(`[WARN] Failed to clean up old storage: ${legacyDir}`);
    }
}
|
|
55
|
+
/**
|
|
56
|
+
* Create database client (async for LibSQL compatibility)
|
|
57
|
+
*/
|
|
58
|
+
/**
 * Open (creating if necessary) the LibSQL database for a project.
 *
 * Resolves the per-project directory under ~/.coderag, removes legacy
 * in-repo storage, records project metadata, enables WAL journaling for
 * better concurrency, and returns `{ db, client, dbPath }`.
 */
export async function createDb(config = {}) {
    const root = config.codebaseRoot || process.cwd();
    // Old versions kept data inside the repository; drop that location first.
    cleanupOldStorage(root);
    // Data lives in the global ~/.coderag/projects/<hash>/ directory.
    const dataDir = getCoderagDataDir(root);
    const dbPath = config.dbPath || path.join(dataDir, 'index.db');
    if (!fs.existsSync(dataDir)) {
        fs.mkdirSync(dataDir, { recursive: true });
    }
    // Record which project this database belongs to.
    writeProjectMetadata(dataDir, root);
    // LibSQL client backed by a local file.
    const client = createClient({ url: `file:${dbPath}` });
    // WAL allows concurrent readers while a writer is active.
    await client.execute('PRAGMA journal_mode = WAL');
    const db = drizzle(client, { schema });
    return { db, client, dbPath };
}
|
|
81
|
+
//# sourceMappingURL=client.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"client.js","sourceRoot":"","sources":["../../src/db/client.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,MAAM,MAAM,aAAa,CAAA;AAChC,OAAO,EAAE,MAAM,SAAS,CAAA;AACxB,OAAO,EAAE,MAAM,SAAS,CAAA;AACxB,OAAO,IAAI,MAAM,WAAW,CAAA;AAC5B,OAAO,EAAe,YAAY,EAAE,MAAM,gBAAgB,CAAA;AAC1D,OAAO,EAAE,OAAO,EAAE,MAAM,oBAAoB,CAAA;AAC5C,OAAO,KAAK,MAAM,MAAM,aAAa,CAAA;AAerC;;;GAGG;AACH,MAAM,UAAU,iBAAiB,CAAC,YAAoB;IACrD,8CAA8C;IAC9C,MAAM,cAAc,GAAG,IAAI,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;IACjD,MAAM,IAAI,GAAG,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;IAE9F,kCAAkC;IAClC,MAAM,OAAO,GAAG,EAAE,CAAC,OAAO,EAAE,CAAA;IAC5B,OAAO,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,IAAI,CAAC,CAAA;AACxD,CAAC;AAYD;;GAEG;AACH,SAAS,oBAAoB,CAAC,OAAe,EAAE,YAAoB;IAClE,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,CAAC,CAAA;IACxD,MAAM,QAAQ,GAAoB;QACjC,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,YAAY,CAAC;QAChC,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC;QACjC,SAAS,EAAE,EAAE,CAAC,UAAU,CAAC,YAAY,CAAC;YACrC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,YAAY,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,CAAC,SAAS;YAC7D,CAAC,CAAC,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;QAC3B,cAAc,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;KACxC,CAAA;IACD,EAAE,CAAC,aAAa,CAAC,YAAY,EAAE,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAA;AAClE,CAAC;AAED;;;GAGG;AACH,SAAS,iBAAiB,CAAC,YAAoB;IAC9C,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAA;IAC1D,IAAI,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,CAAC;QAC3B,IAAI,CAAC;YACJ,EAAE,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAA;YACnD,OAAO,CAAC,KAAK,CAAC,kCAAkC,MAAM,EAAE,CAAC,CAAA;QAC1D,CAAC;QAAC,MAAM,CAAC;YACR,+BAA+B;YAC/B,OAAO,CAAC,KAAK,CAAC,0CAA0C,MAAM,EAAE,CAAC,CAAA;QAClE,CAAC;IACF,CAAC;AACF,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,QAAQ,CAAC,SAAmB,EAAE;IACnD,MAAM,YAAY,GAAG,MAAM,CAAC,YAAY,IAAI,OAAO,CAAC,GAAG,EAAE,CAAA;IAEzD,wDAAwD;IACxD,iBAAiB,CAAC,YAAY,CAAC,CAAA;IAE/B,mDAAmD;IACnD,MAAM,KAAK,GAAG,iBAAiB,CAAC,YAAY
,CAAC,CAAA;IAC7C,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,UAAU,CAAC,CAAA;IAE5D,mCAAmC;IACnC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC;QAC3B,EAAE,CAAC,SAAS,CAAC,KAAK,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAA;IACzC,CAAC;IAED,4CAA4C;IAC5C,oBAAoB,CAAC,KAAK,EAAE,YAAY,CAAC,CAAA;IAEzC,uCAAuC;IACvC,MAAM,MAAM,GAAG,YAAY,CAAC;QAC3B,GAAG,EAAE,QAAQ,MAAM,EAAE;KACrB,CAAC,CAAA;IAEF,yCAAyC;IACzC,MAAM,MAAM,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAA;IAEjD,0BAA0B;IAC1B,MAAM,EAAE,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,MAAM,EAAE,CAAC,CAAA;IAEtC,OAAO,EAAE,EAAE,EAAE,MAAM,EAAE,MAAM,EAAE,CAAA;AAC9B,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"migrations-pg.d.ts","sourceRoot":"","sources":["../../src/db/migrations-pg.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,IAAI,CAAA;AAiB9B,wBAAsB,eAAe,CAAC,IAAI,EAAE,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAmF/D"}
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* PostgreSQL migrations (run on connect)
|
|
3
|
+
*/
|
|
4
|
+
/**
 * True when a migration with the given hash has already been applied.
 */
async function migrationExists(pool, hash) {
    const lookup = 'SELECT id FROM __drizzle_migrations WHERE hash = $1';
    const result = await pool.query(lookup, [hash]);
    return result.rows.length !== 0;
}
|
|
8
|
+
/**
 * Persist a migration hash (with the current timestamp) so it is not
 * applied again on subsequent connects.
 */
async function recordMigration(pool, hash) {
    const insert = 'INSERT INTO __drizzle_migrations (hash, created_at) VALUES ($1, $2)';
    const values = [hash, Date.now()];
    await pool.query(insert, values);
}
|
|
11
|
+
/**
 * Create the Postgres schema on first connect.
 *
 * Applied migrations are tracked in `__drizzle_migrations`; the initial
 * schema (files, chunks, document_vectors, idf_scores, index_metadata and
 * their indexes) runs exactly once under the hash
 * `postgres_initial_schema_v1`.
 */
export async function runMigrationsPg(pool) {
    // The bookkeeping table must exist before we can check anything.
    await pool.query(`
    CREATE TABLE IF NOT EXISTS __drizzle_migrations (
      id SERIAL PRIMARY KEY,
      hash TEXT NOT NULL UNIQUE,
      created_at BIGINT NOT NULL
    )
  `);
    const migrationHash = 'postgres_initial_schema_v1';
    if (await migrationExists(pool, migrationHash)) {
        return;
    }
    console.error('[DB] Running migration: postgres_initial_schema_v1');
    // DDL statements, executed in order. Index creation follows each table.
    const statements = [
        `
    CREATE TABLE IF NOT EXISTS files (
      id SERIAL PRIMARY KEY,
      path TEXT NOT NULL UNIQUE,
      content TEXT NOT NULL,
      hash TEXT NOT NULL,
      size INTEGER NOT NULL,
      mtime BIGINT NOT NULL,
      language TEXT,
      indexed_at BIGINT NOT NULL,
      magnitude REAL DEFAULT 0,
      token_count INTEGER DEFAULT 0
    )
  `,
        'CREATE INDEX IF NOT EXISTS files_path_idx ON files(path)',
        'CREATE INDEX IF NOT EXISTS files_hash_idx ON files(hash)',
        `
    CREATE TABLE IF NOT EXISTS chunks (
      id SERIAL PRIMARY KEY,
      file_id INTEGER NOT NULL REFERENCES files(id) ON DELETE CASCADE,
      content TEXT NOT NULL,
      type TEXT NOT NULL,
      start_line INTEGER NOT NULL,
      end_line INTEGER NOT NULL,
      metadata TEXT,
      token_count INTEGER DEFAULT 0,
      magnitude REAL DEFAULT 0
    )
  `,
        'CREATE INDEX IF NOT EXISTS chunks_file_id_idx ON chunks(file_id)',
        'CREATE INDEX IF NOT EXISTS chunks_type_idx ON chunks(type)',
        `
    CREATE TABLE IF NOT EXISTS document_vectors (
      id SERIAL PRIMARY KEY,
      chunk_id INTEGER NOT NULL REFERENCES chunks(id) ON DELETE CASCADE,
      term TEXT NOT NULL,
      tf REAL NOT NULL,
      tfidf REAL NOT NULL,
      raw_freq INTEGER NOT NULL
    )
  `,
        'CREATE INDEX IF NOT EXISTS vectors_chunk_id_idx ON document_vectors(chunk_id)',
        'CREATE INDEX IF NOT EXISTS vectors_term_idx ON document_vectors(term)',
        'CREATE INDEX IF NOT EXISTS vectors_tfidf_idx ON document_vectors(tfidf)',
        'CREATE INDEX IF NOT EXISTS vectors_term_chunk_idx ON document_vectors(term, chunk_id)',
        `
    CREATE TABLE IF NOT EXISTS idf_scores (
      id SERIAL PRIMARY KEY,
      term TEXT NOT NULL UNIQUE,
      idf REAL NOT NULL,
      document_frequency INTEGER NOT NULL
    )
  `,
        'CREATE INDEX IF NOT EXISTS idf_term_idx ON idf_scores(term)',
        `
    CREATE TABLE IF NOT EXISTS index_metadata (
      id SERIAL PRIMARY KEY,
      key TEXT NOT NULL UNIQUE,
      value TEXT NOT NULL,
      updated_at BIGINT NOT NULL
    )
  `,
    ];
    for (const ddl of statements) {
        await pool.query(ddl);
    }
    await recordMigration(pool, migrationHash);
    console.error('[DB] Migration complete: postgres_initial_schema_v1');
}
|
|
88
|
+
//# sourceMappingURL=migrations-pg.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"migrations-pg.js","sourceRoot":"","sources":["../../src/db/migrations-pg.ts"],"names":[],"mappings":"AAAA;;GAEG;AAIH,KAAK,UAAU,eAAe,CAAC,IAAU,EAAE,IAAY;IACtD,MAAM,CAAC,GAAG,MAAM,IAAI,CAAC,KAAK,CACzB,qDAAqD,EACrD,CAAC,IAAI,CAAC,CACN,CAAA;IACD,OAAO,CAAC,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;AACzB,CAAC;AAED,KAAK,UAAU,eAAe,CAAC,IAAU,EAAE,IAAY;IACtD,MAAM,IAAI,CAAC,KAAK,CACf,qEAAqE,EACrE,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,EAAE,CAAC,CAClB,CAAA;AACF,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,eAAe,CAAC,IAAU;IAC/C,MAAM,IAAI,CAAC,KAAK,CAAC;;;;;;EAMhB,CAAC,CAAA;IAEF,MAAM,aAAa,GAAG,4BAA4B,CAAA;IAClD,IAAI,CAAC,CAAC,MAAM,eAAe,CAAC,IAAI,EAAE,aAAa,CAAC,CAAC,EAAE,CAAC;QACnD,OAAO,CAAC,KAAK,CAAC,oDAAoD,CAAC,CAAA;QAEnE,MAAM,IAAI,CAAC,KAAK,CAAC;;;;;;;;;;;;;GAahB,CAAC,CAAA;QACF,MAAM,IAAI,CAAC,KAAK,CAAC,0DAA0D,CAAC,CAAA;QAC5E,MAAM,IAAI,CAAC,KAAK,CAAC,0DAA0D,CAAC,CAAA;QAE5E,MAAM,IAAI,CAAC,KAAK,CAAC;;;;;;;;;;;;GAYhB,CAAC,CAAA;QACF,MAAM,IAAI,CAAC,KAAK,CAAC,kEAAkE,CAAC,CAAA;QACpF,MAAM,IAAI,CAAC,KAAK,CAAC,4DAA4D,CAAC,CAAA;QAE9E,MAAM,IAAI,CAAC,KAAK,CAAC;;;;;;;;;GAShB,CAAC,CAAA;QACF,MAAM,IAAI,CAAC,KAAK,CAAC,+EAA+E,CAAC,CAAA;QACjG,MAAM,IAAI,CAAC,KAAK,CAAC,uEAAuE,CAAC,CAAA;QACzF,MAAM,IAAI,CAAC,KAAK,CAAC,yEAAyE,CAAC,CAAA;QAC3F,MAAM,IAAI,CAAC,KAAK,CAAC,uFAAuF,CAAC,CAAA;QAEzG,MAAM,IAAI,CAAC,KAAK,CAAC;;;;;;;GAOhB,CAAC,CAAA;QACF,MAAM,IAAI,CAAC,KAAK,CAAC,6DAA6D,CAAC,CAAA;QAE/E,MAAM,IAAI,CAAC,KAAK,CAAC;;;;;;;GAOhB,CAAC,CAAA;QAEF,MAAM,eAAe,CAAC,IAAI,EAAE,aAAa,CAAC,CAAA;QAC1C,OAAO,CAAC,KAAK,CAAC,qDAAqD,CAAC,CAAA;IACrE,CAAC;AACF,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"migrations.d.ts","sourceRoot":"","sources":["../../src/db/migrations.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAA;AAuB5C;;GAEG;AACH,wBAAsB,aAAa,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CA2KjE"}
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Database migrations using LibSQL
|
|
3
|
+
*/
|
|
4
|
+
/**
|
|
5
|
+
* Check if a migration exists
|
|
6
|
+
*/
|
|
7
|
+
/**
 * True when the migration identified by `hash` has already been run.
 */
async function migrationExists(client, hash) {
    const lookup = {
        sql: 'SELECT id FROM __drizzle_migrations WHERE hash = ?',
        args: [hash],
    };
    const { rows } = await client.execute(lookup);
    return rows.length !== 0;
}
|
|
14
|
+
/**
|
|
15
|
+
* Record a migration
|
|
16
|
+
*/
|
|
17
|
+
/**
 * Mark a migration as applied by storing its hash with the current time.
 */
async function recordMigration(client, hash) {
    const stamp = Date.now();
    await client.execute({
        sql: 'INSERT INTO __drizzle_migrations (hash, created_at) VALUES (?, ?)',
        args: [hash, stamp],
    });
}
|
|
23
|
+
/**
|
|
24
|
+
* Run all migrations
|
|
25
|
+
*/
|
|
26
|
+
/**
 * Apply all pending LibSQL migrations in order.
 *
 * Migrations are idempotent and tracked by hash in `__drizzle_migrations`:
 *   1. initial_schema_v1           — files, document_vectors, idf_scores, index_metadata
 *   2. add_magnitude_column_v1     — pre-computed TF-IDF vector magnitude on files
 *   3. add_composite_term_index_v1 — (term, file_id) index for searchByTerms()
 *   4. add_token_count_column_v1   — document length for BM25 normalization
 *   5. add_chunks_table_v1         — chunk-level indexing; rebuilds document_vectors
 */
export async function runMigrations(client) {
    // Bookkeeping table for applied migrations.
    await client.execute(`
    CREATE TABLE IF NOT EXISTS __drizzle_migrations (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      hash TEXT NOT NULL UNIQUE,
      created_at INTEGER NOT NULL
    );
  `);
    // Each step: unique hash, the DDL/DML to run, and an optional trailing note.
    const steps = [
        {
            hash: 'initial_schema_v1',
            apply: async () => {
                await client.execute(`
      -- Files table
      CREATE TABLE IF NOT EXISTS files (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        path TEXT NOT NULL UNIQUE,
        content TEXT NOT NULL,
        hash TEXT NOT NULL,
        size INTEGER NOT NULL,
        mtime INTEGER NOT NULL,
        language TEXT,
        indexed_at INTEGER NOT NULL
      );
    `);
                await client.execute('CREATE INDEX IF NOT EXISTS files_path_idx ON files(path);');
                await client.execute('CREATE INDEX IF NOT EXISTS files_hash_idx ON files(hash);');
                await client.execute(`
      -- Document vectors table
      CREATE TABLE IF NOT EXISTS document_vectors (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        file_id INTEGER NOT NULL,
        term TEXT NOT NULL,
        tf REAL NOT NULL,
        tfidf REAL NOT NULL,
        raw_freq INTEGER NOT NULL,
        FOREIGN KEY (file_id) REFERENCES files(id) ON DELETE CASCADE
      );
    `);
                await client.execute('CREATE INDEX IF NOT EXISTS vectors_file_id_idx ON document_vectors(file_id);');
                await client.execute('CREATE INDEX IF NOT EXISTS vectors_term_idx ON document_vectors(term);');
                await client.execute('CREATE INDEX IF NOT EXISTS vectors_tfidf_idx ON document_vectors(tfidf);');
                await client.execute(`
      -- IDF scores table
      CREATE TABLE IF NOT EXISTS idf_scores (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        term TEXT NOT NULL UNIQUE,
        idf REAL NOT NULL,
        document_frequency INTEGER NOT NULL
      );
    `);
                await client.execute('CREATE INDEX IF NOT EXISTS idf_term_idx ON idf_scores(term);');
                await client.execute(`
      -- Index metadata table
      CREATE TABLE IF NOT EXISTS index_metadata (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        key TEXT NOT NULL UNIQUE,
        value TEXT NOT NULL,
        updated_at INTEGER NOT NULL
      );
    `);
            },
        },
        {
            hash: 'add_magnitude_column_v1',
            apply: async () => {
                // Pre-computed TF-IDF vector magnitude, defaulting to 0.
                await client.execute('ALTER TABLE files ADD COLUMN magnitude REAL DEFAULT 0;');
            },
        },
        {
            hash: 'add_composite_term_index_v1',
            apply: async () => {
                // searchByTerms() filters by term first, then groups by file_id.
                await client.execute('CREATE INDEX IF NOT EXISTS vectors_term_file_idx ON document_vectors(term, file_id);');
            },
        },
        {
            hash: 'add_token_count_column_v1',
            apply: async () => {
                // Document length for BM25 normalization, defaulting to 0.
                await client.execute('ALTER TABLE files ADD COLUMN token_count INTEGER DEFAULT 0;');
            },
        },
        {
            hash: 'add_chunks_table_v1',
            apply: async () => {
                await client.execute(`
      -- Create chunks table
      CREATE TABLE IF NOT EXISTS chunks (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        file_id INTEGER NOT NULL,
        content TEXT NOT NULL,
        type TEXT NOT NULL,
        start_line INTEGER NOT NULL,
        end_line INTEGER NOT NULL,
        metadata TEXT,
        token_count INTEGER DEFAULT 0,
        magnitude REAL DEFAULT 0,
        FOREIGN KEY (file_id) REFERENCES files(id) ON DELETE CASCADE
      );
    `);
                await client.execute('CREATE INDEX IF NOT EXISTS chunks_file_id_idx ON chunks(file_id);');
                await client.execute('CREATE INDEX IF NOT EXISTS chunks_type_idx ON chunks(type);');
                // Vectors now reference chunks, not files: drop and recreate.
                await client.execute('DROP TABLE IF EXISTS document_vectors;');
                await client.execute(`
      CREATE TABLE document_vectors (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        chunk_id INTEGER NOT NULL,
        term TEXT NOT NULL,
        tf REAL NOT NULL,
        tfidf REAL NOT NULL,
        raw_freq INTEGER NOT NULL,
        FOREIGN KEY (chunk_id) REFERENCES chunks(id) ON DELETE CASCADE
      );
    `);
                await client.execute('CREATE INDEX IF NOT EXISTS vectors_chunk_id_idx ON document_vectors(chunk_id);');
                await client.execute('CREATE INDEX IF NOT EXISTS vectors_term_idx ON document_vectors(term);');
                await client.execute('CREATE INDEX IF NOT EXISTS vectors_tfidf_idx ON document_vectors(tfidf);');
                await client.execute('CREATE INDEX IF NOT EXISTS vectors_term_chunk_idx ON document_vectors(term, chunk_id);');
                // IDF scores will be recalculated against the new chunk vectors.
                await client.execute('DELETE FROM idf_scores;');
            },
            note: '[DB] Note: Index needs to be rebuilt after this migration',
        },
    ];
    for (const step of steps) {
        if (await migrationExists(client, step.hash)) {
            continue;
        }
        console.error(`[DB] Running migration: ${step.hash}`);
        await step.apply();
        await recordMigration(client, step.hash);
        console.error(`[DB] Migration complete: ${step.hash}`);
        if (step.note) {
            console.error(step.note);
        }
    }
}
|
|
164
|
+
//# sourceMappingURL=migrations.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"migrations.js","sourceRoot":"","sources":["../../src/db/migrations.ts"],"names":[],"mappings":"AAAA;;GAEG;AAIH;;GAEG;AACH,KAAK,UAAU,eAAe,CAAC,MAAc,EAAE,IAAY;IAC1D,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,OAAO,CAAC;QACnC,GAAG,EAAE,oDAAoD;QACzD,IAAI,EAAE,CAAC,IAAI,CAAC;KACZ,CAAC,CAAA;IACF,OAAO,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;AAC9B,CAAC;AAED;;GAEG;AACH,KAAK,UAAU,eAAe,CAAC,MAAc,EAAE,IAAY;IAC1D,MAAM,MAAM,CAAC,OAAO,CAAC;QACpB,GAAG,EAAE,mEAAmE;QACxE,IAAI,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,EAAE,CAAC;KACxB,CAAC,CAAA;AACH,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,KAAK,UAAU,aAAa,CAAC,MAAc;IACjD,8CAA8C;IAC9C,MAAM,MAAM,CAAC,OAAO,CAAC;;;;;;GAMnB,CAAC,CAAA;IAEH,8BAA8B;IAC9B,MAAM,cAAc,GAAG,mBAAmB,CAAA;IAC1C,IAAI,CAAC,CAAC,MAAM,eAAe,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC,EAAE,CAAC;QACtD,OAAO,CAAC,KAAK,CAAC,2CAA2C,CAAC,CAAA;QAE1D,MAAM,MAAM,CAAC,OAAO,CAAC;;;;;;;;;;;;KAYlB,CAAC,CAAA;QAEJ,MAAM,MAAM,CAAC,OAAO,CAAC,2DAA2D,CAAC,CAAA;QACjF,MAAM,MAAM,CAAC,OAAO,CAAC,2DAA2D,CAAC,CAAA;QAEjF,MAAM,MAAM,CAAC,OAAO,CAAC;;;;;;;;;;;KAWlB,CAAC,CAAA;QAEJ,MAAM,MAAM,CAAC,OAAO,CACnB,8EAA8E,CAC9E,CAAA;QACD,MAAM,MAAM,CAAC,OAAO,CAAC,wEAAwE,CAAC,CAAA;QAC9F,MAAM,MAAM,CAAC,OAAO,CAAC,0EAA0E,CAAC,CAAA;QAEhG,MAAM,MAAM,CAAC,OAAO,CAAC;;;;;;;;KAQlB,CAAC,CAAA;QAEJ,MAAM,MAAM,CAAC,OAAO,CAAC,8DAA8D,CAAC,CAAA;QAEpF,MAAM,MAAM,CAAC,OAAO,CAAC;;;;;;;;KAQlB,CAAC,CAAA;QAEJ,MAAM,eAAe,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC7C,OAAO,CAAC,KAAK,CAAC,4CAA4C,CAAC,CAAA;IAC5D,CAAC;IAED,8FAA8F;IAC9F,MAAM,cAAc,GAAG,yBAAyB,CAAA;IAChD,IAAI,CAAC,CAAC,MAAM,eAAe,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC,EAAE,CAAC;QACtD,OAAO,CAAC,KAAK,CAAC,iDAAiD,CAAC,CAAA;QAEhE,sCAAsC;QACtC,MAAM,MAAM,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAA;QAE9E,MAAM,eAAe,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC7C,OAAO,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAA;IAClE,CAAC;IAED,6DAA6D;IAC7D,MAAM,cAAc,GAAG,6BAA6B,CAAA;IACpD,IAAI,CAAC,CAAC,MAAM,eAAe,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC,EAAE,CAAC;QACtD,OAAO,CAAC,KAAK,CAAC,qDAAqD,CAAC,CAAA;QAEpE,mEAAmE;QACnE,gEAAgE;QAChE,MAAM,MAAM,CAAC,OAAO,CACnB,sFAAsF,CACtF,CAAA;QAED,MAAM,eAAe,CAAC,M
AAM,EAAE,cAAc,CAAC,CAAA;QAC7C,OAAO,CAAC,KAAK,CAAC,sDAAsD,CAAC,CAAA;IACtE,CAAC;IAED,6EAA6E;IAC7E,MAAM,cAAc,GAAG,2BAA2B,CAAA;IAClD,IAAI,CAAC,CAAC,MAAM,eAAe,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC,EAAE,CAAC;QACtD,OAAO,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAA;QAElE,wCAAwC;QACxC,MAAM,MAAM,CAAC,OAAO,CAAC,6DAA6D,CAAC,CAAA;QAEnF,MAAM,eAAe,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC7C,OAAO,CAAC,KAAK,CAAC,oDAAoD,CAAC,CAAA;IACpE,CAAC;IAED,oEAAoE;IACpE,MAAM,cAAc,GAAG,qBAAqB,CAAA;IAC5C,IAAI,CAAC,CAAC,MAAM,eAAe,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC,EAAE,CAAC;QACtD,OAAO,CAAC,KAAK,CAAC,6CAA6C,CAAC,CAAA;QAE5D,MAAM,MAAM,CAAC,OAAO,CAAC;;;;;;;;;;;;;;KAclB,CAAC,CAAA;QAEJ,MAAM,MAAM,CAAC,OAAO,CAAC,mEAAmE,CAAC,CAAA;QACzF,MAAM,MAAM,CAAC,OAAO,CAAC,6DAA6D,CAAC,CAAA;QAEnF,6DAA6D;QAC7D,MAAM,MAAM,CAAC,OAAO,CAAC,wCAAwC,CAAC,CAAA;QAE9D,MAAM,MAAM,CAAC,OAAO,CAAC;;;;;;;;;;KAUlB,CAAC,CAAA;QAEJ,MAAM,MAAM,CAAC,OAAO,CACnB,gFAAgF,CAChF,CAAA;QACD,MAAM,MAAM,CAAC,OAAO,CAAC,wEAAwE,CAAC,CAAA;QAC9F,MAAM,MAAM,CAAC,OAAO,CAAC,0EAA0E,CAAC,CAAA;QAChG,MAAM,MAAM,CAAC,OAAO,CACnB,wFAAwF,CACxF,CAAA;QAED,0CAA0C;QAC1C,MAAM,MAAM,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAA;QAE/C,MAAM,eAAe,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC7C,OAAO,CAAC,KAAK,CAAC,8CAA8C,CAAC,CAAA;QAC7D,OAAO,CAAC,KAAK,CAAC,2DAA2D,CAAC,CAAA;IAC3E,CAAC;AACF,CAAC"}
|