@hatk/hatk 0.0.1-alpha.22 → 0.0.1-alpha.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/backfill.d.ts.map +1 -1
- package/dist/backfill.js +16 -4
- package/dist/cli.js +21 -33
- package/dist/config.d.ts +1 -0
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +1 -0
- package/dist/database/adapter-factory.d.ts +6 -0
- package/dist/database/adapter-factory.d.ts.map +1 -0
- package/dist/database/adapter-factory.js +20 -0
- package/dist/database/adapters/duckdb-search.d.ts +12 -0
- package/dist/database/adapters/duckdb-search.d.ts.map +1 -0
- package/dist/database/adapters/duckdb-search.js +27 -0
- package/dist/database/adapters/duckdb.d.ts +25 -0
- package/dist/database/adapters/duckdb.d.ts.map +1 -0
- package/dist/database/adapters/duckdb.js +161 -0
- package/dist/database/adapters/sqlite-search.d.ts +18 -0
- package/dist/database/adapters/sqlite-search.d.ts.map +1 -0
- package/dist/database/adapters/sqlite-search.js +38 -0
- package/dist/database/adapters/sqlite.d.ts +18 -0
- package/dist/database/adapters/sqlite.d.ts.map +1 -0
- package/dist/database/adapters/sqlite.js +87 -0
- package/dist/database/db.d.ts +149 -0
- package/dist/database/db.d.ts.map +1 -0
- package/dist/database/db.js +1456 -0
- package/dist/database/dialect.d.ts +45 -0
- package/dist/database/dialect.d.ts.map +1 -0
- package/dist/database/dialect.js +72 -0
- package/dist/database/fts.d.ts +24 -0
- package/dist/database/fts.d.ts.map +1 -0
- package/dist/database/fts.js +777 -0
- package/dist/database/index.d.ts +7 -0
- package/dist/database/index.d.ts.map +1 -0
- package/dist/database/index.js +6 -0
- package/dist/database/ports.d.ts +44 -0
- package/dist/database/ports.d.ts.map +1 -0
- package/dist/database/ports.js +1 -0
- package/dist/database/schema.d.ts +60 -0
- package/dist/database/schema.d.ts.map +1 -0
- package/dist/database/schema.js +388 -0
- package/dist/feeds.js +1 -1
- package/dist/hooks.js +1 -1
- package/dist/hydrate.js +1 -1
- package/dist/indexer.d.ts.map +1 -1
- package/dist/indexer.js +3 -3
- package/dist/labels.js +2 -2
- package/dist/main.js +30 -10
- package/dist/oauth/db.d.ts.map +1 -1
- package/dist/oauth/db.js +41 -15
- package/dist/oauth/server.js +4 -4
- package/dist/opengraph.js +1 -1
- package/dist/seed.js +1 -1
- package/dist/server.js +4 -4
- package/dist/setup.d.ts +10 -1
- package/dist/setup.d.ts.map +1 -1
- package/dist/setup.js +2 -2
- package/dist/test.d.ts +1 -1
- package/dist/test.d.ts.map +1 -1
- package/dist/test.js +22 -8
- package/dist/views.js +1 -1
- package/dist/vite-plugin.d.ts.map +1 -1
- package/dist/vite-plugin.js +10 -0
- package/dist/xrpc.js +2 -2
- package/package.json +3 -1
package/dist/backfill.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"backfill.d.ts","sourceRoot":"","sources":["../src/backfill.ts"],"names":[],"mappings":"AAiBA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;
|
|
1
|
+
{"version":3,"file":"backfill.d.ts","sourceRoot":"","sources":["../src/backfill.ts"],"names":[],"mappings":"AAiBA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAIjD,6CAA6C;AAC7C,UAAU,YAAY;IACpB,wFAAwF;IACxF,MAAM,EAAE,MAAM,CAAA;IACd,8FAA8F;IAC9F,MAAM,EAAE,MAAM,CAAA;IACd,yEAAyE;IACzE,WAAW,EAAE,GAAG,CAAC,MAAM,CAAC,CAAA;IACxB,wDAAwD;IACxD,MAAM,EAAE,cAAc,CAAA;CACvB;AAoGD;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AACH,wBAAsB,YAAY,CAAC,GAAG,EAAE,MAAM,EAAE,WAAW,EAAE,GAAG,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAkK/G;AA8BD;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,wBAAsB,WAAW,CAAC,IAAI,EAAE,YAAY,GAAG,OAAO,CAAC,MAAM,CAAC,CAkIrE"}
|
package/dist/backfill.js
CHANGED
|
@@ -1,8 +1,10 @@
|
|
|
1
1
|
import { parseCarStream } from "./car.js";
|
|
2
2
|
import { cborDecode } from "./cbor.js";
|
|
3
3
|
import { walkMst } from "./mst.js";
|
|
4
|
-
import { setRepoStatus, getRepoStatus, getRepoRev, getRepoRetryInfo, listRetryEligibleRepos, listPendingRepos, querySQL, runSQL, getSchema, bulkInsertRecords, } from "./db.js";
|
|
4
|
+
import { setRepoStatus, getRepoStatus, getRepoRev, getRepoRetryInfo, listRetryEligibleRepos, listPendingRepos, querySQL, runSQL, getSchema, bulkInsertRecords, } from "./database/db.js";
|
|
5
5
|
import { emit, timer } from "./logger.js";
|
|
6
|
+
import { validateRecord } from '@bigmoves/lexicon';
|
|
7
|
+
import { getLexiconArray } from "./database/schema.js";
|
|
6
8
|
/** In-memory cache of DID → PDS resolution results to avoid redundant lookups. */
|
|
7
9
|
const pdsCache = new Map();
|
|
8
10
|
let plcUrl;
|
|
@@ -184,13 +186,13 @@ export async function backfillRepo(did, collections, fetchTimeout) {
|
|
|
184
186
|
const schema = getSchema(col);
|
|
185
187
|
if (!schema)
|
|
186
188
|
continue;
|
|
187
|
-
await runSQL(`DELETE FROM ${schema.tableName} WHERE did = $1`, did);
|
|
189
|
+
await runSQL(`DELETE FROM ${schema.tableName} WHERE did = $1`, [did]);
|
|
188
190
|
for (const child of schema.children) {
|
|
189
|
-
await runSQL(`DELETE FROM ${child.tableName} WHERE parent_did = $1`, did);
|
|
191
|
+
await runSQL(`DELETE FROM ${child.tableName} WHERE parent_did = $1`, [did]);
|
|
190
192
|
}
|
|
191
193
|
for (const union of schema.unions) {
|
|
192
194
|
for (const branch of union.branches) {
|
|
193
|
-
await runSQL(`DELETE FROM ${branch.tableName} WHERE parent_did = $1`, did);
|
|
195
|
+
await runSQL(`DELETE FROM ${branch.tableName} WHERE parent_did = $1`, [did]);
|
|
194
196
|
}
|
|
195
197
|
}
|
|
196
198
|
}
|
|
@@ -212,6 +214,16 @@ export async function backfillRepo(did, collections, fetchTimeout) {
|
|
|
212
214
|
continue;
|
|
213
215
|
const rkey = entry.path.split('/').slice(1).join('/');
|
|
214
216
|
const uri = `at://${did}/${collection}/${rkey}`;
|
|
217
|
+
const validationError = validateRecord(getLexiconArray(), collection, record);
|
|
218
|
+
if (validationError) {
|
|
219
|
+
emit('backfill', 'validation_skip', {
|
|
220
|
+
uri,
|
|
221
|
+
collection,
|
|
222
|
+
path: validationError.path,
|
|
223
|
+
error: validationError.message,
|
|
224
|
+
});
|
|
225
|
+
continue;
|
|
226
|
+
}
|
|
215
227
|
chunk.push({ collection, uri, cid: entry.cid, did, record });
|
|
216
228
|
if (chunk.length >= CHUNK_SIZE) {
|
|
217
229
|
count += await bulkInsertRecords(chunk);
|
package/dist/cli.js
CHANGED
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
import { mkdirSync, writeFileSync, existsSync, unlinkSync, readdirSync, readFileSync } from 'node:fs';
|
|
3
3
|
import { resolve, join, dirname } from 'node:path';
|
|
4
4
|
import { execSync, spawn } from 'node:child_process';
|
|
5
|
-
import { loadLexicons, discoverCollections, buildSchemas } from "./schema.js";
|
|
5
|
+
import { loadLexicons, discoverCollections, buildSchemas } from "./database/schema.js";
|
|
6
6
|
import { loadConfig } from "./config.js";
|
|
7
7
|
const args = process.argv.slice(2);
|
|
8
8
|
const command = args[0];
|
|
@@ -70,7 +70,7 @@ function usage() {
|
|
|
70
70
|
Usage: hatk <command> [options]
|
|
71
71
|
|
|
72
72
|
Getting Started
|
|
73
|
-
new <name> [--svelte] [--template <t>] Create a new hatk project
|
|
73
|
+
new <name> [--svelte] [--duckdb] [--template <t>] Create a new hatk project
|
|
74
74
|
|
|
75
75
|
Running
|
|
76
76
|
start Start the hatk server
|
|
@@ -328,7 +328,7 @@ const dirs = {
|
|
|
328
328
|
if (command === 'new') {
|
|
329
329
|
const name = args[1];
|
|
330
330
|
if (!name) {
|
|
331
|
-
console.error('Usage: hatk new <name> [--svelte] [--template <template-name>]');
|
|
331
|
+
console.error('Usage: hatk new <name> [--svelte] [--duckdb] [--template <template-name>]');
|
|
332
332
|
process.exit(1);
|
|
333
333
|
}
|
|
334
334
|
const templateIdx = args.indexOf('--template');
|
|
@@ -366,6 +366,8 @@ if (command === 'new') {
|
|
|
366
366
|
process.exit(0);
|
|
367
367
|
}
|
|
368
368
|
const withSvelte = args.includes('--svelte');
|
|
369
|
+
const withDuckdb = args.includes('--duckdb');
|
|
370
|
+
const dbEngine = withDuckdb ? 'duckdb' : 'sqlite';
|
|
369
371
|
mkdirSync(dir);
|
|
370
372
|
const subs = [
|
|
371
373
|
'lexicons',
|
|
@@ -395,6 +397,7 @@ export default defineConfig({
|
|
|
395
397
|
relay: 'ws://localhost:2583',
|
|
396
398
|
plc: 'http://localhost:2582',
|
|
397
399
|
port: 3000,
|
|
400
|
+
databaseEngine: '${dbEngine}',
|
|
398
401
|
database: 'data/hatk.db',
|
|
399
402
|
admins: [],
|
|
400
403
|
backfill: {
|
|
@@ -896,6 +899,9 @@ EXPOSE 3000
|
|
|
896
899
|
CMD ["node", "--experimental-strip-types", "--max-old-space-size=512", "node_modules/@hatk/hatk/dist/main.js", "hatk.config.ts"]
|
|
897
900
|
`);
|
|
898
901
|
const pkgDeps = { '@hatk/oauth-client': '*', hatk: '*' };
|
|
902
|
+
if (!withDuckdb) {
|
|
903
|
+
pkgDeps['better-sqlite3'] = '^11';
|
|
904
|
+
}
|
|
899
905
|
const pkgDevDeps = {
|
|
900
906
|
'@playwright/test': '^1',
|
|
901
907
|
oxfmt: '^0.35.0',
|
|
@@ -1593,7 +1599,7 @@ else if (command === 'build') {
|
|
|
1593
1599
|
else if (command === 'reset') {
|
|
1594
1600
|
const config = await loadConfig(resolve('hatk.config.ts'));
|
|
1595
1601
|
if (config.database !== ':memory:') {
|
|
1596
|
-
for (const suffix of ['', '.wal']) {
|
|
1602
|
+
for (const suffix of ['', '.wal', '-shm', '-wal']) {
|
|
1597
1603
|
const file = config.database + suffix;
|
|
1598
1604
|
if (existsSync(file)) {
|
|
1599
1605
|
unlinkSync(file);
|
|
@@ -1752,37 +1758,19 @@ else if (command === 'resolve') {
|
|
|
1752
1758
|
}
|
|
1753
1759
|
else if (command === 'schema') {
|
|
1754
1760
|
const config = await loadConfig(resolve('hatk.config.ts'));
|
|
1755
|
-
|
|
1756
|
-
|
|
1757
|
-
|
|
1758
|
-
|
|
1759
|
-
|
|
1760
|
-
|
|
1761
|
-
|
|
1762
|
-
|
|
1763
|
-
const collections = config.collections.length > 0 ? config.collections : discoverCollections(lexicons);
|
|
1764
|
-
if (collections.length === 0) {
|
|
1765
|
-
console.error('No record collections found. Add record lexicons to the lexicons/ directory.');
|
|
1766
|
-
process.exit(1);
|
|
1767
|
-
}
|
|
1761
|
+
const { initDatabase, getSchemaDump } = await import("./database/db.js");
|
|
1762
|
+
const { createAdapter } = await import("./database/adapter-factory.js");
|
|
1763
|
+
const { getDialect } = await import("./database/dialect.js");
|
|
1764
|
+
const configDir2 = resolve('.');
|
|
1765
|
+
const lexicons2 = loadLexicons(resolve(configDir2, 'lexicons'));
|
|
1766
|
+
const collections2 = config.collections.length > 0 ? config.collections : discoverCollections(lexicons2);
|
|
1767
|
+
const { schemas: schemas2, ddlStatements: ddl2 } = buildSchemas(lexicons2, collections2, getDialect(config.databaseEngine));
|
|
1768
|
+
if (config.database !== ':memory:') {
|
|
1768
1769
|
mkdirSync(dirname(config.database), { recursive: true });
|
|
1769
|
-
const { initDatabase } = await import("./db.js");
|
|
1770
|
-
const { schemas, ddlStatements } = buildSchemas(lexicons, collections);
|
|
1771
|
-
await initDatabase(config.database, schemas, ddlStatements);
|
|
1772
|
-
}
|
|
1773
|
-
const { DuckDBInstance } = await import('@duckdb/node-api');
|
|
1774
|
-
const instance = await DuckDBInstance.create(config.database);
|
|
1775
|
-
const con = await instance.connect();
|
|
1776
|
-
const tables = (await (await con.runAndReadAll(`SELECT table_name FROM information_schema.tables WHERE table_schema = 'main' ORDER BY table_name`)).getRowObjects());
|
|
1777
|
-
for (const { table_name } of tables) {
|
|
1778
|
-
console.log(`"${table_name}"`);
|
|
1779
|
-
const cols = (await (await con.runAndReadAll(`SELECT column_name, data_type, is_nullable FROM information_schema.columns WHERE table_name = '${table_name}' ORDER BY ordinal_position`)).getRowObjects());
|
|
1780
|
-
for (const col of cols) {
|
|
1781
|
-
const nullable = col.is_nullable === 'YES' ? '' : ' NOT NULL';
|
|
1782
|
-
console.log(` ${col.column_name.padEnd(20)} ${col.data_type}${nullable}`);
|
|
1783
|
-
}
|
|
1784
|
-
console.log();
|
|
1785
1770
|
}
|
|
1771
|
+
const { adapter: adapter2 } = await createAdapter(config.databaseEngine);
|
|
1772
|
+
await initDatabase(adapter2, config.database, schemas2, ddl2);
|
|
1773
|
+
console.log(await getSchemaDump());
|
|
1786
1774
|
}
|
|
1787
1775
|
else if (command === 'start') {
|
|
1788
1776
|
const mainPath = resolve(import.meta.dirname, 'main.js');
|
package/dist/config.d.ts
CHANGED
package/dist/config.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,WAAW,EAAE,MAAM,CAAA;CACpB;AAED,MAAM,WAAW,eAAe;IAC9B,UAAU,EAAE,MAAM,CAAA;IAClB,QAAQ,EAAE,OAAO,GAAG,QAAQ,GAAG,MAAM,CAAA;IACrC,KAAK,EAAE,OAAO,GAAG,SAAS,GAAG,MAAM,CAAA;IACnC,cAAc,EAAE,MAAM,GAAG,MAAM,GAAG,QAAQ,CAAA;IAC1C,OAAO,CAAC,EAAE,WAAW,EAAE,CAAA;CACxB;AAED,MAAM,WAAW,iBAAiB;IAChC,SAAS,EAAE,MAAM,CAAA;IACjB,WAAW,EAAE,MAAM,CAAA;IACnB,aAAa,EAAE,MAAM,EAAE,CAAA;IACvB,KAAK,CAAC,EAAE,MAAM,CAAA;CACf;AAED,MAAM,WAAW,WAAW;IAC1B,MAAM,EAAE,MAAM,CAAA;IACd,MAAM,EAAE,MAAM,EAAE,CAAA;IAChB,OAAO,EAAE,iBAAiB,EAAE,CAAA;CAC7B;AAED,MAAM,WAAW,cAAc;IAC7B,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAA;IAC5B,KAAK,CAAC,EAAE,MAAM,EAAE,CAAA;IAChB,WAAW,EAAE,OAAO,CAAA;IACpB,WAAW,EAAE,MAAM,CAAA;IACnB,YAAY,EAAE,MAAM,CAAA;IACpB,UAAU,EAAE,MAAM,CAAA;CACnB;AAED,MAAM,WAAW,UAAU;IACzB,KAAK,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,EAAE,MAAM,CAAA;IACZ,QAAQ,EAAE,MAAM,CAAA;IAChB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAA;IACxB,WAAW,EAAE,MAAM,EAAE,CAAA;IACrB,QAAQ,EAAE,cAAc,CAAA;IACxB,kBAAkB,EAAE,MAAM,CAAA;IAC1B,KAAK,EAAE,WAAW,GAAG,IAAI,CAAA;IACzB,MAAM,EAAE,MAAM,EAAE,CAAA;CACjB;AAED,4EAA4E;AAC5E,MAAM,MAAM,eAAe,GAAG,OAAO,CAAC,IAAI,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,CAAC,CAAC,GAAG;IAC9E,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG;QAAE,OAAO,EAAE,iBAAiB,EAAE,CAAA;KAAE,CAAC,GAAG,IAAI,CAAA;IACxE,QAAQ,CAAC,EAAE,OAAO,CAAC,cAAc,CAAC,CAAA;CACnC,CAAA;AAED,4EAA4E;AAC5E,wBAAgB,YAAY,CAAC,MAAM,EAAE,eAAe,GAAG,eAAe,CAErE;AAED,yEAAyE;AACzE,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED,wBAAsB,UAAU,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,
|
|
1
|
+
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,WAAW,EAAE,MAAM,CAAA;CACpB;AAED,MAAM,WAAW,eAAe;IAC9B,UAAU,EAAE,MAAM,CAAA;IAClB,QAAQ,EAAE,OAAO,GAAG,QAAQ,GAAG,MAAM,CAAA;IACrC,KAAK,EAAE,OAAO,GAAG,SAAS,GAAG,MAAM,CAAA;IACnC,cAAc,EAAE,MAAM,GAAG,MAAM,GAAG,QAAQ,CAAA;IAC1C,OAAO,CAAC,EAAE,WAAW,EAAE,CAAA;CACxB;AAED,MAAM,WAAW,iBAAiB;IAChC,SAAS,EAAE,MAAM,CAAA;IACjB,WAAW,EAAE,MAAM,CAAA;IACnB,aAAa,EAAE,MAAM,EAAE,CAAA;IACvB,KAAK,CAAC,EAAE,MAAM,CAAA;CACf;AAED,MAAM,WAAW,WAAW;IAC1B,MAAM,EAAE,MAAM,CAAA;IACd,MAAM,EAAE,MAAM,EAAE,CAAA;IAChB,OAAO,EAAE,iBAAiB,EAAE,CAAA;CAC7B;AAED,MAAM,WAAW,cAAc;IAC7B,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAA;IAC5B,KAAK,CAAC,EAAE,MAAM,EAAE,CAAA;IAChB,WAAW,EAAE,OAAO,CAAA;IACpB,WAAW,EAAE,MAAM,CAAA;IACnB,YAAY,EAAE,MAAM,CAAA;IACpB,UAAU,EAAE,MAAM,CAAA;CACnB;AAED,MAAM,WAAW,UAAU;IACzB,KAAK,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,EAAE,MAAM,CAAA;IACZ,cAAc,EAAE,QAAQ,GAAG,QAAQ,CAAA;IACnC,QAAQ,EAAE,MAAM,CAAA;IAChB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAA;IACxB,WAAW,EAAE,MAAM,EAAE,CAAA;IACrB,QAAQ,EAAE,cAAc,CAAA;IACxB,kBAAkB,EAAE,MAAM,CAAA;IAC1B,KAAK,EAAE,WAAW,GAAG,IAAI,CAAA;IACzB,MAAM,EAAE,MAAM,EAAE,CAAA;CACjB;AAED,4EAA4E;AAC5E,MAAM,MAAM,eAAe,GAAG,OAAO,CAAC,IAAI,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,CAAC,CAAC,GAAG;IAC9E,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG;QAAE,OAAO,EAAE,iBAAiB,EAAE,CAAA;KAAE,CAAC,GAAG,IAAI,CAAA;IACxE,QAAQ,CAAC,EAAE,OAAO,CAAC,cAAc,CAAC,CAAA;CACnC,CAAA;AAED,4EAA4E;AAC5E,wBAAgB,YAAY,CAAC,MAAM,EAAE,eAAe,GAAG,eAAe,CAErE;AAED,yEAAyE;AACzE,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED,wBAAsB,UAAU,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAwDxE"}
|
package/dist/config.js
CHANGED
|
@@ -42,6 +42,7 @@ export async function loadConfig(configPath) {
|
|
|
42
42
|
relay: env.RELAY || parsed.relay || 'ws://localhost:2583',
|
|
43
43
|
plc: env.DID_PLC_URL || parsed.plc || 'https://plc.directory',
|
|
44
44
|
port: parseInt(env.PORT || '') || parsed.port || 3000,
|
|
45
|
+
databaseEngine: (env.DATABASE_ENGINE || parsed.databaseEngine || 'sqlite'),
|
|
45
46
|
database: database ? resolve(configDir, database) : ':memory:',
|
|
46
47
|
publicDir: parsed.publicDir === null ? null : resolve(configDir, parsed.publicDir || './public'),
|
|
47
48
|
collections: parsed.collections || [],
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"adapter-factory.d.ts","sourceRoot":"","sources":["../../src/database/adapter-factory.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,YAAY,CAAA;AAE1D,wBAAsB,aAAa,CAAC,MAAM,EAAE,QAAQ,GAAG,QAAQ,GAAG,OAAO,CAAC;IACxE,OAAO,EAAE,YAAY,CAAA;IACrB,UAAU,EAAE,UAAU,GAAG,IAAI,CAAA;CAC9B,CAAC,CAmBD"}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
export async function createAdapter(engine) {
|
|
2
|
+
switch (engine) {
|
|
3
|
+
case 'duckdb': {
|
|
4
|
+
const { DuckDBAdapter } = await import("./adapters/duckdb.js");
|
|
5
|
+
const { DuckDBSearchPort } = await import("./adapters/duckdb-search.js");
|
|
6
|
+
const adapter = new DuckDBAdapter();
|
|
7
|
+
const searchPort = new DuckDBSearchPort(adapter);
|
|
8
|
+
return { adapter, searchPort };
|
|
9
|
+
}
|
|
10
|
+
case 'sqlite': {
|
|
11
|
+
const { SQLiteAdapter } = await import("./adapters/sqlite.js");
|
|
12
|
+
const { SQLiteSearchPort } = await import("./adapters/sqlite-search.js");
|
|
13
|
+
const adapter = new SQLiteAdapter();
|
|
14
|
+
const searchPort = new SQLiteSearchPort(adapter);
|
|
15
|
+
return { adapter, searchPort };
|
|
16
|
+
}
|
|
17
|
+
default:
|
|
18
|
+
throw new Error(`Unsupported database engine: ${engine}`);
|
|
19
|
+
}
|
|
20
|
+
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import type { SearchPort } from '../ports.ts';
|
|
2
|
+
import type { DatabasePort } from '../ports.ts';
|
|
3
|
+
export declare class DuckDBSearchPort implements SearchPort {
|
|
4
|
+
private port;
|
|
5
|
+
constructor(port: DatabasePort);
|
|
6
|
+
buildIndex(shadowTable: string, sourceQuery: string, searchColumns: string[]): Promise<void>;
|
|
7
|
+
search(shadowTable: string, query: string, searchColumns: string[], limit: number, offset: number): Promise<Array<{
|
|
8
|
+
uri: string;
|
|
9
|
+
score: number;
|
|
10
|
+
}>>;
|
|
11
|
+
}
|
|
12
|
+
//# sourceMappingURL=duckdb-search.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"duckdb-search.d.ts","sourceRoot":"","sources":["../../../src/database/adapters/duckdb-search.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAA;AAE/C,qBAAa,gBAAiB,YAAW,UAAU;IACrC,OAAO,CAAC,IAAI;gBAAJ,IAAI,EAAE,YAAY;IAEhC,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAiB5F,MAAM,CACV,WAAW,EAAE,MAAM,EACnB,KAAK,EAAE,MAAM,EACb,aAAa,EAAE,MAAM,EAAE,EACvB,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,MAAM,GACb,OAAO,CAAC,KAAK,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;CASlD"}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
export class DuckDBSearchPort {
|
|
2
|
+
port;
|
|
3
|
+
constructor(port) {
|
|
4
|
+
this.port = port;
|
|
5
|
+
}
|
|
6
|
+
async buildIndex(shadowTable, sourceQuery, searchColumns) {
|
|
7
|
+
// Create shadow table
|
|
8
|
+
await this.port.execute(`CREATE OR REPLACE TABLE ${shadowTable} AS ${sourceQuery}`, []);
|
|
9
|
+
// Drop existing index
|
|
10
|
+
try {
|
|
11
|
+
await this.port.execute(`PRAGMA drop_fts_index('${shadowTable}')`, []);
|
|
12
|
+
}
|
|
13
|
+
catch { }
|
|
14
|
+
// Build FTS index
|
|
15
|
+
const colList = searchColumns.map((c) => `'${c}'`).join(', ');
|
|
16
|
+
await this.port.execute(`PRAGMA create_fts_index('${shadowTable}', 'uri', ${colList}, stemmer='porter', stopwords='english', strip_accents=1, lower=1, overwrite=1)`, []);
|
|
17
|
+
}
|
|
18
|
+
async search(shadowTable, query, searchColumns, limit, offset) {
|
|
19
|
+
const ftsSchema = `fts_main_${shadowTable}`;
|
|
20
|
+
const sql = `SELECT uri, ${ftsSchema}.match_bm25(uri, $1) AS score
|
|
21
|
+
FROM ${shadowTable}
|
|
22
|
+
WHERE score IS NOT NULL
|
|
23
|
+
ORDER BY score DESC
|
|
24
|
+
LIMIT $2 OFFSET $3`;
|
|
25
|
+
return this.port.query(sql, [query, limit, offset]);
|
|
26
|
+
}
|
|
27
|
+
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import type { DatabasePort, BulkInserter, Dialect } from '../ports.ts';
|
|
2
|
+
export declare class DuckDBAdapter implements DatabasePort {
|
|
3
|
+
dialect: Dialect;
|
|
4
|
+
private instance;
|
|
5
|
+
private writeCon;
|
|
6
|
+
private readCon;
|
|
7
|
+
private writeQueue;
|
|
8
|
+
private readQueue;
|
|
9
|
+
open(path: string): Promise<void>;
|
|
10
|
+
close(): void;
|
|
11
|
+
query<T = Record<string, unknown>>(sql: string, params?: unknown[]): Promise<T[]>;
|
|
12
|
+
execute(sql: string, params?: unknown[]): Promise<void>;
|
|
13
|
+
executeMultiple(sql: string): Promise<void>;
|
|
14
|
+
beginTransaction(): Promise<void>;
|
|
15
|
+
commit(): Promise<void>;
|
|
16
|
+
rollback(): Promise<void>;
|
|
17
|
+
createBulkInserter(table: string, _columns: string[], _options?: {
|
|
18
|
+
onConflict?: 'ignore' | 'replace';
|
|
19
|
+
batchSize?: number;
|
|
20
|
+
}): Promise<BulkInserter>;
|
|
21
|
+
/** Enqueue a read or write operation for serialization */
|
|
22
|
+
private enqueue;
|
|
23
|
+
private bindParams;
|
|
24
|
+
}
|
|
25
|
+
//# sourceMappingURL=duckdb.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"duckdb.d.ts","sourceRoot":"","sources":["../../../src/database/adapters/duckdb.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAE,YAAY,EAAE,OAAO,EAAE,MAAM,aAAa,CAAA;AAEtE,qBAAa,aAAc,YAAW,YAAY;IAChD,OAAO,EAAE,OAAO,CAAW;IAE3B,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,QAAQ,CAAiD;IACjE,OAAO,CAAC,OAAO,CAAiD;IAChE,OAAO,CAAC,UAAU,CAAoB;IACtC,OAAO,CAAC,SAAS,CAAoB;IAE/B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAMvC,KAAK,IAAI,IAAI;IAYP,KAAK,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,GAAE,OAAO,EAAO,GAAG,OAAO,CAAC,CAAC,EAAE,CAAC;IAarF,OAAO,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,GAAE,OAAO,EAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAY3D,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAQ3C,gBAAgB,IAAI,OAAO,CAAC,IAAI,CAAC;IAMjC,MAAM,IAAI,OAAO,CAAC,IAAI,CAAC;IAMvB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAMzB,kBAAkB,CACtB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,EAAE,EAClB,QAAQ,CAAC,EAAE;QAAE,UAAU,CAAC,EAAE,QAAQ,GAAG,SAAS,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GACnE,OAAO,CAAC,YAAY,CAAC;IAqCxB,0DAA0D;IAC1D,OAAO,CAAC,OAAO;IAkBf,OAAO,CAAC,UAAU;CAyBnB"}
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
import { DuckDBInstance } from '@duckdb/node-api';
|
|
2
|
+
export class DuckDBAdapter {
|
|
3
|
+
dialect = 'duckdb';
|
|
4
|
+
instance;
|
|
5
|
+
writeCon;
|
|
6
|
+
readCon;
|
|
7
|
+
writeQueue = Promise.resolve();
|
|
8
|
+
readQueue = Promise.resolve();
|
|
9
|
+
async open(path) {
|
|
10
|
+
this.instance = await DuckDBInstance.create(path === ':memory:' ? undefined : path);
|
|
11
|
+
this.writeCon = await this.instance.connect();
|
|
12
|
+
this.readCon = await this.instance.connect();
|
|
13
|
+
}
|
|
14
|
+
close() {
|
|
15
|
+
try {
|
|
16
|
+
this.readCon?.closeSync();
|
|
17
|
+
}
|
|
18
|
+
catch { }
|
|
19
|
+
try {
|
|
20
|
+
this.writeCon?.closeSync();
|
|
21
|
+
}
|
|
22
|
+
catch { }
|
|
23
|
+
try {
|
|
24
|
+
this.instance?.closeSync();
|
|
25
|
+
}
|
|
26
|
+
catch { }
|
|
27
|
+
}
|
|
28
|
+
async query(sql, params = []) {
|
|
29
|
+
return this.enqueue('read', async () => {
|
|
30
|
+
if (params.length === 0) {
|
|
31
|
+
const reader = await this.readCon.runAndReadAll(sql);
|
|
32
|
+
return reader.getRowObjects();
|
|
33
|
+
}
|
|
34
|
+
const prepared = await this.readCon.prepare(sql);
|
|
35
|
+
this.bindParams(prepared, params);
|
|
36
|
+
const reader = await prepared.runAndReadAll();
|
|
37
|
+
return reader.getRowObjects();
|
|
38
|
+
});
|
|
39
|
+
}
|
|
40
|
+
async execute(sql, params = []) {
|
|
41
|
+
return this.enqueue('write', async () => {
|
|
42
|
+
if (params.length === 0) {
|
|
43
|
+
await this.writeCon.run(sql);
|
|
44
|
+
return;
|
|
45
|
+
}
|
|
46
|
+
const prepared = await this.writeCon.prepare(sql);
|
|
47
|
+
this.bindParams(prepared, params);
|
|
48
|
+
await prepared.run();
|
|
49
|
+
});
|
|
50
|
+
}
|
|
51
|
+
async executeMultiple(sql) {
|
|
52
|
+
return this.enqueue('write', async () => {
|
|
53
|
+
for (const statement of sql.split(';').filter((s) => s.trim())) {
|
|
54
|
+
await this.writeCon.run(statement);
|
|
55
|
+
}
|
|
56
|
+
});
|
|
57
|
+
}
|
|
58
|
+
async beginTransaction() {
|
|
59
|
+
return this.enqueue('write', async () => {
|
|
60
|
+
await this.writeCon.run('BEGIN TRANSACTION');
|
|
61
|
+
});
|
|
62
|
+
}
|
|
63
|
+
async commit() {
|
|
64
|
+
return this.enqueue('write', async () => {
|
|
65
|
+
await this.writeCon.run('COMMIT');
|
|
66
|
+
});
|
|
67
|
+
}
|
|
68
|
+
async rollback() {
|
|
69
|
+
return this.enqueue('write', async () => {
|
|
70
|
+
await this.writeCon.run('ROLLBACK');
|
|
71
|
+
});
|
|
72
|
+
}
|
|
73
|
+
async createBulkInserter(table, _columns, _options) {
|
|
74
|
+
const appender = await this.writeCon.createAppender(table.replace(/"/g, ''));
|
|
75
|
+
return {
|
|
76
|
+
append(values) {
|
|
77
|
+
for (const value of values) {
|
|
78
|
+
if (value === null || value === undefined) {
|
|
79
|
+
appender.appendNull();
|
|
80
|
+
}
|
|
81
|
+
else if (typeof value === 'string') {
|
|
82
|
+
appender.appendVarchar(value);
|
|
83
|
+
}
|
|
84
|
+
else if (typeof value === 'number') {
|
|
85
|
+
if (Number.isInteger(value)) {
|
|
86
|
+
appender.appendInteger(value);
|
|
87
|
+
}
|
|
88
|
+
else {
|
|
89
|
+
appender.appendDouble(value);
|
|
90
|
+
}
|
|
91
|
+
}
|
|
92
|
+
else if (typeof value === 'boolean') {
|
|
93
|
+
appender.appendBoolean(value);
|
|
94
|
+
}
|
|
95
|
+
else if (typeof value === 'bigint') {
|
|
96
|
+
appender.appendBigInt(value);
|
|
97
|
+
}
|
|
98
|
+
else if (value instanceof Uint8Array) {
|
|
99
|
+
appender.appendBlob(value);
|
|
100
|
+
}
|
|
101
|
+
else {
|
|
102
|
+
appender.appendVarchar(String(value));
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
appender.endRow();
|
|
106
|
+
},
|
|
107
|
+
async flush() {
|
|
108
|
+
appender.flushSync();
|
|
109
|
+
},
|
|
110
|
+
async close() {
|
|
111
|
+
appender.flushSync();
|
|
112
|
+
appender.closeSync();
|
|
113
|
+
},
|
|
114
|
+
};
|
|
115
|
+
}
|
|
116
|
+
/** Enqueue a read or write operation for serialization */
|
|
117
|
+
enqueue(queue, fn) {
|
|
118
|
+
if (queue === 'write') {
|
|
119
|
+
const p = this.writeQueue.then(fn);
|
|
120
|
+
this.writeQueue = p.then(() => { }, () => { });
|
|
121
|
+
return p;
|
|
122
|
+
}
|
|
123
|
+
else {
|
|
124
|
+
const p = this.readQueue.then(fn);
|
|
125
|
+
this.readQueue = p.then(() => { }, () => { });
|
|
126
|
+
return p;
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
bindParams(prepared, params) {
|
|
130
|
+
for (let i = 0; i < params.length; i++) {
|
|
131
|
+
const idx = i + 1;
|
|
132
|
+
const value = params[i];
|
|
133
|
+
if (value === null || value === undefined) {
|
|
134
|
+
prepared.bindNull(idx);
|
|
135
|
+
}
|
|
136
|
+
else if (typeof value === 'string') {
|
|
137
|
+
prepared.bindVarchar(idx, value);
|
|
138
|
+
}
|
|
139
|
+
else if (typeof value === 'number') {
|
|
140
|
+
if (Number.isInteger(value)) {
|
|
141
|
+
prepared.bindInteger(idx, value);
|
|
142
|
+
}
|
|
143
|
+
else {
|
|
144
|
+
prepared.bindDouble(idx, value);
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
else if (typeof value === 'boolean') {
|
|
148
|
+
prepared.bindBoolean(idx, value);
|
|
149
|
+
}
|
|
150
|
+
else if (typeof value === 'bigint') {
|
|
151
|
+
prepared.bindBigInt(idx, value);
|
|
152
|
+
}
|
|
153
|
+
else if (value instanceof Uint8Array) {
|
|
154
|
+
prepared.bindBlob(idx, value);
|
|
155
|
+
}
|
|
156
|
+
else {
|
|
157
|
+
prepared.bindVarchar(idx, String(value));
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
}
|
|
161
|
+
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import type { SearchPort } from '../ports.ts';
|
|
2
|
+
import type { DatabasePort } from '../ports.ts';
|
|
3
|
+
/**
|
|
4
|
+
* SQLite FTS5-based search port.
|
|
5
|
+
*
|
|
6
|
+
* Uses SQLite's built-in FTS5 virtual tables for full-text search with BM25 ranking.
|
|
7
|
+
* The shadow table name is reused as the FTS5 virtual table name.
|
|
8
|
+
*/
|
|
9
|
+
export declare class SQLiteSearchPort implements SearchPort {
|
|
10
|
+
private port;
|
|
11
|
+
constructor(port: DatabasePort);
|
|
12
|
+
buildIndex(shadowTable: string, sourceQuery: string, searchColumns: string[]): Promise<void>;
|
|
13
|
+
search(shadowTable: string, query: string, _searchColumns: string[], limit: number, offset: number): Promise<Array<{
|
|
14
|
+
uri: string;
|
|
15
|
+
score: number;
|
|
16
|
+
}>>;
|
|
17
|
+
}
|
|
18
|
+
//# sourceMappingURL=sqlite-search.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sqlite-search.d.ts","sourceRoot":"","sources":["../../../src/database/adapters/sqlite-search.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAA;AAE/C;;;;;GAKG;AACH,qBAAa,gBAAiB,YAAW,UAAU;IACrC,OAAO,CAAC,IAAI;gBAAJ,IAAI,EAAE,YAAY;IAEhC,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAuB5F,MAAM,CACV,WAAW,EAAE,MAAM,EACnB,KAAK,EAAE,MAAM,EACb,cAAc,EAAE,MAAM,EAAE,EACxB,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,MAAM,GACb,OAAO,CAAC,KAAK,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;CAalD"}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* SQLite FTS5-based search port.
|
|
3
|
+
*
|
|
4
|
+
* Uses SQLite's built-in FTS5 virtual tables for full-text search with BM25 ranking.
|
|
5
|
+
* The shadow table name is reused as the FTS5 virtual table name.
|
|
6
|
+
*/
|
|
7
|
+
export class SQLiteSearchPort {
|
|
8
|
+
port;
|
|
9
|
+
constructor(port) {
|
|
10
|
+
this.port = port;
|
|
11
|
+
}
|
|
12
|
+
async buildIndex(shadowTable, sourceQuery, searchColumns) {
|
|
13
|
+
// Drop existing FTS table and data table
|
|
14
|
+
await this.port.execute(`DROP TABLE IF EXISTS ${shadowTable}_fts`, []);
|
|
15
|
+
await this.port.execute(`DROP TABLE IF EXISTS ${shadowTable}`, []);
|
|
16
|
+
// Create the data table from the source query
|
|
17
|
+
await this.port.execute(`CREATE TABLE ${shadowTable} AS ${sourceQuery}`, []);
|
|
18
|
+
// Create the FTS5 virtual table over the search columns
|
|
19
|
+
const colList = searchColumns.join(', ');
|
|
20
|
+
await this.port.execute(`CREATE VIRTUAL TABLE ${shadowTable}_fts USING fts5(uri UNINDEXED, ${colList}, tokenize='porter unicode61 remove_diacritics 2')`, []);
|
|
21
|
+
// Populate FTS table from the data table
|
|
22
|
+
const selectCols = ['uri', ...searchColumns].map((c) => `COALESCE(CAST(${c} AS TEXT), '')`);
|
|
23
|
+
await this.port.execute(`INSERT INTO ${shadowTable}_fts (uri, ${colList}) SELECT ${selectCols.join(', ')} FROM ${shadowTable}`, []);
|
|
24
|
+
}
|
|
25
|
+
async search(shadowTable, query, _searchColumns, limit, offset) {
|
|
26
|
+
// Escape FTS5 special characters and build query
|
|
27
|
+
const escaped = query.replace(/['"*(){}[\]^~\\:]/g, ' ').trim();
|
|
28
|
+
if (!escaped)
|
|
29
|
+
return [];
|
|
30
|
+
// Use FTS5 MATCH with bm25() ranking (lower = better match, negate for DESC)
|
|
31
|
+
const sql = `SELECT uri, -bm25(${shadowTable}_fts) AS score
|
|
32
|
+
FROM ${shadowTable}_fts
|
|
33
|
+
WHERE ${shadowTable}_fts MATCH $1
|
|
34
|
+
ORDER BY score DESC
|
|
35
|
+
LIMIT $2 OFFSET $3`;
|
|
36
|
+
return this.port.query(sql, [escaped, limit, offset]);
|
|
37
|
+
}
|
|
38
|
+
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import type { DatabasePort, BulkInserter, Dialect } from '../ports.ts';
/**
 * Type declarations for the SQLite implementation of DatabasePort.
 * NOTE(review): generated .d.ts — edit the TypeScript source, not this file.
 */
export declare class SQLiteAdapter implements DatabasePort {
    /** SQL dialect discriminator; the implementation sets this to 'sqlite'. */
    dialect: Dialect;
    /** Underlying database handle (better-sqlite3 in the implementation); set by open(). */
    private db;
    /** Opens (or creates) the database at `path`; ':memory:' for an in-memory database. */
    open(path: string): Promise<void>;
    /** Closes the database handle; errors are swallowed, so safe to call when never opened. */
    close(): void;
    /** Runs a statement with `$N` placeholders and resolves with all result rows. */
    query<T = Record<string, unknown>>(sql: string, params?: unknown[]): Promise<T[]>;
    /** Runs a single statement with `$N` placeholders for its side effects (no rows). */
    execute(sql: string, params?: unknown[]): Promise<void>;
    /** Runs a multi-statement SQL script; no parameter binding. */
    executeMultiple(sql: string): Promise<void>;
    beginTransaction(): Promise<void>;
    commit(): Promise<void>;
    rollback(): Promise<void>;
    /**
     * Creates a batching bulk-insert helper for `table`.
     * `onConflict` maps to INSERT OR IGNORE / OR REPLACE; rows are flushed
     * in a transaction every `batchSize` appends and on flush()/close().
     */
    createBulkInserter(table: string, columns: string[], options?: {
        onConflict?: 'ignore' | 'replace';
        batchSize?: number;
    }): Promise<BulkInserter>;
}
//# sourceMappingURL=sqlite.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sqlite.d.ts","sourceRoot":"","sources":["../../../src/database/adapters/sqlite.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAE,YAAY,EAAE,OAAO,EAAE,MAAM,aAAa,CAAA;AAoBtE,qBAAa,aAAc,YAAW,YAAY;IAChD,OAAO,EAAE,OAAO,CAAW;IAE3B,OAAO,CAAC,EAAE,CAAoB;IAExB,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAOvC,KAAK,IAAI,IAAI;IAMP,KAAK,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,GAAE,OAAO,EAAO,GAAG,OAAO,CAAC,CAAC,EAAE,CAAC;IAMrF,OAAO,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,GAAE,OAAO,EAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAM3D,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAI3C,gBAAgB,IAAI,OAAO,CAAC,IAAI,CAAC;IAIjC,MAAM,IAAI,OAAO,CAAC,IAAI,CAAC;IAIvB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAIzB,kBAAkB,CACtB,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,MAAM,EAAE,EACjB,OAAO,CAAC,EAAE;QAAE,UAAU,CAAC,EAAE,QAAQ,GAAG,SAAS,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GAClE,OAAO,CAAC,YAAY,CAAC;CAmCzB"}
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import Database from 'better-sqlite3';
|
|
2
|
+
/**
 * Translate DuckDB-style `$1, $2` placeholders to SQLite `?` placeholders.
 * Handles repeated references to the same `$N` by duplicating the param value.
 * Returns the translated SQL and expanded params array.
 *
 * @param {string} sql - SQL text containing `$N` placeholders (1-based).
 * @param {unknown[]} params - Positional parameter values.
 * @returns {{ sql: string, params: unknown[] }} translated SQL + expanded params.
 * @throws {RangeError} if the SQL references a `$N` with no matching parameter
 *   (previously this silently bound `undefined`, failing later with an opaque
 *   driver error).
 */
function translateParams(sql, params) {
    if (params.length === 0)
        return { sql, params };
    const expandedParams = [];
    const translated = sql.replace(/\$(\d+)/g, (_match, numStr) => {
        const idx = Number.parseInt(numStr, 10) - 1; // $1 → index 0; radix explicit
        if (idx < 0 || idx >= params.length) {
            throw new RangeError(`SQL placeholder $${numStr} has no matching parameter (got ${params.length})`);
        }
        expandedParams.push(params[idx]);
        return '?';
    });
    return { sql: translated, params: expandedParams };
}
|
|
18
|
+
/**
 * SQLite implementation of the DatabasePort interface, backed by better-sqlite3.
 * All methods are async to satisfy the port contract even though the driver
 * itself is synchronous.
 */
export class SQLiteAdapter {
    dialect = 'sqlite';
    db;
    /** Open (or create) the database at `path`; ':memory:' for an in-memory DB. */
    async open(path) {
        this.db = new Database(path === ':memory:' ? ':memory:' : path);
        // WAL + relaxed sync for write throughput; enforce foreign keys.
        for (const setting of ['journal_mode = WAL', 'synchronous = NORMAL', 'foreign_keys = ON']) {
            this.db.pragma(setting);
        }
    }
    /** Close the handle; best-effort, so a never-opened adapter is fine. */
    close() {
        try {
            this.db?.close();
        }
        catch { }
    }
    /** Run a `$N`-parameterized statement and return all rows. */
    async query(sql, params = []) {
        const { sql: translatedSql, params: boundParams } = translateParams(sql, params);
        return this.db.prepare(translatedSql).all(...boundParams);
    }
    /** Run a `$N`-parameterized statement for its side effects only. */
    async execute(sql, params = []) {
        const { sql: translatedSql, params: boundParams } = translateParams(sql, params);
        this.db.prepare(translatedSql).run(...boundParams);
    }
    /** Run a multi-statement script (no parameter binding). */
    async executeMultiple(sql) {
        this.db.exec(sql);
    }
    async beginTransaction() {
        this.db.exec('BEGIN');
    }
    async commit() {
        this.db.exec('COMMIT');
    }
    async rollback() {
        this.db.exec('ROLLBACK');
    }
    /**
     * Create a batching bulk inserter for `table`. Rows accumulate in memory
     * and are written inside a single driver transaction once `batchSize`
     * rows are buffered, or on flush()/close().
     */
    async createBulkInserter(table, columns, options) {
        const conflictClause = options?.onConflict === 'ignore'
            ? ' OR IGNORE'
            : options?.onConflict === 'replace'
                ? ' OR REPLACE'
                : '';
        const valueSlots = columns.map(() => '?').join(', ');
        const insertStmt = this.db.prepare(`INSERT${conflictClause} INTO ${table} (${columns.join(', ')}) VALUES (${valueSlots})`);
        const batchSize = options?.batchSize ?? 5000;
        let pending = [];
        // better-sqlite3 transaction wrapper: all buffered rows commit atomically.
        const writeBatch = this.db.transaction(() => {
            for (const row of pending) {
                insertStmt.run(...row);
            }
        });
        const drain = () => {
            if (pending.length === 0)
                return;
            writeBatch();
            pending = [];
        };
        return {
            append(values) {
                pending.push(values);
                if (pending.length >= batchSize)
                    drain();
            },
            async flush() {
                drain();
            },
            async close() {
                drain();
            },
        };
    }
}
|