@hatk/hatk 0.0.1-alpha.6 → 0.0.1-alpha.60
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapter.d.ts +19 -0
- package/dist/adapter.d.ts.map +1 -0
- package/dist/adapter.js +108 -0
- package/dist/backfill.d.ts +2 -2
- package/dist/backfill.d.ts.map +1 -1
- package/dist/backfill.js +78 -31
- package/dist/car.d.ts +42 -10
- package/dist/car.d.ts.map +1 -1
- package/dist/car.js +154 -14
- package/dist/cli.js +243 -1043
- package/dist/config.d.ts +31 -1
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +40 -9
- package/dist/database/adapter-factory.d.ts +6 -0
- package/dist/database/adapter-factory.d.ts.map +1 -0
- package/dist/database/adapter-factory.js +20 -0
- package/dist/database/adapters/duckdb-search.d.ts +12 -0
- package/dist/database/adapters/duckdb-search.d.ts.map +1 -0
- package/dist/database/adapters/duckdb-search.js +27 -0
- package/dist/database/adapters/duckdb.d.ts +25 -0
- package/dist/database/adapters/duckdb.d.ts.map +1 -0
- package/dist/database/adapters/duckdb.js +161 -0
- package/dist/database/adapters/sqlite-search.d.ts +23 -0
- package/dist/database/adapters/sqlite-search.d.ts.map +1 -0
- package/dist/database/adapters/sqlite-search.js +74 -0
- package/dist/database/adapters/sqlite.d.ts +18 -0
- package/dist/database/adapters/sqlite.d.ts.map +1 -0
- package/dist/database/adapters/sqlite.js +88 -0
- package/dist/{db.d.ts → database/db.d.ts} +57 -6
- package/dist/database/db.d.ts.map +1 -0
- package/dist/{db.js → database/db.js} +730 -549
- package/dist/database/dialect.d.ts +45 -0
- package/dist/database/dialect.d.ts.map +1 -0
- package/dist/database/dialect.js +72 -0
- package/dist/{fts.d.ts → database/fts.d.ts} +7 -0
- package/dist/database/fts.d.ts.map +1 -0
- package/dist/{fts.js → database/fts.js} +116 -32
- package/dist/database/index.d.ts +7 -0
- package/dist/database/index.d.ts.map +1 -0
- package/dist/database/index.js +6 -0
- package/dist/database/ports.d.ts +50 -0
- package/dist/database/ports.d.ts.map +1 -0
- package/dist/database/ports.js +1 -0
- package/dist/{schema.d.ts → database/schema.d.ts} +14 -3
- package/dist/database/schema.d.ts.map +1 -0
- package/dist/{schema.js → database/schema.js} +81 -41
- package/dist/dev-entry.d.ts +8 -0
- package/dist/dev-entry.d.ts.map +1 -0
- package/dist/dev-entry.js +112 -0
- package/dist/feeds.d.ts +12 -8
- package/dist/feeds.d.ts.map +1 -1
- package/dist/feeds.js +51 -6
- package/dist/hooks.d.ts +85 -0
- package/dist/hooks.d.ts.map +1 -0
- package/dist/hooks.js +161 -0
- package/dist/hydrate.d.ts +7 -6
- package/dist/hydrate.d.ts.map +1 -1
- package/dist/hydrate.js +4 -16
- package/dist/indexer.d.ts +22 -0
- package/dist/indexer.d.ts.map +1 -1
- package/dist/indexer.js +123 -32
- package/dist/labels.d.ts +36 -0
- package/dist/labels.d.ts.map +1 -1
- package/dist/labels.js +71 -6
- package/dist/lexicon-resolve.d.ts.map +1 -1
- package/dist/lexicon-resolve.js +27 -112
- package/dist/lexicons/com/atproto/label/defs.json +75 -0
- package/dist/lexicons/com/atproto/moderation/defs.json +30 -0
- package/dist/lexicons/com/atproto/repo/strongRef.json +24 -0
- package/dist/lexicons/dev/hatk/applyWrites.json +87 -0
- package/dist/lexicons/dev/hatk/createRecord.json +40 -0
- package/dist/lexicons/dev/hatk/createReport.json +48 -0
- package/dist/lexicons/dev/hatk/deleteRecord.json +25 -0
- package/dist/lexicons/dev/hatk/describeCollections.json +41 -0
- package/dist/lexicons/dev/hatk/describeFeeds.json +29 -0
- package/dist/lexicons/dev/hatk/describeLabels.json +45 -0
- package/dist/lexicons/dev/hatk/getFeed.json +30 -0
- package/dist/lexicons/dev/hatk/getPreferences.json +19 -0
- package/dist/lexicons/dev/hatk/getRecord.json +26 -0
- package/dist/lexicons/dev/hatk/getRecords.json +32 -0
- package/dist/lexicons/dev/hatk/putPreference.json +28 -0
- package/dist/lexicons/dev/hatk/putRecord.json +41 -0
- package/dist/lexicons/dev/hatk/searchRecords.json +32 -0
- package/dist/lexicons/dev/hatk/uploadBlob.json +23 -0
- package/dist/logger.d.ts +29 -0
- package/dist/logger.d.ts.map +1 -1
- package/dist/logger.js +29 -0
- package/dist/main.js +137 -67
- package/dist/mst.d.ts +18 -1
- package/dist/mst.d.ts.map +1 -1
- package/dist/mst.js +19 -8
- package/dist/oauth/db.d.ts +3 -1
- package/dist/oauth/db.d.ts.map +1 -1
- package/dist/oauth/db.js +48 -19
- package/dist/oauth/server.d.ts +24 -0
- package/dist/oauth/server.d.ts.map +1 -1
- package/dist/oauth/server.js +198 -22
- package/dist/oauth/session.d.ts +11 -0
- package/dist/oauth/session.d.ts.map +1 -0
- package/dist/oauth/session.js +65 -0
- package/dist/opengraph.d.ts +10 -0
- package/dist/opengraph.d.ts.map +1 -1
- package/dist/opengraph.js +80 -40
- package/dist/pds-proxy.d.ts +60 -0
- package/dist/pds-proxy.d.ts.map +1 -0
- package/dist/pds-proxy.js +277 -0
- package/dist/push.d.ts +34 -0
- package/dist/push.d.ts.map +1 -0
- package/dist/push.js +184 -0
- package/dist/renderer.d.ts +27 -0
- package/dist/renderer.d.ts.map +1 -0
- package/dist/renderer.js +46 -0
- package/dist/resolve-hatk.d.ts +6 -0
- package/dist/resolve-hatk.d.ts.map +1 -0
- package/dist/resolve-hatk.js +20 -0
- package/dist/response.d.ts +16 -0
- package/dist/response.d.ts.map +1 -0
- package/dist/response.js +69 -0
- package/dist/scanner.d.ts +21 -0
- package/dist/scanner.d.ts.map +1 -0
- package/dist/scanner.js +88 -0
- package/dist/seed.d.ts +19 -0
- package/dist/seed.d.ts.map +1 -1
- package/dist/seed.js +43 -4
- package/dist/server-init.d.ts +8 -0
- package/dist/server-init.d.ts.map +1 -0
- package/dist/server-init.js +62 -0
- package/dist/server.d.ts +26 -3
- package/dist/server.d.ts.map +1 -1
- package/dist/server.js +629 -635
- package/dist/setup.d.ts +28 -1
- package/dist/setup.d.ts.map +1 -1
- package/dist/setup.js +50 -3
- package/dist/templates/feed.tpl +14 -0
- package/dist/templates/hook.tpl +5 -0
- package/dist/templates/label.tpl +15 -0
- package/dist/templates/og.tpl +17 -0
- package/dist/templates/seed.tpl +11 -0
- package/dist/templates/setup.tpl +5 -0
- package/dist/templates/test-feed.tpl +19 -0
- package/dist/templates/test-xrpc.tpl +19 -0
- package/dist/templates/xrpc.tpl +41 -0
- package/dist/test.d.ts +1 -1
- package/dist/test.d.ts.map +1 -1
- package/dist/test.js +39 -32
- package/dist/views.js +1 -1
- package/dist/vite-plugin.d.ts +1 -1
- package/dist/vite-plugin.d.ts.map +1 -1
- package/dist/vite-plugin.js +254 -66
- package/dist/xrpc.d.ts +75 -11
- package/dist/xrpc.d.ts.map +1 -1
- package/dist/xrpc.js +189 -39
- package/package.json +14 -7
- package/public/admin.html +133 -54
- package/dist/db.d.ts.map +0 -1
- package/dist/fts.d.ts.map +0 -1
- package/dist/oauth/hooks.d.ts +0 -10
- package/dist/oauth/hooks.d.ts.map +0 -1
- package/dist/oauth/hooks.js +0 -40
- package/dist/schema.d.ts.map +0 -1
- package/dist/test-browser.d.ts +0 -14
- package/dist/test-browser.d.ts.map +0 -1
- package/dist/test-browser.js +0 -26
package/dist/config.d.ts
CHANGED

@@ -20,6 +20,7 @@ export interface OAuthConfig {
     issuer: string;
     scopes: string[];
     clients: OAuthClientConfig[];
+    cookieName?: string;
 }
 export interface BackfillConfig {
     signalCollections?: string[];
@@ -29,19 +30,48 @@ export interface BackfillConfig {
     fetchTimeout: number;
     maxRetries: number;
 }
+export interface ApnsPushConfig {
+    keyFile: string;
+    keyId: string;
+    teamId: string;
+    bundleId: string;
+    production?: boolean;
+}
+export interface PushConfig {
+    apns: ApnsPushConfig;
+}
+export interface CdnConfig {
+    url: string;
+    key: string;
+    salt: string;
+}
 export interface HatkConfig {
     relay: string;
     plc: string;
     port: number;
+    cdn: CdnConfig | null;
+    databaseEngine: 'duckdb' | 'sqlite';
     database: string;
     publicDir: string | null;
     collections: string[];
     backfill: BackfillConfig;
     ftsRebuildInterval: number;
     oauth: OAuthConfig | null;
+    push: PushConfig | null;
     admins: string[];
 }
+/** Input type for defineConfig — fields that have defaults are optional. */
+export type HatkConfigInput = Partial<Omit<HatkConfig, 'oauth' | 'backfill' | 'push' | 'cdn'>> & {
+    cdn?: CdnConfig | null;
+    oauth?: (Partial<OAuthConfig> & {
+        clients: OAuthClientConfig[];
+    }) | null;
+    backfill?: Partial<BackfillConfig>;
+    push?: PushConfig | null;
+};
+/** Identity function that provides type inference for hatk config files. */
+export declare function defineConfig(config: HatkConfigInput): HatkConfigInput;
 /** Derive HTTP URL from relay WebSocket URL (ws://host → http://host) */
 export declare function relayHttpUrl(relay: string): string;
-export declare function loadConfig(configPath: string): HatkConfig;
+export declare function loadConfig(configPath: string): Promise<HatkConfig>;
 //# sourceMappingURL=config.d.ts.map
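The new `defineConfig` helper plus `HatkConfigInput` make the config file itself type-checked TypeScript. A minimal sketch of a consuming `hatk.config.ts`, based only on the types above (the import path assumes the package re-exports `defineConfig` from its root; all values are illustrative):

    import { defineConfig } from '@hatk/hatk'

    export default defineConfig({
      relay: 'wss://relay.example.com',   // illustrative relay URL
      databaseEngine: 'duckdb',           // or 'sqlite' (the default)
      database: './data/app.db',
      collections: ['app.example.post'],  // hypothetical collection NSID
      oauth: { clients: [] },             // clients is the only required OAuth field
    })

Since every field with a default is optional in `HatkConfigInput`, an empty `defineConfig({})` also type-checks.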
package/dist/config.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"
+{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,WAAW,EAAE,MAAM,CAAA;CACpB;AAED,MAAM,WAAW,eAAe;IAC9B,UAAU,EAAE,MAAM,CAAA;IAClB,QAAQ,EAAE,OAAO,GAAG,QAAQ,GAAG,MAAM,CAAA;IACrC,KAAK,EAAE,OAAO,GAAG,SAAS,GAAG,MAAM,CAAA;IACnC,cAAc,EAAE,MAAM,GAAG,MAAM,GAAG,QAAQ,CAAA;IAC1C,OAAO,CAAC,EAAE,WAAW,EAAE,CAAA;CACxB;AAED,MAAM,WAAW,iBAAiB;IAChC,SAAS,EAAE,MAAM,CAAA;IACjB,WAAW,EAAE,MAAM,CAAA;IACnB,aAAa,EAAE,MAAM,EAAE,CAAA;IACvB,KAAK,CAAC,EAAE,MAAM,CAAA;CACf;AAED,MAAM,WAAW,WAAW;IAC1B,MAAM,EAAE,MAAM,CAAA;IACd,MAAM,EAAE,MAAM,EAAE,CAAA;IAChB,OAAO,EAAE,iBAAiB,EAAE,CAAA;IAC5B,UAAU,CAAC,EAAE,MAAM,CAAA;CACpB;AAED,MAAM,WAAW,cAAc;IAC7B,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAA;IAC5B,KAAK,CAAC,EAAE,MAAM,EAAE,CAAA;IAChB,WAAW,EAAE,OAAO,CAAA;IACpB,WAAW,EAAE,MAAM,CAAA;IACnB,YAAY,EAAE,MAAM,CAAA;IACpB,UAAU,EAAE,MAAM,CAAA;CACnB;AAED,MAAM,WAAW,cAAc;IAC7B,OAAO,EAAE,MAAM,CAAA;IACf,KAAK,EAAE,MAAM,CAAA;IACb,MAAM,EAAE,MAAM,CAAA;IACd,QAAQ,EAAE,MAAM,CAAA;IAChB,UAAU,CAAC,EAAE,OAAO,CAAA;CACrB;AAED,MAAM,WAAW,UAAU;IACzB,IAAI,EAAE,cAAc,CAAA;CACrB;AAED,MAAM,WAAW,SAAS;IACxB,GAAG,EAAE,MAAM,CAAA;IACX,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,EAAE,MAAM,CAAA;CACb;AAED,MAAM,WAAW,UAAU;IACzB,KAAK,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,EAAE,MAAM,CAAA;IACZ,GAAG,EAAE,SAAS,GAAG,IAAI,CAAA;IACrB,cAAc,EAAE,QAAQ,GAAG,QAAQ,CAAA;IACnC,QAAQ,EAAE,MAAM,CAAA;IAChB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAA;IACxB,WAAW,EAAE,MAAM,EAAE,CAAA;IACrB,QAAQ,EAAE,cAAc,CAAA;IACxB,kBAAkB,EAAE,MAAM,CAAA;IAC1B,KAAK,EAAE,WAAW,GAAG,IAAI,CAAA;IACzB,IAAI,EAAE,UAAU,GAAG,IAAI,CAAA;IACvB,MAAM,EAAE,MAAM,EAAE,CAAA;CACjB;AAED,4EAA4E;AAC5E,MAAM,MAAM,eAAe,GAAG,OAAO,CAAC,IAAI,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,GAAG,MAAM,GAAG,KAAK,CAAC,CAAC,GAAG;IAC/F,GAAG,CAAC,EAAE,SAAS,GAAG,IAAI,CAAA;IACtB,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG;QAAE,OAAO,EAAE,iBAAiB,EAAE,CAAA;KAAE,CAAC,GAAG,IAAI,CAAA;IACxE,QAAQ,CAAC,EAAE,OAAO,CAAC,cAAc,CAAC,CAAA;IAClC,IAAI,CAAC,EAAE,UAAU,GAAG,IAAI,CAAA;CACzB,CAAA;AAED,4EAA4E;AAC5E,wBAAgB,YAAY,CAAC,MAAM,EAAE,eAAe,GAAG,eAAe,CAErE;AAED,yEAAyE;AACzE,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED,wBAAsB,UAAU,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CA4DxE"}
package/dist/config.js
CHANGED

@@ -1,14 +1,40 @@
-
+var __rewriteRelativeImportExtension = (this && this.__rewriteRelativeImportExtension) || function (path, preserveJsx) {
+    if (typeof path === "string" && /^\.\.?\//.test(path)) {
+        return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) {
+            return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." + cm.toLowerCase() + "js");
+        });
+    }
+    return path;
+};
 import { resolve, dirname } from 'node:path';
-import
+import { existsSync } from 'node:fs';
+/** Identity function that provides type inference for hatk config files. */
+export function defineConfig(config) {
+    return config;
+}
 /** Derive HTTP URL from relay WebSocket URL (ws://host → http://host) */
 export function relayHttpUrl(relay) {
     return relay.replace(/^ws(s?):\/\//, 'http$1://');
 }
-export function loadConfig(configPath) {
-    const
-
-
+export async function loadConfig(configPath) {
+    const resolved = resolve(configPath);
+    if (!existsSync(resolved)) {
+        console.error(`Config file not found: ${resolved}`);
+        console.error(`hatk now uses hatk.config.ts instead of config.yaml.`);
+        console.error(`Create a hatk.config.ts file or run 'hatk new' to scaffold a project.`);
+        process.exit(1);
+    }
+    const configDir = dirname(resolved);
+    let mod;
+    try {
+        mod = await import(__rewriteRelativeImportExtension(/* @vite-ignore */ resolved));
+    }
+    catch (err) {
+        console.error(`Failed to load config file: ${resolved}`);
+        console.error(err.message || err);
+        process.exit(1);
+    }
+    const parsed = mod.default || {};
     const backfillRaw = parsed.backfill || {};
     const env = process.env;
     const database = env.DATABASE || parsed.database;
@@ -16,19 +42,24 @@ export function loadConfig(configPath) {
         relay: env.RELAY || parsed.relay || 'ws://localhost:2583',
         plc: env.DID_PLC_URL || parsed.plc || 'https://plc.directory',
         port: parseInt(env.PORT || '') || parsed.port || 3000,
+        databaseEngine: (env.DATABASE_ENGINE || parsed.databaseEngine || 'sqlite'),
         database: database ? resolve(configDir, database) : ':memory:',
-        publicDir: parsed.
+        publicDir: parsed.publicDir === null ? null : resolve(configDir, parsed.publicDir || './public'),
         collections: parsed.collections || [],
         backfill: {
             signalCollections: backfillRaw.signalCollections || undefined,
             repos: env.BACKFILL_REPOS ? env.BACKFILL_REPOS.split(',').map((s) => s.trim()) : backfillRaw.repos || undefined,
            fullNetwork: env.BACKFILL_FULL_NETWORK ? env.BACKFILL_FULL_NETWORK === 'true' : backfillRaw.fullNetwork || false,
-            parallelism: parseInt(env.BACKFILL_PARALLELISM || '') || backfillRaw.parallelism ||
+            parallelism: parseInt(env.BACKFILL_PARALLELISM || '') || backfillRaw.parallelism || 3,
             fetchTimeout: parseInt(env.BACKFILL_FETCH_TIMEOUT || '') || backfillRaw.fetchTimeout || 300,
             maxRetries: parseInt(env.BACKFILL_MAX_RETRIES || '') || backfillRaw.maxRetries || 5,
         },
-        ftsRebuildInterval: parseInt(env.FTS_REBUILD_INTERVAL || '') || parsed.ftsRebuildInterval ||
+        ftsRebuildInterval: parseInt(env.FTS_REBUILD_INTERVAL || '') || parsed.ftsRebuildInterval || 5000,
+        cdn: env.CDN_URL && env.CDN_KEY && env.CDN_SALT
+            ? { url: env.CDN_URL, key: env.CDN_KEY, salt: env.CDN_SALT }
+            : parsed.cdn || null,
         oauth: null,
+        push: parsed.push || null,
         admins: env.ADMINS ? env.ADMINS.split(',').map((s) => s.trim()) : parsed.admins || [],
     };
     const oauthRaw = parsed.oauth;
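`loadConfig` is now async because it dynamically imports the TypeScript config module (the `__rewriteRelativeImportExtension` preamble is TypeScript's emit helper for its `rewriteRelativeImportExtensions` option). The object literal above keeps one consistent precedence: environment variable first, then config-file value, then built-in default. A usage sketch, assuming `loadConfig` is reachable by callers:

    const config = await loadConfig('./hatk.config.ts')
    // DATABASE_ENGINE=duckdb in the environment overrides the file value;
    // with neither set, databaseEngine falls back to 'sqlite'.
    console.log(config.databaseEngine)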
package/dist/database/adapter-factory.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"adapter-factory.d.ts","sourceRoot":"","sources":["../../src/database/adapter-factory.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,YAAY,CAAA;AAE1D,wBAAsB,aAAa,CAAC,MAAM,EAAE,QAAQ,GAAG,QAAQ,GAAG,OAAO,CAAC;IACxE,OAAO,EAAE,YAAY,CAAA;IACrB,UAAU,EAAE,UAAU,GAAG,IAAI,CAAA;CAC9B,CAAC,CAmBD"}
package/dist/database/adapter-factory.js
ADDED

@@ -0,0 +1,20 @@
+export async function createAdapter(engine) {
+    switch (engine) {
+        case 'duckdb': {
+            const { DuckDBAdapter } = await import("./adapters/duckdb.js");
+            const { DuckDBSearchPort } = await import("./adapters/duckdb-search.js");
+            const adapter = new DuckDBAdapter();
+            const searchPort = new DuckDBSearchPort(adapter);
+            return { adapter, searchPort };
+        }
+        case 'sqlite': {
+            const { SQLiteAdapter } = await import("./adapters/sqlite.js");
+            const { SQLiteSearchPort } = await import("./adapters/sqlite-search.js");
+            const adapter = new SQLiteAdapter();
+            const searchPort = new SQLiteSearchPort(adapter);
+            return { adapter, searchPort };
+        }
+        default:
+            throw new Error(`Unsupported database engine: ${engine}`);
+    }
+}
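`createAdapter` lazy-imports only the selected engine, so the other driver (`@duckdb/node-api` or `better-sqlite3`) never has to be loaded. A wiring sketch (usage illustrative; `config` as produced by `loadConfig`):

    const { adapter, searchPort } = await createAdapter(config.databaseEngine)
    await adapter.open(config.database) // ':memory:' or a resolved file path
    const rows = await adapter.query('SELECT 1 AS ok')
    adapter.close()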
package/dist/database/adapters/duckdb-search.d.ts
ADDED

@@ -0,0 +1,12 @@
+import type { SearchPort } from '../ports.ts';
+import type { DatabasePort } from '../ports.ts';
+export declare class DuckDBSearchPort implements SearchPort {
+    private port;
+    constructor(port: DatabasePort);
+    buildIndex(shadowTable: string, sourceQuery: string, searchColumns: string[]): Promise<void>;
+    search(shadowTable: string, query: string, searchColumns: string[], limit: number, offset: number): Promise<Array<{
+        uri: string;
+        score: number;
+    }>>;
+}
+//# sourceMappingURL=duckdb-search.d.ts.map
package/dist/database/adapters/duckdb-search.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"duckdb-search.d.ts","sourceRoot":"","sources":["../../../src/database/adapters/duckdb-search.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAA;AAE/C,qBAAa,gBAAiB,YAAW,UAAU;IACrC,OAAO,CAAC,IAAI;gBAAJ,IAAI,EAAE,YAAY;IAEhC,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAiB5F,MAAM,CACV,WAAW,EAAE,MAAM,EACnB,KAAK,EAAE,MAAM,EACb,aAAa,EAAE,MAAM,EAAE,EACvB,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,MAAM,GACb,OAAO,CAAC,KAAK,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;CASlD"}
package/dist/database/adapters/duckdb-search.js
ADDED

@@ -0,0 +1,27 @@
+export class DuckDBSearchPort {
+    port;
+    constructor(port) {
+        this.port = port;
+    }
+    async buildIndex(shadowTable, sourceQuery, searchColumns) {
+        // Create shadow table
+        await this.port.execute(`CREATE OR REPLACE TABLE ${shadowTable} AS ${sourceQuery}`, []);
+        // Drop existing index
+        try {
+            await this.port.execute(`PRAGMA drop_fts_index('${shadowTable}')`, []);
+        }
+        catch { }
+        // Build FTS index
+        const colList = searchColumns.map((c) => `'${c}'`).join(', ');
+        await this.port.execute(`PRAGMA create_fts_index('${shadowTable}', 'uri', ${colList}, stemmer='porter', stopwords='english', strip_accents=1, lower=1, overwrite=1)`, []);
+    }
+    async search(shadowTable, query, searchColumns, limit, offset) {
+        const ftsSchema = `fts_main_${shadowTable}`;
+        const sql = `SELECT uri, ${ftsSchema}.match_bm25(uri, $1) AS score
+      FROM ${shadowTable}
+      WHERE score IS NOT NULL
+      ORDER BY score DESC
+      LIMIT $2 OFFSET $3`;
+        return this.port.query(sql, [query, limit, offset]);
+    }
+}
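`buildIndex` relies on DuckDB's `fts` extension: `PRAGMA create_fts_index` generates an `fts_main_<table>` schema, and its `match_bm25` macro supplies the score that `search` orders by. An illustrative round trip (shadow-table name and source query are made up):

    const search = new DuckDBSearchPort(adapter)
    await search.buildIndex(
      'posts_shadow', // hypothetical shadow table
      `SELECT uri, text FROM records WHERE collection = 'app.example.post'`, // hypothetical source
      ['text'],
    )
    const hits = await search.search('posts_shadow', 'merkle tree', ['text'], 25, 0)
    // hits: Array<{ uri: string; score: number }>

Note that this port rebuilds the whole index on each `buildIndex` call; only the SQLite port below updates incrementally.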
package/dist/database/adapters/duckdb.d.ts
ADDED

@@ -0,0 +1,25 @@
+import type { DatabasePort, BulkInserter, Dialect } from '../ports.ts';
+export declare class DuckDBAdapter implements DatabasePort {
+    dialect: Dialect;
+    private instance;
+    private writeCon;
+    private readCon;
+    private writeQueue;
+    private readQueue;
+    open(path: string): Promise<void>;
+    close(): void;
+    query<T = Record<string, unknown>>(sql: string, params?: unknown[]): Promise<T[]>;
+    execute(sql: string, params?: unknown[]): Promise<void>;
+    executeMultiple(sql: string): Promise<void>;
+    beginTransaction(): Promise<void>;
+    commit(): Promise<void>;
+    rollback(): Promise<void>;
+    createBulkInserter(table: string, _columns: string[], _options?: {
+        onConflict?: 'ignore' | 'replace';
+        batchSize?: number;
+    }): Promise<BulkInserter>;
+    /** Enqueue a read or write operation for serialization */
+    private enqueue;
+    private bindParams;
+}
+//# sourceMappingURL=duckdb.d.ts.map
package/dist/database/adapters/duckdb.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"duckdb.d.ts","sourceRoot":"","sources":["../../../src/database/adapters/duckdb.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAE,YAAY,EAAE,OAAO,EAAE,MAAM,aAAa,CAAA;AAEtE,qBAAa,aAAc,YAAW,YAAY;IAChD,OAAO,EAAE,OAAO,CAAW;IAE3B,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,QAAQ,CAAiD;IACjE,OAAO,CAAC,OAAO,CAAiD;IAChE,OAAO,CAAC,UAAU,CAAoB;IACtC,OAAO,CAAC,SAAS,CAAoB;IAE/B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAMvC,KAAK,IAAI,IAAI;IAYP,KAAK,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,GAAE,OAAO,EAAO,GAAG,OAAO,CAAC,CAAC,EAAE,CAAC;IAarF,OAAO,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,GAAE,OAAO,EAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAY3D,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAQ3C,gBAAgB,IAAI,OAAO,CAAC,IAAI,CAAC;IAMjC,MAAM,IAAI,OAAO,CAAC,IAAI,CAAC;IAMvB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAMzB,kBAAkB,CACtB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,EAAE,EAClB,QAAQ,CAAC,EAAE;QAAE,UAAU,CAAC,EAAE,QAAQ,GAAG,SAAS,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GACnE,OAAO,CAAC,YAAY,CAAC;IAqCxB,0DAA0D;IAC1D,OAAO,CAAC,OAAO;IAkBf,OAAO,CAAC,UAAU;CAyBnB"}
package/dist/database/adapters/duckdb.js
ADDED

@@ -0,0 +1,161 @@
+import { DuckDBInstance } from '@duckdb/node-api';
+export class DuckDBAdapter {
+    dialect = 'duckdb';
+    instance;
+    writeCon;
+    readCon;
+    writeQueue = Promise.resolve();
+    readQueue = Promise.resolve();
+    async open(path) {
+        this.instance = await DuckDBInstance.create(path === ':memory:' ? undefined : path);
+        this.writeCon = await this.instance.connect();
+        this.readCon = await this.instance.connect();
+    }
+    close() {
+        try {
+            this.readCon?.closeSync();
+        }
+        catch { }
+        try {
+            this.writeCon?.closeSync();
+        }
+        catch { }
+        try {
+            this.instance?.closeSync();
+        }
+        catch { }
+    }
+    async query(sql, params = []) {
+        return this.enqueue('read', async () => {
+            if (params.length === 0) {
+                const reader = await this.readCon.runAndReadAll(sql);
+                return reader.getRowObjects();
+            }
+            const prepared = await this.readCon.prepare(sql);
+            this.bindParams(prepared, params);
+            const reader = await prepared.runAndReadAll();
+            return reader.getRowObjects();
+        });
+    }
+    async execute(sql, params = []) {
+        return this.enqueue('write', async () => {
+            if (params.length === 0) {
+                await this.writeCon.run(sql);
+                return;
+            }
+            const prepared = await this.writeCon.prepare(sql);
+            this.bindParams(prepared, params);
+            await prepared.run();
+        });
+    }
+    async executeMultiple(sql) {
+        return this.enqueue('write', async () => {
+            for (const statement of sql.split(';').filter((s) => s.trim())) {
+                await this.writeCon.run(statement);
+            }
+        });
+    }
+    async beginTransaction() {
+        return this.enqueue('write', async () => {
+            await this.writeCon.run('BEGIN TRANSACTION');
+        });
+    }
+    async commit() {
+        return this.enqueue('write', async () => {
+            await this.writeCon.run('COMMIT');
+        });
+    }
+    async rollback() {
+        return this.enqueue('write', async () => {
+            await this.writeCon.run('ROLLBACK');
+        });
+    }
+    async createBulkInserter(table, _columns, _options) {
+        const appender = await this.writeCon.createAppender(table.replace(/"/g, ''));
+        return {
+            append(values) {
+                for (const value of values) {
+                    if (value === null || value === undefined) {
+                        appender.appendNull();
+                    }
+                    else if (typeof value === 'string') {
+                        appender.appendVarchar(value);
+                    }
+                    else if (typeof value === 'number') {
+                        if (Number.isInteger(value)) {
+                            appender.appendInteger(value);
+                        }
+                        else {
+                            appender.appendDouble(value);
+                        }
+                    }
+                    else if (typeof value === 'boolean') {
+                        appender.appendBoolean(value);
+                    }
+                    else if (typeof value === 'bigint') {
+                        appender.appendBigInt(value);
+                    }
+                    else if (value instanceof Uint8Array) {
+                        appender.appendBlob(value);
+                    }
+                    else {
+                        appender.appendVarchar(String(value));
+                    }
+                }
+                appender.endRow();
+            },
+            async flush() {
+                appender.flushSync();
+            },
+            async close() {
+                appender.flushSync();
+                appender.closeSync();
+            },
+        };
+    }
+    /** Enqueue a read or write operation for serialization */
+    enqueue(queue, fn) {
+        if (queue === 'write') {
+            const p = this.writeQueue.then(fn);
+            this.writeQueue = p.then(() => { }, () => { });
+            return p;
+        }
+        else {
+            const p = this.readQueue.then(fn);
+            this.readQueue = p.then(() => { }, () => { });
+            return p;
+        }
+    }
+    bindParams(prepared, params) {
+        for (let i = 0; i < params.length; i++) {
+            const idx = i + 1;
+            const value = params[i];
+            if (value === null || value === undefined) {
+                prepared.bindNull(idx);
+            }
+            else if (typeof value === 'string') {
+                prepared.bindVarchar(idx, value);
+            }
+            else if (typeof value === 'number') {
+                if (Number.isInteger(value)) {
+                    prepared.bindInteger(idx, value);
+                }
+                else {
+                    prepared.bindDouble(idx, value);
+                }
+            }
+            else if (typeof value === 'boolean') {
+                prepared.bindBoolean(idx, value);
+            }
+            else if (typeof value === 'bigint') {
+                prepared.bindBigInt(idx, value);
+            }
+            else if (value instanceof Uint8Array) {
+                prepared.bindBlob(idx, value);
+            }
+            else {
+                prepared.bindVarchar(idx, String(value));
+            }
+        }
+    }
+}
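The adapter serializes all reads on one connection and all writes on another through promise-chain queues (`enqueue`), and bulk loads bypass prepared statements in favor of DuckDB's appender API. A bulk-insert sketch (table and column names hypothetical):

    const inserter = await adapter.createBulkInserter('records', ['uri', 'cid'])
    inserter.append(['at://did:plc:example/app.example.post/1', 'cid-1'])
    inserter.append(['at://did:plc:example/app.example.post/2', 'cid-2'])
    await inserter.close() // flushes and closes the underlying appender

Unlike the SQLite inserter below, `_columns` and `_options` go unused here, so `onConflict` has no effect on the DuckDB path.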
package/dist/database/adapters/sqlite-search.d.ts
ADDED

@@ -0,0 +1,23 @@
+import type { SearchPort } from '../ports.ts';
+import type { DatabasePort } from '../ports.ts';
+/**
+ * SQLite FTS5-based search port with incremental updates.
+ *
+ * Uses external content FTS5 tables (content=shadowTable) so the FTS index
+ * references the shadow data table. Updates happen incrementally per-record
+ * instead of dropping and rebuilding the entire index.
+ */
+export declare class SQLiteSearchPort implements SearchPort {
+    private port;
+    constructor(port: DatabasePort);
+    indexExists(shadowTable: string): Promise<boolean>;
+    buildIndex(shadowTable: string, sourceQuery: string, searchColumns: string[]): Promise<void>;
+    updateIndex(shadowTable: string, uri: string, row: Record<string, string | null>, searchColumns: string[]): Promise<void>;
+    deleteFromIndex(shadowTable: string, uri: string, searchColumns: string[]): Promise<void>;
+    private _deleteFromFts;
+    search(shadowTable: string, query: string, _searchColumns: string[], limit: number, offset: number): Promise<Array<{
+        uri: string;
+        score: number;
+    }>>;
+}
+//# sourceMappingURL=sqlite-search.d.ts.map
package/dist/database/adapters/sqlite-search.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"sqlite-search.d.ts","sourceRoot":"","sources":["../../../src/database/adapters/sqlite-search.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAA;AAE/C;;;;;;GAMG;AACH,qBAAa,gBAAiB,YAAW,UAAU;IACrC,OAAO,CAAC,IAAI;gBAAJ,IAAI,EAAE,YAAY;IAEhC,WAAW,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAQlD,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAuB5F,WAAW,CACf,WAAW,EAAE,MAAM,EACnB,GAAG,EAAE,MAAM,EACX,GAAG,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAClC,aAAa,EAAE,MAAM,EAAE,GACtB,OAAO,CAAC,IAAI,CAAC;IA2BV,eAAe,CAAC,WAAW,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;YAKjF,cAAc;IAatB,MAAM,CACV,WAAW,EAAE,MAAM,EACnB,KAAK,EAAE,MAAM,EACb,cAAc,EAAE,MAAM,EAAE,EACxB,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,MAAM,GACb,OAAO,CAAC,KAAK,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;CAWlD"}
package/dist/database/adapters/sqlite-search.js
ADDED

@@ -0,0 +1,74 @@
+/**
+ * SQLite FTS5-based search port with incremental updates.
+ *
+ * Uses external content FTS5 tables (content=shadowTable) so the FTS index
+ * references the shadow data table. Updates happen incrementally per-record
+ * instead of dropping and rebuilding the entire index.
+ */
+export class SQLiteSearchPort {
+    port;
+    constructor(port) {
+        this.port = port;
+    }
+    async indexExists(shadowTable) {
+        const rows = await this.port.query(`SELECT 1 FROM sqlite_master WHERE type='table' AND name IN ($1, $2)`, [
+            shadowTable,
+            `${shadowTable}_fts`,
+        ]);
+        return rows.length >= 2;
+    }
+    async buildIndex(shadowTable, sourceQuery, searchColumns) {
+        await this.port.execute(`DROP TABLE IF EXISTS ${shadowTable}_fts`, []);
+        await this.port.execute(`DROP TABLE IF EXISTS ${shadowTable}`, []);
+        // Create shadow data table from source query
+        await this.port.execute(`CREATE TABLE ${shadowTable} AS ${sourceQuery}`, []);
+        await this.port.execute(`CREATE UNIQUE INDEX IF NOT EXISTS ${shadowTable}_uri ON ${shadowTable}(uri)`, []);
+        // Create FTS5 virtual table with external content pointing to shadow table
+        const colList = searchColumns.join(', ');
+        await this.port.execute(`CREATE VIRTUAL TABLE ${shadowTable}_fts USING fts5(uri UNINDEXED, ${colList}, content=${shadowTable}, content_rowid=rowid, tokenize='porter unicode61 remove_diacritics 2')`, []);
+        // Populate FTS from shadow table
+        const selectCols = ['uri', ...searchColumns].map((c) => `COALESCE(CAST(${c} AS TEXT), '')`);
+        await this.port.execute(`INSERT INTO ${shadowTable}_fts (uri, ${colList}) SELECT ${selectCols.join(', ')} FROM ${shadowTable}`, []);
+    }
+    async updateIndex(shadowTable, uri, row, searchColumns) {
+        const colList = searchColumns.join(', ');
+        // Remove old FTS entry if record already indexed
+        await this._deleteFromFts(shadowTable, uri, searchColumns);
+        // Upsert shadow table
+        const placeholders = searchColumns.map((_, i) => `$${i + 2}`);
+        const setClauses = searchColumns.map((c, i) => `${c} = $${i + 2}`);
+        const values = [uri, ...searchColumns.map((c) => row[c] ?? null)];
+        await this.port.execute(`INSERT INTO ${shadowTable} (uri, ${colList}) VALUES ($1, ${placeholders.join(', ')}) ON CONFLICT(uri) DO UPDATE SET ${setClauses.join(', ')}`, values);
+        // Read back rowid and insert new FTS entry
+        const rows = await this.port.query(`SELECT rowid FROM ${shadowTable} WHERE uri = $1`, [uri]);
+        if (rows.length > 0) {
+            const rowid = rows[0].rowid;
+            const ftsPlaceholders = searchColumns.map((_, i) => `$${i + 3}`);
+            await this.port.execute(`INSERT INTO ${shadowTable}_fts(rowid, uri, ${colList}) VALUES($1, $2, ${ftsPlaceholders.join(', ')})`, [rowid, uri, ...searchColumns.map((c) => row[c] ?? '')]);
+        }
+    }
+    async deleteFromIndex(shadowTable, uri, searchColumns) {
+        await this._deleteFromFts(shadowTable, uri, searchColumns);
+        await this.port.execute(`DELETE FROM ${shadowTable} WHERE uri = $1`, [uri]);
+    }
+    async _deleteFromFts(shadowTable, uri, searchColumns) {
+        const colList = searchColumns.join(', ');
+        const rows = await this.port.query(`SELECT rowid, uri, ${colList} FROM ${shadowTable} WHERE uri = $1`, [uri]);
+        if (rows.length === 0)
+            return;
+        const old = rows[0];
+        const placeholders = searchColumns.map((_, i) => `$${i + 3}`);
+        await this.port.execute(`INSERT INTO ${shadowTable}_fts(${shadowTable}_fts, rowid, uri, ${colList}) VALUES('delete', $1, $2, ${placeholders.join(', ')})`, [old.rowid, uri, ...searchColumns.map((c) => old[c] ?? '')]);
+    }
+    async search(shadowTable, query, _searchColumns, limit, offset) {
+        const escaped = query.replace(/['"*(){}[\]^~\\:.]/g, ' ').trim();
+        if (!escaped)
+            return [];
+        const sql = `SELECT uri, -bm25(${shadowTable}_fts) AS score
+      FROM ${shadowTable}_fts
+      WHERE ${shadowTable}_fts MATCH $1
+      ORDER BY score DESC
+      LIMIT $2 OFFSET $3`;
+        return this.port.query(sql, [escaped, limit, offset]);
+    }
+}
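Because the FTS5 table uses external content, a row cannot simply be deleted from the index; its old column values must be replayed through FTS5's special `'delete'` command, which is what `_deleteFromFts` does. That bookkeeping is what makes per-record `updateIndex`/`deleteFromIndex` possible without a rebuild. An incremental-update sketch (names illustrative):

    const search = new SQLiteSearchPort(adapter)
    const uri = 'at://did:plc:example/app.example.post/1' // hypothetical record
    // Patch one record in both the shadow table and the FTS index:
    await search.updateIndex('posts_shadow', uri, { text: 'edited text' }, ['text'])
    // Remove it from both:
    await search.deleteFromIndex('posts_shadow', uri, ['text'])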
package/dist/database/adapters/sqlite.d.ts
ADDED

@@ -0,0 +1,18 @@
+import type { DatabasePort, BulkInserter, Dialect } from '../ports.ts';
+export declare class SQLiteAdapter implements DatabasePort {
+    dialect: Dialect;
+    private db;
+    open(path: string): Promise<void>;
+    close(): void;
+    query<T = Record<string, unknown>>(sql: string, params?: unknown[]): Promise<T[]>;
+    execute(sql: string, params?: unknown[]): Promise<void>;
+    executeMultiple(sql: string): Promise<void>;
+    beginTransaction(): Promise<void>;
+    commit(): Promise<void>;
+    rollback(): Promise<void>;
+    createBulkInserter(table: string, columns: string[], options?: {
+        onConflict?: 'ignore' | 'replace';
+        batchSize?: number;
+    }): Promise<BulkInserter>;
+}
+//# sourceMappingURL=sqlite.d.ts.map
package/dist/database/adapters/sqlite.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"sqlite.d.ts","sourceRoot":"","sources":["../../../src/database/adapters/sqlite.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAE,YAAY,EAAE,OAAO,EAAE,MAAM,aAAa,CAAA;AAqBtE,qBAAa,aAAc,YAAW,YAAY;IAChD,OAAO,EAAE,OAAO,CAAW;IAE3B,OAAO,CAAC,EAAE,CAAoB;IAExB,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAOvC,KAAK,IAAI,IAAI;IAMP,KAAK,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,GAAE,OAAO,EAAO,GAAG,OAAO,CAAC,CAAC,EAAE,CAAC;IAMrF,OAAO,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,GAAE,OAAO,EAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAM3D,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAI3C,gBAAgB,IAAI,OAAO,CAAC,IAAI,CAAC;IAIjC,MAAM,IAAI,OAAO,CAAC,IAAI,CAAC;IAIvB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAIzB,kBAAkB,CACtB,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,MAAM,EAAE,EACjB,OAAO,CAAC,EAAE;QAAE,UAAU,CAAC,EAAE,QAAQ,GAAG,SAAS,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GAClE,OAAO,CAAC,YAAY,CAAC;CAmCzB"}
package/dist/database/adapters/sqlite.js
ADDED

@@ -0,0 +1,88 @@
+import Database from 'better-sqlite3';
+/**
+ * Translate DuckDB-style `$1, $2` placeholders to SQLite `?` placeholders.
+ * Handles repeated references to the same `$N` by duplicating the param value.
+ * Returns the translated SQL and expanded params array.
+ */
+function translateParams(sql, params) {
+    if (params.length === 0)
+        return { sql, params };
+    const expandedParams = [];
+    const translated = sql.replace(/\$(\d+)/g, (_match, numStr) => {
+        const idx = parseInt(numStr) - 1; // $1 → index 0
+        const val = params[idx];
+        expandedParams.push(typeof val === 'boolean' ? (val ? 1 : 0) : val);
+        return '?';
+    });
+    return { sql: translated, params: expandedParams };
+}
+export class SQLiteAdapter {
+    dialect = 'sqlite';
+    db;
+    async open(path) {
+        this.db = new Database(path === ':memory:' ? ':memory:' : path);
+        this.db.pragma('journal_mode = WAL');
+        this.db.pragma('synchronous = NORMAL');
+        this.db.pragma('foreign_keys = ON');
+    }
+    close() {
+        try {
+            this.db?.close();
+        }
+        catch { }
+    }
+    async query(sql, params = []) {
+        const t = translateParams(sql, params);
+        const stmt = this.db.prepare(t.sql);
+        return stmt.all(...t.params);
+    }
+    async execute(sql, params = []) {
+        const t = translateParams(sql, params);
+        const stmt = this.db.prepare(t.sql);
+        stmt.run(...t.params);
+    }
+    async executeMultiple(sql) {
+        this.db.exec(sql);
+    }
+    async beginTransaction() {
+        this.db.exec('BEGIN');
+    }
+    async commit() {
+        this.db.exec('COMMIT');
+    }
+    async rollback() {
+        this.db.exec('ROLLBACK');
+    }
+    async createBulkInserter(table, columns, options) {
+        const placeholders = columns.map(() => '?').join(', ');
+        const conflict = options?.onConflict === 'ignore' ? ' OR IGNORE' : options?.onConflict === 'replace' ? ' OR REPLACE' : '';
+        const sql = `INSERT${conflict} INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
+        const stmt = this.db.prepare(sql);
+        const buffer = [];
+        const batchSize = options?.batchSize ?? 5000;
+        const flushBuffer = this.db.transaction(() => {
+            for (const row of buffer) {
+                stmt.run(...row);
+            }
+        });
+        const flush = () => {
+            if (buffer.length > 0) {
+                flushBuffer();
+                buffer.length = 0;
+            }
+        };
+        return {
+            append(values) {
+                buffer.push(values);
+                if (buffer.length >= batchSize)
+                    flush();
+            },
+            async flush() {
+                flush();
+            },
+            async close() {
+                flush();
+            },
+        };
+    }
+}
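`translateParams` is what lets shared code emit a single placeholder style: queries are written with DuckDB-style `$N`, and this adapter rewrites them to positional `?`, duplicating values for repeated `$N` and coercing booleans to 0/1 (better-sqlite3 rejects JS booleans as bind values). Tracing the function as written (it is module-private; the calls are illustrative):

    translateParams('SELECT * FROM t WHERE a = $1 OR b = $1', ['x'])
    // → { sql: 'SELECT * FROM t WHERE a = ? OR b = ?', params: ['x', 'x'] }

    translateParams('UPDATE t SET flag = $2 WHERE uri = $1', ['at://example', true])
    // → { sql: 'UPDATE t SET flag = ? WHERE uri = ?', params: [1, 'at://example'] }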