@hatk/hatk 0.0.1-alpha.21 → 0.0.1-alpha.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. package/dist/backfill.d.ts.map +1 -1
  2. package/dist/backfill.js +4 -4
  3. package/dist/car.js +1 -1
  4. package/dist/cli.js +111 -54
  5. package/dist/config.d.ts +1 -0
  6. package/dist/config.d.ts.map +1 -1
  7. package/dist/config.js +1 -0
  8. package/dist/database/adapter-factory.d.ts +6 -0
  9. package/dist/database/adapter-factory.d.ts.map +1 -0
  10. package/dist/database/adapter-factory.js +20 -0
  11. package/dist/database/adapters/duckdb-search.d.ts +12 -0
  12. package/dist/database/adapters/duckdb-search.d.ts.map +1 -0
  13. package/dist/database/adapters/duckdb-search.js +27 -0
  14. package/dist/database/adapters/duckdb.d.ts +25 -0
  15. package/dist/database/adapters/duckdb.d.ts.map +1 -0
  16. package/dist/database/adapters/duckdb.js +161 -0
  17. package/dist/database/adapters/sqlite-search.d.ts +18 -0
  18. package/dist/database/adapters/sqlite-search.d.ts.map +1 -0
  19. package/dist/database/adapters/sqlite-search.js +38 -0
  20. package/dist/database/adapters/sqlite.d.ts +18 -0
  21. package/dist/database/adapters/sqlite.d.ts.map +1 -0
  22. package/dist/database/adapters/sqlite.js +87 -0
  23. package/dist/database/db.d.ts +149 -0
  24. package/dist/database/db.d.ts.map +1 -0
  25. package/dist/database/db.js +1456 -0
  26. package/dist/database/dialect.d.ts +45 -0
  27. package/dist/database/dialect.d.ts.map +1 -0
  28. package/dist/database/dialect.js +72 -0
  29. package/dist/database/fts.d.ts +24 -0
  30. package/dist/database/fts.d.ts.map +1 -0
  31. package/dist/database/fts.js +777 -0
  32. package/dist/database/index.d.ts +7 -0
  33. package/dist/database/index.d.ts.map +1 -0
  34. package/dist/database/index.js +6 -0
  35. package/dist/database/ports.d.ts +44 -0
  36. package/dist/database/ports.d.ts.map +1 -0
  37. package/dist/database/ports.js +1 -0
  38. package/dist/database/schema.d.ts +60 -0
  39. package/dist/database/schema.d.ts.map +1 -0
  40. package/dist/database/schema.js +388 -0
  41. package/dist/feeds.js +1 -1
  42. package/dist/hooks.d.ts +15 -0
  43. package/dist/hooks.d.ts.map +1 -0
  44. package/dist/hooks.js +65 -0
  45. package/dist/hydrate.js +1 -1
  46. package/dist/indexer.d.ts +19 -0
  47. package/dist/indexer.d.ts.map +1 -1
  48. package/dist/indexer.js +34 -4
  49. package/dist/labels.d.ts +20 -0
  50. package/dist/labels.d.ts.map +1 -1
  51. package/dist/labels.js +50 -2
  52. package/dist/logger.d.ts +29 -0
  53. package/dist/logger.d.ts.map +1 -1
  54. package/dist/logger.js +29 -0
  55. package/dist/main.js +37 -32
  56. package/dist/mst.d.ts +15 -0
  57. package/dist/mst.d.ts.map +1 -1
  58. package/dist/mst.js +13 -0
  59. package/dist/oauth/db.d.ts.map +1 -1
  60. package/dist/oauth/db.js +41 -15
  61. package/dist/oauth/server.d.ts.map +1 -1
  62. package/dist/oauth/server.js +11 -6
  63. package/dist/opengraph.js +1 -1
  64. package/dist/schema.d.ts +8 -0
  65. package/dist/schema.d.ts.map +1 -1
  66. package/dist/schema.js +29 -0
  67. package/dist/seed.d.ts +19 -0
  68. package/dist/seed.d.ts.map +1 -1
  69. package/dist/seed.js +43 -4
  70. package/dist/server.d.ts.map +1 -1
  71. package/dist/server.js +12 -6
  72. package/dist/setup.d.ts +21 -1
  73. package/dist/setup.d.ts.map +1 -1
  74. package/dist/setup.js +37 -2
  75. package/dist/test.d.ts +1 -1
  76. package/dist/test.d.ts.map +1 -1
  77. package/dist/test.js +23 -9
  78. package/dist/views.js +1 -1
  79. package/dist/vite-plugin.d.ts.map +1 -1
  80. package/dist/vite-plugin.js +10 -0
  81. package/dist/xrpc.d.ts +23 -0
  82. package/dist/xrpc.d.ts.map +1 -1
  83. package/dist/xrpc.js +37 -2
  84. package/package.json +3 -1
package/dist/backfill.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"backfill.d.ts","sourceRoot":"","sources":["../src/backfill.ts"],"names":[],"mappings":"AAiBA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAEjD,6CAA6C;AAC7C,UAAU,YAAY;IACpB,wFAAwF;IACxF,MAAM,EAAE,MAAM,CAAA;IACd,8FAA8F;IAC9F,MAAM,EAAE,MAAM,CAAA;IACd,yEAAyE;IACzE,WAAW,EAAE,GAAG,CAAC,MAAM,CAAC,CAAA;IACxB,wDAAwD;IACxD,MAAM,EAAE,cAAc,CAAA;CACvB;AAuGD;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AACH,wBAAsB,YAAY,CAAC,GAAG,EAAE,MAAM,EAAE,WAAW,EAAE,GAAG,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAsJ/G;AAgCD;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,wBAAsB,WAAW,CAAC,IAAI,EAAE,YAAY,GAAG,OAAO,CAAC,MAAM,CAAC,CAkIrE"}
+ {"version":3,"file":"backfill.d.ts","sourceRoot":"","sources":["../src/backfill.ts"],"names":[],"mappings":"AAiBA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAEjD,6CAA6C;AAC7C,UAAU,YAAY;IACpB,wFAAwF;IACxF,MAAM,EAAE,MAAM,CAAA;IACd,8FAA8F;IAC9F,MAAM,EAAE,MAAM,CAAA;IACd,yEAAyE;IACzE,WAAW,EAAE,GAAG,CAAC,MAAM,CAAC,CAAA;IACxB,wDAAwD;IACxD,MAAM,EAAE,cAAc,CAAA;CACvB;AAoGD;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AACH,wBAAsB,YAAY,CAAC,GAAG,EAAE,MAAM,EAAE,WAAW,EAAE,GAAG,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAsJ/G;AA8BD;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,wBAAsB,WAAW,CAAC,IAAI,EAAE,YAAY,GAAG,OAAO,CAAC,MAAM,CAAC,CAkIrE"}
package/dist/backfill.js CHANGED
@@ -1,7 +1,7 @@
  import { parseCarStream } from "./car.js";
  import { cborDecode } from "./cbor.js";
  import { walkMst } from "./mst.js";
- import { setRepoStatus, getRepoStatus, getRepoRev, getRepoRetryInfo, listRetryEligibleRepos, listPendingRepos, querySQL, runSQL, getSchema, bulkInsertRecords, } from "./db.js";
+ import { setRepoStatus, getRepoStatus, getRepoRev, getRepoRetryInfo, listRetryEligibleRepos, listPendingRepos, querySQL, runSQL, getSchema, bulkInsertRecords, } from "./database/db.js";
  import { emit, timer } from "./logger.js";
  /** In-memory cache of DID → PDS resolution results to avoid redundant lookups. */
  const pdsCache = new Map();
@@ -184,13 +184,13 @@ export async function backfillRepo(did, collections, fetchTimeout) {
  const schema = getSchema(col);
  if (!schema)
  continue;
- await runSQL(`DELETE FROM ${schema.tableName} WHERE did = $1`, did);
+ await runSQL(`DELETE FROM ${schema.tableName} WHERE did = $1`, [did]);
  for (const child of schema.children) {
- await runSQL(`DELETE FROM ${child.tableName} WHERE parent_did = $1`, did);
+ await runSQL(`DELETE FROM ${child.tableName} WHERE parent_did = $1`, [did]);
  }
  for (const union of schema.unions) {
  for (const branch of union.branches) {
- await runSQL(`DELETE FROM ${branch.tableName} WHERE parent_did = $1`, did);
+ await runSQL(`DELETE FROM ${branch.tableName} WHERE parent_did = $1`, [did]);
  }
  }
  }
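The runSQL calls above now pass bind parameters as an array instead of a bare value, in line with the array-based `params` signatures on the new database ports. A hedged sketch of the updated call shape (the deep import path and table name are illustrative, not from this diff):

```ts
// Sketch only: runSQL now lives in database/db.js per the import change above.
import { runSQL } from '@hatk/hatk/dist/database/db.js';

// before: await runSQL('DELETE FROM example_posts WHERE did = $1', did);
await runSQL('DELETE FROM example_posts WHERE did = $1', ['did:plc:example']);
```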
package/dist/car.js CHANGED
@@ -107,7 +107,7 @@ export async function parseCarStream(body) {
  while (len - pos < need) {
  const { done, value } = await reader.read();
  if (done)
- return (len - pos) >= need;
+ return len - pos >= need;
  byteLength += value.length;
  // Compact: shift remaining data to front when read cursor passes midpoint
  if (pos > 0 && pos > buf.length >>> 1) {
package/dist/cli.js CHANGED
@@ -1,8 +1,8 @@
  #!/usr/bin/env node
  import { mkdirSync, writeFileSync, existsSync, unlinkSync, readdirSync, readFileSync } from 'node:fs';
- import { resolve, join } from 'node:path';
- import { execSync } from 'node:child_process';
- import { loadLexicons } from "./schema.js";
+ import { resolve, join, dirname } from 'node:path';
+ import { execSync, spawn } from 'node:child_process';
+ import { loadLexicons, discoverCollections, buildSchemas } from "./database/schema.js";
  import { loadConfig } from "./config.js";
  const args = process.argv.slice(2);
  const command = args[0];
@@ -34,6 +34,31 @@ async function ensurePds() {
  console.error('[dev] PDS failed to start');
  process.exit(1);
  }
+ /** Spawn a long-running process and forward SIGINT/SIGTERM for clean shutdown. */
+ function spawnForward(cmd, args, env) {
+ return new Promise((resolve, reject) => {
+ const child = spawn(cmd, args, {
+ stdio: 'inherit',
+ cwd: process.cwd(),
+ env: { ...process.env, ...env },
+ });
+ const onSignal = (sig) => {
+ child.kill(sig);
+ };
+ process.on('SIGINT', onSignal);
+ process.on('SIGTERM', onSignal);
+ child.on('close', (code, signal) => {
+ process.removeListener('SIGINT', onSignal);
+ process.removeListener('SIGTERM', onSignal);
+ if (signal === 'SIGINT' || signal === 'SIGTERM')
+ process.exit(0);
+ if (code === 0 || code === null)
+ resolve();
+ else
+ reject(new Error(`Process exited with code ${code}`));
+ });
+ });
+ }
  function runSeed() {
  const seedFile = resolve('seeds/seed.ts');
  if (!existsSync(seedFile))
@@ -45,7 +70,7 @@ function usage() {
  Usage: hatk <command> [options]

  Getting Started
- new <name> [--svelte] [--template <t>] Create a new hatk project
+ new <name> [--svelte] [--duckdb] [--template <t>] Create a new hatk project

  Running
  start Start the hatk server
@@ -303,7 +328,7 @@ const dirs = {
  if (command === 'new') {
  const name = args[1];
  if (!name) {
- console.error('Usage: hatk new <name> [--svelte] [--template <template-name>]');
+ console.error('Usage: hatk new <name> [--svelte] [--duckdb] [--template <template-name>]');
  process.exit(1);
  }
  const templateIdx = args.indexOf('--template');
@@ -341,6 +366,8 @@ if (command === 'new') {
  process.exit(0);
  }
  const withSvelte = args.includes('--svelte');
+ const withDuckdb = args.includes('--duckdb');
+ const dbEngine = withDuckdb ? 'duckdb' : 'sqlite';
  mkdirSync(dir);
  const subs = [
  'lexicons',
@@ -370,6 +397,7 @@ export default defineConfig({
  relay: 'ws://localhost:2583',
  plc: 'http://localhost:2582',
  port: 3000,
+ databaseEngine: '${dbEngine}',
  database: 'data/hatk.db',
  admins: [],
  backfill: {
@@ -511,6 +539,14 @@ export default defineConfig({
  properties: {
  uri: { type: 'string', format: 'at-uri' },
  cid: { type: 'string', format: 'cid' },
+ commit: {
+ type: 'object',
+ properties: {
+ cid: { type: 'string', format: 'cid' },
+ rev: { type: 'string' },
+ },
+ },
+ validationStatus: { type: 'string' },
  },
  },
  },
@@ -566,6 +602,14 @@ export default defineConfig({
  properties: {
  uri: { type: 'string', format: 'at-uri' },
  cid: { type: 'string', format: 'cid' },
+ commit: {
+ type: 'object',
+ properties: {
+ cid: { type: 'string', format: 'cid' },
+ rev: { type: 'string' },
+ },
+ },
+ validationStatus: { type: 'string' },
  },
  },
  },
@@ -855,6 +899,9 @@ EXPOSE 3000
  CMD ["node", "--experimental-strip-types", "--max-old-space-size=512", "node_modules/@hatk/hatk/dist/main.js", "hatk.config.ts"]
  `);
  const pkgDeps = { '@hatk/oauth-client': '*', hatk: '*' };
+ if (!withDuckdb) {
+ pkgDeps['better-sqlite3'] = '^11';
+ }
  const pkgDevDeps = {
  '@playwright/test': '^1',
  oxfmt: '^0.35.0',
@@ -1082,6 +1129,43 @@ a {
  </div>
  `);
  }
+ writeFileSync(join(dir, 'AGENTS.md'), `# hatk project
+
+ This is an AT Protocol application built with [hatk](https://github.com/hatk-dev/hatk).
+ Read the project's lexicons in \`lexicons/\` to understand the data model.
+ Types are generated from lexicons into \`hatk.generated.ts\` — never edit this file directly.
+
+ ## Project structure
+
+ | Directory | Purpose |
+ |-------------|------------------------------------------------------|
+ | \`lexicons/\` | AT Protocol lexicon schemas (JSON). Defines collections and XRPC methods |
+ | \`feeds/\` | Feed generators — each file exports a feed via \`defineFeed\` |
+ | \`xrpc/\` | XRPC method handlers — directory nesting maps to NSID segments |
+ | \`labels/\` | Label definitions and rules for moderation |
+ | \`setup/\` | Boot-time scripts (run before server starts). Prefix with numbers for ordering |
+ | \`seeds/\` | Test data seeding scripts for local development |
+ | \`hooks/\` | Lifecycle hooks (e.g. \`on-login.ts\`) |
+ | \`og/\` | OpenGraph image routes |
+ | \`jobs/\` | Periodic background tasks |
+ | \`test/\` | Test files (vitest). Run with \`hatk test\` |
+ | \`public/\` | Static files served at the root |
+
+ ## Key files
+
+ - \`hatk.config.ts\` — project configuration (see \`defineConfig\` for type info)
+ - \`hatk.generated.ts\` — auto-generated types and typed helpers. Regenerate with \`hatk generate types\`
+
+ ## Commands
+
+ Run \`npx hatk --help\` for the full list of commands.
+
+ Use \`npx hatk generate\` to scaffold new feeds, xrpc handlers, labels, and lexicons
+ rather than creating files manually. These generate files with the correct imports
+ from \`hatk.generated.ts\`.
+
+ After modifying lexicons, always run \`npx hatk generate types\` to update the generated types.
+ `);
  console.log(`Created ${name}/`);
  console.log(` hatk.config.ts`);
  console.log(` lexicons/ — lexicon JSON files (core + your own)`);
@@ -1486,25 +1570,14 @@ else if (command === 'destroy') {
  else if (command === 'dev') {
  await ensurePds();
  runSeed();
- try {
- if (existsSync(resolve('svelte.config.js')) && existsSync(resolve('src/app.html'))) {
- // SvelteKit project — vite dev starts the hatk server via the plugin
- execSync('npx vite dev', { stdio: 'inherit', cwd: process.cwd() });
- }
- else {
- // No frontend — just run the hatk server directly
- const mainPath = resolve(import.meta.dirname, 'main.js');
- execSync(`npx tsx ${mainPath} hatk.config.ts`, {
- stdio: 'inherit',
- cwd: process.cwd(),
- env: { ...process.env, DEV_MODE: '1' },
- });
- }
+ if (existsSync(resolve('svelte.config.js')) && existsSync(resolve('src/app.html'))) {
+ // SvelteKit project vite dev starts the hatk server via the plugin
+ await spawnForward('npx', ['vite', 'dev']);
  }
- catch (e) {
- if (e.signal === 'SIGINT' || e.signal === 'SIGTERM')
- process.exit(0);
- throw e;
+ else {
+ // No frontend just run the hatk server directly
+ const mainPath = resolve(import.meta.dirname, 'main.js');
+ await spawnForward('npx', ['tsx', mainPath, 'hatk.config.ts'], { DEV_MODE: '1' });
  }
  }
  else if (command === 'format' || command === 'fmt') {
@@ -1526,7 +1599,7 @@ else if (command === 'build') {
  else if (command === 'reset') {
  const config = await loadConfig(resolve('hatk.config.ts'));
  if (config.database !== ':memory:') {
- for (const suffix of ['', '.wal']) {
+ for (const suffix of ['', '.wal', '-shm', '-wal']) {
  const file = config.database + suffix;
  if (existsSync(file)) {
  unlinkSync(file);
@@ -1685,39 +1758,23 @@ else if (command === 'resolve') {
  }
  else if (command === 'schema') {
  const config = await loadConfig(resolve('hatk.config.ts'));
- if (config.database === ':memory:') {
- console.error('No database file configured (database is :memory:)');
- process.exit(1);
- }
- if (!existsSync(config.database)) {
- console.error(`Database not found: ${config.database}`);
- console.error('Run "hatk dev" first to create it.');
- process.exit(1);
- }
- const { DuckDBInstance } = await import('@duckdb/node-api');
- const instance = await DuckDBInstance.create(config.database);
- const con = await instance.connect();
- const tables = (await (await con.runAndReadAll(`SELECT table_name FROM information_schema.tables WHERE table_schema = 'main' ORDER BY table_name`)).getRowObjects());
- for (const { table_name } of tables) {
- console.log(`"${table_name}"`);
- const cols = (await (await con.runAndReadAll(`SELECT column_name, data_type, is_nullable FROM information_schema.columns WHERE table_name = '${table_name}' ORDER BY ordinal_position`)).getRowObjects());
- for (const col of cols) {
- const nullable = col.is_nullable === 'YES' ? '' : ' NOT NULL';
- console.log(` ${col.column_name.padEnd(20)} ${col.data_type}${nullable}`);
- }
- console.log();
+ const { initDatabase, getSchemaDump } = await import("./database/db.js");
+ const { createAdapter } = await import("./database/adapter-factory.js");
+ const { getDialect } = await import("./database/dialect.js");
+ const configDir2 = resolve('.');
+ const lexicons2 = loadLexicons(resolve(configDir2, 'lexicons'));
+ const collections2 = config.collections.length > 0 ? config.collections : discoverCollections(lexicons2);
+ const { schemas: schemas2, ddlStatements: ddl2 } = buildSchemas(lexicons2, collections2, getDialect(config.databaseEngine));
+ if (config.database !== ':memory:') {
+ mkdirSync(dirname(config.database), { recursive: true });
  }
+ const { adapter: adapter2 } = await createAdapter(config.databaseEngine);
+ await initDatabase(adapter2, config.database, schemas2, ddl2);
+ console.log(await getSchemaDump());
  }
  else if (command === 'start') {
- try {
- const mainPath = resolve(import.meta.dirname, 'main.js');
- execSync(`npx tsx ${mainPath} hatk.config.ts`, { stdio: 'inherit', cwd: process.cwd() });
- }
- catch (e) {
- if (e.signal === 'SIGINT' || e.signal === 'SIGTERM')
- process.exit(0);
- throw e;
- }
+ const mainPath = resolve(import.meta.dirname, 'main.js');
+ await spawnForward('npx', ['tsx', mainPath, 'hatk.config.ts']);
  }
  else {
  usage();
package/dist/config.d.ts CHANGED
@@ -33,6 +33,7 @@ export interface HatkConfig {
  relay: string;
  plc: string;
  port: number;
+ databaseEngine: 'duckdb' | 'sqlite';
  database: string;
  publicDir: string | null;
  collections: string[];
package/dist/config.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,WAAW,EAAE,MAAM,CAAA;CACpB;AAED,MAAM,WAAW,eAAe;IAC9B,UAAU,EAAE,MAAM,CAAA;IAClB,QAAQ,EAAE,OAAO,GAAG,QAAQ,GAAG,MAAM,CAAA;IACrC,KAAK,EAAE,OAAO,GAAG,SAAS,GAAG,MAAM,CAAA;IACnC,cAAc,EAAE,MAAM,GAAG,MAAM,GAAG,QAAQ,CAAA;IAC1C,OAAO,CAAC,EAAE,WAAW,EAAE,CAAA;CACxB;AAED,MAAM,WAAW,iBAAiB;IAChC,SAAS,EAAE,MAAM,CAAA;IACjB,WAAW,EAAE,MAAM,CAAA;IACnB,aAAa,EAAE,MAAM,EAAE,CAAA;IACvB,KAAK,CAAC,EAAE,MAAM,CAAA;CACf;AAED,MAAM,WAAW,WAAW;IAC1B,MAAM,EAAE,MAAM,CAAA;IACd,MAAM,EAAE,MAAM,EAAE,CAAA;IAChB,OAAO,EAAE,iBAAiB,EAAE,CAAA;CAC7B;AAED,MAAM,WAAW,cAAc;IAC7B,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAA;IAC5B,KAAK,CAAC,EAAE,MAAM,EAAE,CAAA;IAChB,WAAW,EAAE,OAAO,CAAA;IACpB,WAAW,EAAE,MAAM,CAAA;IACnB,YAAY,EAAE,MAAM,CAAA;IACpB,UAAU,EAAE,MAAM,CAAA;CACnB;AAED,MAAM,WAAW,UAAU;IACzB,KAAK,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,EAAE,MAAM,CAAA;IACZ,QAAQ,EAAE,MAAM,CAAA;IAChB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAA;IACxB,WAAW,EAAE,MAAM,EAAE,CAAA;IACrB,QAAQ,EAAE,cAAc,CAAA;IACxB,kBAAkB,EAAE,MAAM,CAAA;IAC1B,KAAK,EAAE,WAAW,GAAG,IAAI,CAAA;IACzB,MAAM,EAAE,MAAM,EAAE,CAAA;CACjB;AAED,4EAA4E;AAC5E,MAAM,MAAM,eAAe,GAAG,OAAO,CAAC,IAAI,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,CAAC,CAAC,GAAG;IAC9E,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG;QAAE,OAAO,EAAE,iBAAiB,EAAE,CAAA;KAAE,CAAC,GAAG,IAAI,CAAA;IACxE,QAAQ,CAAC,EAAE,OAAO,CAAC,cAAc,CAAC,CAAA;CACnC,CAAA;AAED,4EAA4E;AAC5E,wBAAgB,YAAY,CAAC,MAAM,EAAE,eAAe,GAAG,eAAe,CAErE;AAED,yEAAyE;AACzE,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED,wBAAsB,UAAU,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAuDxE"}
+ {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,WAAW,EAAE,MAAM,CAAA;CACpB;AAED,MAAM,WAAW,eAAe;IAC9B,UAAU,EAAE,MAAM,CAAA;IAClB,QAAQ,EAAE,OAAO,GAAG,QAAQ,GAAG,MAAM,CAAA;IACrC,KAAK,EAAE,OAAO,GAAG,SAAS,GAAG,MAAM,CAAA;IACnC,cAAc,EAAE,MAAM,GAAG,MAAM,GAAG,QAAQ,CAAA;IAC1C,OAAO,CAAC,EAAE,WAAW,EAAE,CAAA;CACxB;AAED,MAAM,WAAW,iBAAiB;IAChC,SAAS,EAAE,MAAM,CAAA;IACjB,WAAW,EAAE,MAAM,CAAA;IACnB,aAAa,EAAE,MAAM,EAAE,CAAA;IACvB,KAAK,CAAC,EAAE,MAAM,CAAA;CACf;AAED,MAAM,WAAW,WAAW;IAC1B,MAAM,EAAE,MAAM,CAAA;IACd,MAAM,EAAE,MAAM,EAAE,CAAA;IAChB,OAAO,EAAE,iBAAiB,EAAE,CAAA;CAC7B;AAED,MAAM,WAAW,cAAc;IAC7B,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAA;IAC5B,KAAK,CAAC,EAAE,MAAM,EAAE,CAAA;IAChB,WAAW,EAAE,OAAO,CAAA;IACpB,WAAW,EAAE,MAAM,CAAA;IACnB,YAAY,EAAE,MAAM,CAAA;IACpB,UAAU,EAAE,MAAM,CAAA;CACnB;AAED,MAAM,WAAW,UAAU;IACzB,KAAK,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,EAAE,MAAM,CAAA;IACZ,cAAc,EAAE,QAAQ,GAAG,QAAQ,CAAA;IACnC,QAAQ,EAAE,MAAM,CAAA;IAChB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAA;IACxB,WAAW,EAAE,MAAM,EAAE,CAAA;IACrB,QAAQ,EAAE,cAAc,CAAA;IACxB,kBAAkB,EAAE,MAAM,CAAA;IAC1B,KAAK,EAAE,WAAW,GAAG,IAAI,CAAA;IACzB,MAAM,EAAE,MAAM,EAAE,CAAA;CACjB;AAED,4EAA4E;AAC5E,MAAM,MAAM,eAAe,GAAG,OAAO,CAAC,IAAI,CAAC,UAAU,EAAE,OAAO,GAAG,UAAU,CAAC,CAAC,GAAG;IAC9E,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG;QAAE,OAAO,EAAE,iBAAiB,EAAE,CAAA;KAAE,CAAC,GAAG,IAAI,CAAA;IACxE,QAAQ,CAAC,EAAE,OAAO,CAAC,cAAc,CAAC,CAAA;CACnC,CAAA;AAED,4EAA4E;AAC5E,wBAAgB,YAAY,CAAC,MAAM,EAAE,eAAe,GAAG,eAAe,CAErE;AAED,yEAAyE;AACzE,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED,wBAAsB,UAAU,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAwDxE"}
package/dist/config.js CHANGED
@@ -42,6 +42,7 @@ export async function loadConfig(configPath) {
  relay: env.RELAY || parsed.relay || 'ws://localhost:2583',
  plc: env.DID_PLC_URL || parsed.plc || 'https://plc.directory',
  port: parseInt(env.PORT || '') || parsed.port || 3000,
+ databaseEngine: (env.DATABASE_ENGINE || parsed.databaseEngine || 'sqlite'),
  database: database ? resolve(configDir, database) : ':memory:',
  publicDir: parsed.publicDir === null ? null : resolve(configDir, parsed.publicDir || './public'),
  collections: parsed.collections || [],
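Combined with the new `config.d.ts` field above, the engine can now be chosen per project in `hatk.config.ts` or overridden with the `DATABASE_ENGINE` environment variable, falling back to SQLite. A minimal sketch of a config that opts into DuckDB, assuming `defineConfig` is imported from the package as in the template scaffolded by `hatk new` (values are illustrative):

```ts
// Hypothetical hatk.config.ts sketch; databaseEngine is the only field new in this release.
import { defineConfig } from 'hatk';

export default defineConfig({
  relay: 'ws://localhost:2583',
  plc: 'http://localhost:2582',
  port: 3000,
  databaseEngine: 'duckdb', // defaults to 'sqlite' when omitted
  database: 'data/hatk.db',
  admins: [],
});
```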
package/dist/database/adapter-factory.d.ts ADDED
@@ -0,0 +1,6 @@
+ import type { DatabasePort, SearchPort } from './ports.ts';
+ export declare function createAdapter(engine: 'duckdb' | 'sqlite'): Promise<{
+ adapter: DatabasePort;
+ searchPort: SearchPort | null;
+ }>;
+ //# sourceMappingURL=adapter-factory.d.ts.map
package/dist/database/adapter-factory.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"adapter-factory.d.ts","sourceRoot":"","sources":["../../src/database/adapter-factory.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,YAAY,CAAA;AAE1D,wBAAsB,aAAa,CAAC,MAAM,EAAE,QAAQ,GAAG,QAAQ,GAAG,OAAO,CAAC;IACxE,OAAO,EAAE,YAAY,CAAA;IACrB,UAAU,EAAE,UAAU,GAAG,IAAI,CAAA;CAC9B,CAAC,CAmBD"}
package/dist/database/adapter-factory.js ADDED
@@ -0,0 +1,20 @@
+ export async function createAdapter(engine) {
+ switch (engine) {
+ case 'duckdb': {
+ const { DuckDBAdapter } = await import("./adapters/duckdb.js");
+ const { DuckDBSearchPort } = await import("./adapters/duckdb-search.js");
+ const adapter = new DuckDBAdapter();
+ const searchPort = new DuckDBSearchPort(adapter);
+ return { adapter, searchPort };
+ }
+ case 'sqlite': {
+ const { SQLiteAdapter } = await import("./adapters/sqlite.js");
+ const { SQLiteSearchPort } = await import("./adapters/sqlite-search.js");
+ const adapter = new SQLiteAdapter();
+ const searchPort = new SQLiteSearchPort(adapter);
+ return { adapter, searchPort };
+ }
+ default:
+ throw new Error(`Unsupported database engine: ${engine}`);
+ }
+ }
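The factory dynamically imports only the adapter pair for the requested engine, so the unused driver is never loaded. A rough usage sketch, assuming the `open`/`query`/`close` members declared in the new `duckdb.d.ts` further down apply to both adapters (the deep import path is illustrative):

```ts
// Sketch only: wiring an adapter by hand instead of going through initDatabase.
import { createAdapter } from '@hatk/hatk/dist/database/adapter-factory.js';

const { adapter, searchPort } = await createAdapter('sqlite');
await adapter.open('data/hatk.db');
const rows = await adapter.query('SELECT 1 AS ok', []);
console.log(rows, searchPort !== null);
adapter.close();
```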
package/dist/database/adapters/duckdb-search.d.ts ADDED
@@ -0,0 +1,12 @@
+ import type { SearchPort } from '../ports.ts';
+ import type { DatabasePort } from '../ports.ts';
+ export declare class DuckDBSearchPort implements SearchPort {
+ private port;
+ constructor(port: DatabasePort);
+ buildIndex(shadowTable: string, sourceQuery: string, searchColumns: string[]): Promise<void>;
+ search(shadowTable: string, query: string, searchColumns: string[], limit: number, offset: number): Promise<Array<{
+ uri: string;
+ score: number;
+ }>>;
+ }
+ //# sourceMappingURL=duckdb-search.d.ts.map
package/dist/database/adapters/duckdb-search.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"duckdb-search.d.ts","sourceRoot":"","sources":["../../../src/database/adapters/duckdb-search.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAA;AAE/C,qBAAa,gBAAiB,YAAW,UAAU;IACrC,OAAO,CAAC,IAAI;gBAAJ,IAAI,EAAE,YAAY;IAEhC,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAiB5F,MAAM,CACV,WAAW,EAAE,MAAM,EACnB,KAAK,EAAE,MAAM,EACb,aAAa,EAAE,MAAM,EAAE,EACvB,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,MAAM,GACb,OAAO,CAAC,KAAK,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;CASlD"}
package/dist/database/adapters/duckdb-search.js ADDED
@@ -0,0 +1,27 @@
+ export class DuckDBSearchPort {
+ port;
+ constructor(port) {
+ this.port = port;
+ }
+ async buildIndex(shadowTable, sourceQuery, searchColumns) {
+ // Create shadow table
+ await this.port.execute(`CREATE OR REPLACE TABLE ${shadowTable} AS ${sourceQuery}`, []);
+ // Drop existing index
+ try {
+ await this.port.execute(`PRAGMA drop_fts_index('${shadowTable}')`, []);
+ }
+ catch { }
+ // Build FTS index
+ const colList = searchColumns.map((c) => `'${c}'`).join(', ');
+ await this.port.execute(`PRAGMA create_fts_index('${shadowTable}', 'uri', ${colList}, stemmer='porter', stopwords='english', strip_accents=1, lower=1, overwrite=1)`, []);
+ }
+ async search(shadowTable, query, searchColumns, limit, offset) {
+ const ftsSchema = `fts_main_${shadowTable}`;
+ const sql = `SELECT uri, ${ftsSchema}.match_bm25(uri, $1) AS score
+ FROM ${shadowTable}
+ WHERE score IS NOT NULL
+ ORDER BY score DESC
+ LIMIT $2 OFFSET $3`;
+ return this.port.query(sql, [query, limit, offset]);
+ }
+ }
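Both search ports share the same two-step flow: materialize a shadow table from a source query, then rank rows with BM25. A hedged sketch of that flow against the DuckDB port, with the shadow table, source table, and columns invented for illustration:

```ts
// Illustrative only; relies on DuckDB's fts extension, which the PRAGMAs above use.
import { createAdapter } from '@hatk/hatk/dist/database/adapter-factory.js';

const { adapter, searchPort } = await createAdapter('duckdb');
await adapter.open(':memory:');
await adapter.execute('CREATE TABLE example_posts (uri TEXT, text TEXT, title TEXT)', []);
if (searchPort) {
  await searchPort.buildIndex(
    'example_posts_search',                       // hypothetical shadow table
    'SELECT uri, text, title FROM example_posts', // hypothetical source query
    ['text', 'title'],
  );
  const hits = await searchPort.search('example_posts_search', 'hello', ['text', 'title'], 25, 0);
  // hits: Array<{ uri: string; score: number }>
}
adapter.close();
```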
package/dist/database/adapters/duckdb.d.ts ADDED
@@ -0,0 +1,25 @@
+ import type { DatabasePort, BulkInserter, Dialect } from '../ports.ts';
+ export declare class DuckDBAdapter implements DatabasePort {
+ dialect: Dialect;
+ private instance;
+ private writeCon;
+ private readCon;
+ private writeQueue;
+ private readQueue;
+ open(path: string): Promise<void>;
+ close(): void;
+ query<T = Record<string, unknown>>(sql: string, params?: unknown[]): Promise<T[]>;
+ execute(sql: string, params?: unknown[]): Promise<void>;
+ executeMultiple(sql: string): Promise<void>;
+ beginTransaction(): Promise<void>;
+ commit(): Promise<void>;
+ rollback(): Promise<void>;
+ createBulkInserter(table: string, _columns: string[], _options?: {
+ onConflict?: 'ignore' | 'replace';
+ batchSize?: number;
+ }): Promise<BulkInserter>;
+ /** Enqueue a read or write operation for serialization */
+ private enqueue;
+ private bindParams;
+ }
+ //# sourceMappingURL=duckdb.d.ts.map
package/dist/database/adapters/duckdb.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"duckdb.d.ts","sourceRoot":"","sources":["../../../src/database/adapters/duckdb.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAE,YAAY,EAAE,OAAO,EAAE,MAAM,aAAa,CAAA;AAEtE,qBAAa,aAAc,YAAW,YAAY;IAChD,OAAO,EAAE,OAAO,CAAW;IAE3B,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,QAAQ,CAAiD;IACjE,OAAO,CAAC,OAAO,CAAiD;IAChE,OAAO,CAAC,UAAU,CAAoB;IACtC,OAAO,CAAC,SAAS,CAAoB;IAE/B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAMvC,KAAK,IAAI,IAAI;IAYP,KAAK,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,GAAE,OAAO,EAAO,GAAG,OAAO,CAAC,CAAC,EAAE,CAAC;IAarF,OAAO,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,GAAE,OAAO,EAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAY3D,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAQ3C,gBAAgB,IAAI,OAAO,CAAC,IAAI,CAAC;IAMjC,MAAM,IAAI,OAAO,CAAC,IAAI,CAAC;IAMvB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAMzB,kBAAkB,CACtB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,EAAE,EAClB,QAAQ,CAAC,EAAE;QAAE,UAAU,CAAC,EAAE,QAAQ,GAAG,SAAS,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GACnE,OAAO,CAAC,YAAY,CAAC;IAqCxB,0DAA0D;IAC1D,OAAO,CAAC,OAAO;IAkBf,OAAO,CAAC,UAAU;CAyBnB"}
package/dist/database/adapters/duckdb.js ADDED
@@ -0,0 +1,161 @@
+ import { DuckDBInstance } from '@duckdb/node-api';
+ export class DuckDBAdapter {
+ dialect = 'duckdb';
+ instance;
+ writeCon;
+ readCon;
+ writeQueue = Promise.resolve();
+ readQueue = Promise.resolve();
+ async open(path) {
+ this.instance = await DuckDBInstance.create(path === ':memory:' ? undefined : path);
+ this.writeCon = await this.instance.connect();
+ this.readCon = await this.instance.connect();
+ }
+ close() {
+ try {
+ this.readCon?.closeSync();
+ }
+ catch { }
+ try {
+ this.writeCon?.closeSync();
+ }
+ catch { }
+ try {
+ this.instance?.closeSync();
+ }
+ catch { }
+ }
+ async query(sql, params = []) {
+ return this.enqueue('read', async () => {
+ if (params.length === 0) {
+ const reader = await this.readCon.runAndReadAll(sql);
+ return reader.getRowObjects();
+ }
+ const prepared = await this.readCon.prepare(sql);
+ this.bindParams(prepared, params);
+ const reader = await prepared.runAndReadAll();
+ return reader.getRowObjects();
+ });
+ }
+ async execute(sql, params = []) {
+ return this.enqueue('write', async () => {
+ if (params.length === 0) {
+ await this.writeCon.run(sql);
+ return;
+ }
+ const prepared = await this.writeCon.prepare(sql);
+ this.bindParams(prepared, params);
+ await prepared.run();
+ });
+ }
+ async executeMultiple(sql) {
+ return this.enqueue('write', async () => {
+ for (const statement of sql.split(';').filter((s) => s.trim())) {
+ await this.writeCon.run(statement);
+ }
+ });
+ }
+ async beginTransaction() {
+ return this.enqueue('write', async () => {
+ await this.writeCon.run('BEGIN TRANSACTION');
+ });
+ }
+ async commit() {
+ return this.enqueue('write', async () => {
+ await this.writeCon.run('COMMIT');
+ });
+ }
+ async rollback() {
+ return this.enqueue('write', async () => {
+ await this.writeCon.run('ROLLBACK');
+ });
+ }
+ async createBulkInserter(table, _columns, _options) {
+ const appender = await this.writeCon.createAppender(table.replace(/"/g, ''));
+ return {
+ append(values) {
+ for (const value of values) {
+ if (value === null || value === undefined) {
+ appender.appendNull();
+ }
+ else if (typeof value === 'string') {
+ appender.appendVarchar(value);
+ }
+ else if (typeof value === 'number') {
+ if (Number.isInteger(value)) {
+ appender.appendInteger(value);
+ }
+ else {
+ appender.appendDouble(value);
+ }
+ }
+ else if (typeof value === 'boolean') {
+ appender.appendBoolean(value);
+ }
+ else if (typeof value === 'bigint') {
+ appender.appendBigInt(value);
+ }
+ else if (value instanceof Uint8Array) {
+ appender.appendBlob(value);
+ }
+ else {
+ appender.appendVarchar(String(value));
+ }
+ }
+ appender.endRow();
+ },
+ async flush() {
+ appender.flushSync();
+ },
+ async close() {
+ appender.flushSync();
+ appender.closeSync();
+ },
+ };
+ }
+ /** Enqueue a read or write operation for serialization */
+ enqueue(queue, fn) {
+ if (queue === 'write') {
+ const p = this.writeQueue.then(fn);
+ this.writeQueue = p.then(() => { }, () => { });
+ return p;
+ }
+ else {
+ const p = this.readQueue.then(fn);
+ this.readQueue = p.then(() => { }, () => { });
+ return p;
+ }
+ }
+ bindParams(prepared, params) {
+ for (let i = 0; i < params.length; i++) {
+ const idx = i + 1;
+ const value = params[i];
+ if (value === null || value === undefined) {
+ prepared.bindNull(idx);
+ }
+ else if (typeof value === 'string') {
+ prepared.bindVarchar(idx, value);
+ }
+ else if (typeof value === 'number') {
+ if (Number.isInteger(value)) {
+ prepared.bindInteger(idx, value);
+ }
+ else {
+ prepared.bindDouble(idx, value);
+ }
+ }
+ else if (typeof value === 'boolean') {
+ prepared.bindBoolean(idx, value);
+ }
+ else if (typeof value === 'bigint') {
+ prepared.bindBigInt(idx, value);
+ }
+ else if (value instanceof Uint8Array) {
+ prepared.bindBlob(idx, value);
+ }
+ else {
+ prepared.bindVarchar(idx, String(value));
+ }
+ }
+ }
+ }
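The adapter serializes reads and writes on separate promise chains and wraps DuckDB's appender behind the `BulkInserter` interface from `ports.ts`. A small sketch of the bulk-insert path, with the table and columns made up for illustration:

```ts
// Illustrative sketch; append() is synchronous, while flush()/close() drain the appender.
import { DuckDBAdapter } from '@hatk/hatk/dist/database/adapters/duckdb.js';

const adapter = new DuckDBAdapter();
await adapter.open(':memory:');
await adapter.execute('CREATE TABLE example_records (uri TEXT, did TEXT, created_at TEXT)', []);

const inserter = await adapter.createBulkInserter('example_records', ['uri', 'did', 'created_at']);
inserter.append(['at://did:plc:example/app.example.record/1', 'did:plc:example', new Date().toISOString()]);
await inserter.close(); // flushes remaining rows before closing
adapter.close();
```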
package/dist/database/adapters/sqlite-search.d.ts ADDED
@@ -0,0 +1,18 @@
+ import type { SearchPort } from '../ports.ts';
+ import type { DatabasePort } from '../ports.ts';
+ /**
+ * SQLite FTS5-based search port.
+ *
+ * Uses SQLite's built-in FTS5 virtual tables for full-text search with BM25 ranking.
+ * The shadow table name is reused as the FTS5 virtual table name.
+ */
+ export declare class SQLiteSearchPort implements SearchPort {
+ private port;
+ constructor(port: DatabasePort);
+ buildIndex(shadowTable: string, sourceQuery: string, searchColumns: string[]): Promise<void>;
+ search(shadowTable: string, query: string, _searchColumns: string[], limit: number, offset: number): Promise<Array<{
+ uri: string;
+ score: number;
+ }>>;
+ }
+ //# sourceMappingURL=sqlite-search.d.ts.map
package/dist/database/adapters/sqlite-search.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"sqlite-search.d.ts","sourceRoot":"","sources":["../../../src/database/adapters/sqlite-search.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAA;AAE/C;;;;;GAKG;AACH,qBAAa,gBAAiB,YAAW,UAAU;IACrC,OAAO,CAAC,IAAI;gBAAJ,IAAI,EAAE,YAAY;IAEhC,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAuB5F,MAAM,CACV,WAAW,EAAE,MAAM,EACnB,KAAK,EAAE,MAAM,EACb,cAAc,EAAE,MAAM,EAAE,EACxB,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,MAAM,GACb,OAAO,CAAC,KAAK,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;CAalD"}