@hatk/hatk 0.0.1-alpha.36 → 0.0.1-alpha.37

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/backfill.js CHANGED
@@ -200,6 +200,7 @@ export async function backfillRepo(did, collections, fetchTimeout) {
  // Insert records in chunks to limit memory usage
  const CHUNK_SIZE = 1000;
  let chunk = [];
+ const validationSkips = {};
  for (const entry of entries) {
  const collection = entry.path.split('/')[0];
  if (!collections.has(collection))
@@ -216,12 +217,7 @@ export async function backfillRepo(did, collections, fetchTimeout) {
  const uri = `at://${did}/${collection}/${rkey}`;
  const validationError = validateRecord(getLexiconArray(), collection, record);
  if (validationError) {
- emit('backfill', 'validation_skip', {
- uri,
- collection,
- path: validationError.path,
- error: validationError.message,
- });
+ validationSkips[collection] = (validationSkips[collection] || 0) + 1;
  continue;
  }
  chunk.push({ collection, uri, cid: entry.cid, did, record });
@@ -242,6 +238,10 @@ export async function backfillRepo(did, collections, fetchTimeout) {
  if (chunk.length > 0) {
  count += await bulkInsertRecords(chunk);
  }
+ const totalSkips = Object.values(validationSkips).reduce((a, b) => a + b, 0);
+ if (totalSkips > 0) {
+ emit('backfill', 'validation_skips', { did, total: totalSkips, by_collection: validationSkips });
+ }
  await setRepoStatus(did, 'active', commit.rev, { handle });
  return count;
  }
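For integrations that watched the old per-record `validation_skip` events, skip reporting is now aggregated into one `validation_skips` event per repo. A minimal sketch of the new payload shape, taken from the `emit()` call above (the surrounding subscription API is not shown in this diff):

```ts
// Shape of the aggregated event payload (field names from the emit() call above).
type ValidationSkipsEvent = {
  did: string
  total: number
  by_collection: Record<string, number>
}

// Hypothetical example: 7 records skipped across two collections.
const example: ValidationSkipsEvent = {
  did: 'did:plc:abc123',
  total: 7,
  by_collection: { 'app.bsky.feed.post': 5, 'app.bsky.feed.like': 2 },
}
```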
package/dist/cli.js CHANGED
@@ -1304,7 +1304,7 @@ else if (command === 'generate') {
  out += `import type { ${[...usedWrappers].sort().join(', ')}, LexServerParams, Checked, Prettify, StrictArg } from '@hatk/hatk/lex-types'\n`;
  out += `import type { XrpcContext } from '@hatk/hatk/xrpc'\n`;
  out += `import { callXrpc as _callXrpc } from '@hatk/hatk/xrpc'\n`;
- out += `import { defineFeed as _defineFeed, type FeedResult, type FeedContext, type HydrateContext } from '@hatk/hatk/feeds'\n`;
+ out += `import { defineFeed as _defineFeed, type FeedResult, type FeedContext, type HydrateContext, type Row } from '@hatk/hatk/feeds'\n`;
  out += `import { seed as _seed, type SeedOpts } from '@hatk/hatk/seed'\n`;
  // Emit ALL lexicons as `const ... = {...} as const` (including defs-only)
  out += `\n// ─── Lexicon Definitions ────────────────────────────────────────────\n\n`;
@@ -1460,11 +1460,11 @@ else if (command === 'generate') {
  out += `}\n`;
  // Emit Ctx helper for typesafe XRPC handler contexts
  out += `\n// ─── XRPC Helpers ───────────────────────────────────────────────────\n\n`;
- out += `export type { HydrateContext } from '@hatk/hatk/feeds'\n`;
+ out += `export type { HydrateContext, Row } from '@hatk/hatk/feeds'\n`;
  out += `export { InvalidRequestError, NotFoundError } from '@hatk/hatk/xrpc'\n`;
  out += `export { defineSetup } from '@hatk/hatk/setup'\n`;
  out += `export { defineHook } from '@hatk/hatk/hooks'\n`;
- out += `export { defineLabels } from '@hatk/hatk/labels'\n`;
+ out += `export { defineLabel } from '@hatk/hatk/labels'\n`;
  out += `export { defineOG } from '@hatk/hatk/opengraph'\n`;
  out += `export { defineRenderer } from '@hatk/hatk/renderer'\n`;
  out += `export type Ctx<K extends keyof XrpcSchema & keyof Registry> = XrpcContext<\n`;
@@ -1571,9 +1571,10 @@ else if (command === 'generate') {
  clientOut += `export async function callXrpc<K extends keyof XrpcSchema & string>(\n`;
  clientOut += ` nsid: K,\n`;
  clientOut += ` arg?: CallArg<K>,\n`;
+ clientOut += ` customFetch?: typeof globalThis.fetch,\n`;
  clientOut += `): Promise<OutputOf<K>> {\n`;
- // Server-side bridge
- clientOut += ` if (typeof window === 'undefined') {\n`;
+ // Server-side bridge (skip when customFetch is provided — let SvelteKit's fetch handle it)
+ clientOut += ` if (typeof window === 'undefined' && !customFetch) {\n`;
  clientOut += ` const bridge = (globalThis as any).__hatk_callXrpc\n`;
  clientOut += ` if (!bridge) throw new Error('callXrpc: server bridge not available — is hatk initialized?')\n`;
  if (procedureNsids.length > 0 || blobInputNsids.length > 0) {
@@ -1586,30 +1587,35 @@ else if (command === 'generate') {
  }
  clientOut += ` return bridge(nsid, arg) as Promise<OutputOf<K>>\n`;
  clientOut += ` }\n`;
- // Client-side fetch
- clientOut += ` const url = new URL(\`/xrpc/\${nsid}\`, window.location.origin)\n`;
+ // Client-side fetch (or server-side with customFetch for SSR deduplication)
+ clientOut += ` const _fetch = customFetch ?? globalThis.fetch\n`;
+ clientOut += ` // Use relative URL so SvelteKit's fetch can deduplicate server/client requests\n`;
+ clientOut += ` let path = \`/xrpc/\${nsid}\`\n`;
  if (blobInputNsids.length > 0) {
  clientOut += ` if (_blobInputs.has(nsid)) {\n`;
  clientOut += ` const blob = arg as Blob | ArrayBuffer\n`;
  clientOut += ` const ct = blob instanceof Blob ? blob.type : 'application/octet-stream'\n`;
- clientOut += ` const res = await fetch(url, { method: 'POST', headers: { 'Content-Type': ct }, body: blob })\n`;
+ clientOut += ` const res = await _fetch(path, { method: 'POST', headers: { 'Content-Type': ct }, body: blob })\n`;
  clientOut += ` if (!res.ok) throw new Error(\`XRPC \${nsid} failed: \${res.status}\`)\n`;
  clientOut += ` return res.json() as Promise<OutputOf<K>>\n`;
  clientOut += ` }\n`;
  }
  if (procedureNsids.length > 0) {
  clientOut += ` if (_procedures.has(nsid)) {\n`;
- clientOut += ` const res = await fetch(url, { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(arg) })\n`;
- clientOut += ` if (res.status === 401) { window.location.href = '/oauth/login'; return new Promise(() => {}) as any }\n`;
+ clientOut += ` const res = await _fetch(path, { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(arg) })\n`;
+ clientOut += ` if (typeof window !== 'undefined' && res.status === 401) { const _h = getViewer()?.handle; window.location.href = _h ? \`/oauth/login?handle=\${encodeURIComponent(_h)}\` : '/oauth/login'; return new Promise(() => {}) as any }\n`;
  clientOut += ` if (!res.ok) throw new Error(\`XRPC \${nsid} failed: \${res.status}\`)\n`;
  clientOut += ` return res.json() as Promise<OutputOf<K>>\n`;
  clientOut += ` }\n`;
  }
+ clientOut += ` const params = new URLSearchParams()\n`;
  clientOut += ` for (const [k, v] of Object.entries(arg || {})) {\n`;
- clientOut += ` if (v != null) url.searchParams.set(k, String(v))\n`;
+ clientOut += ` if (v != null) params.set(k, String(v))\n`;
  clientOut += ` }\n`;
- clientOut += ` const res = await fetch(url)\n`;
- clientOut += ` if (res.status === 401) { window.location.href = '/oauth/login'; return new Promise(() => {}) as any }\n`;
+ clientOut += ` const qs = params.toString()\n`;
+ clientOut += ` if (qs) path += \`?\${qs}\`\n`;
+ clientOut += ` const res = await _fetch(path)\n`;
+ clientOut += ` if (typeof window !== 'undefined' && res.status === 401) { window.location.href = '/oauth/login'; return new Promise(() => {}) as any }\n`;
  clientOut += ` if (!res.ok) throw new Error(\`XRPC \${nsid} failed: \${res.status}\`)\n`;
  clientOut += ` return res.json() as Promise<OutputOf<K>>\n`;
  clientOut += `}\n`;
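The new `customFetch` parameter exists so SvelteKit apps can pass the framework's `fetch` through to the generated client. A sketch of the intended call site, assuming a generated query named `com.example.feed.getPosts` (hypothetical NSID) and the default output path of the generated client:

```ts
// +page.ts — passing SvelteKit's fetch lets the framework run the request
// during SSR and reuse the serialized response on the client.
import { callXrpc } from '../hatk.generated.client'

export async function load({ fetch }) {
  const posts = await callXrpc('com.example.feed.getPosts', { limit: 20 }, fetch)
  return { posts }
}
```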
@@ -1617,6 +1623,43 @@ else if (command === 'generate') {
  clientOut += `\nexport function getViewer(): { did: string; handle: string } | null {\n`;
  clientOut += ` return (globalThis as any).__hatk_viewer ?? null\n`;
  clientOut += `}\n`;
+ // Auth helpers — login, logout, viewerDid
+ clientOut += `\n// ─── Auth Helpers ────────────────────────────────────────────────────\n\n`;
+ clientOut += `export async function login(handle: string): Promise<void> {\n`;
+ clientOut += ` const res = await fetch(\`/oauth/login?handle=\${encodeURIComponent(handle)}\`, { redirect: 'manual' })\n`;
+ clientOut += ` if (res.type === 'opaqueredirect') {\n`;
+ clientOut += ` window.location.href = \`/oauth/login?handle=\${encodeURIComponent(handle)}\`\n`;
+ clientOut += ` return\n`;
+ clientOut += ` }\n`;
+ clientOut += ` if (res.ok) return\n`;
+ clientOut += ` const body = await res.json().catch(() => ({ error: 'Login failed' }))\n`;
+ clientOut += ` throw new Error(body.error || 'Login failed')\n`;
+ clientOut += `}\n\n`;
+ clientOut += `export async function logout(): Promise<void> {\n`;
+ clientOut += ` ;(globalThis as any).__hatk_viewer = null\n`;
+ clientOut += ` await fetch('/auth/logout', { method: 'POST' }).catch(() => {})\n`;
+ clientOut += `}\n\n`;
+ clientOut += `export function viewerDid(): string | null {\n`;
+ clientOut += ` if (typeof window === 'undefined') return null\n`;
+ clientOut += ` const viewer = (globalThis as any).__hatk_viewer\n`;
+ clientOut += ` return viewer?.did ?? null\n`;
+ clientOut += `}\n\n`;
+ clientOut += `// Expose viewer for getViewer() bridge\n`;
+ clientOut += `;(globalThis as any).__hatk_auth = { viewerDid }\n`;
+ // parseViewer — server-side session cookie resolution for +layout.server.ts
+ clientOut += `\n// ─── Server Helpers ──────────────────────────────────────────────────\n\n`;
+ clientOut += `export async function parseViewer(cookies: { get(name: string): string | undefined }): Promise<{ did: string; handle?: string } | null> {\n`;
+ clientOut += ` const parseSessionCookie = (globalThis as any).__hatk_parseSessionCookie\n`;
+ clientOut += ` if (!parseSessionCookie) return null\n`;
+ clientOut += ` const cookieValue = cookies.get('__hatk_session')\n`;
+ clientOut += ` if (!cookieValue) return null\n`;
+ clientOut += ` try {\n`;
+ clientOut += ` const request = new Request('http://localhost', { headers: { cookie: \`__hatk_session=\${cookieValue}\` } })\n`;
+ clientOut += ` const viewer = await parseSessionCookie(request)\n`;
+ clientOut += ` if (viewer) (globalThis as any).__hatk_viewer = viewer\n`;
+ clientOut += ` return viewer\n`;
+ clientOut += ` } catch { return null }\n`;
+ clientOut += `}\n`;
  writeFileSync('./hatk.generated.client.ts', clientOut);
  console.log(`Generated ${outPath} with ${entries.length} types: ${entries.map((e) => capitalize(varNames.get(e.nsid))).join(', ')}`);
  console.log(`Generated ./hatk.generated.client.ts (client-safe subset)`);
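The generated `parseViewer` is meant for `+layout.server.ts`, resolving the session cookie through the `__hatk_parseSessionCookie` bridge installed at init. A sketch of the wiring (file locations illustrative):

```ts
// +layout.server.ts — resolve the hatk session on every server render.
import { parseViewer } from '../hatk.generated.client'

export async function load({ cookies }) {
  // Returns { did, handle? } for a valid __hatk_session cookie, else null
  // (also null when the server bridge hasn't been initialized yet).
  const viewer = await parseViewer(cookies)
  return { viewer }
}
```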
package/dist/cloudflare/container.d.ts ADDED
@@ -0,0 +1,73 @@
+ /**
+ * Cloudflare Container entry point for hatk.
+ *
+ * Runs as a long-lived Node.js process alongside the Worker. Handles the
+ * firehose indexer and backfill loop — the Worker delegates resync requests
+ * here via RPC (Cloudflare Container service bindings).
+ *
+ * No HTTP server — all communication is via the Container RPC interface.
+ */
+ export interface Env {
+ /** Cloudflare D1 database binding */
+ DB: D1Database;
+ HATK_RELAY: string;
+ HATK_PLC: string;
+ HATK_OAUTH_ISSUER?: string;
+ HATK_OAUTH_SCOPES?: string;
+ HATK_ADMINS?: string;
+ HATK_COLLECTIONS?: string;
+ HATK_BACKFILL_PARALLELISM?: string;
+ HATK_BACKFILL_FETCH_TIMEOUT?: string;
+ HATK_BACKFILL_MAX_RETRIES?: string;
+ HATK_BACKFILL_FULL_NETWORK?: string;
+ HATK_BACKFILL_REPOS?: string;
+ HATK_BACKFILL_SIGNAL_COLLECTIONS?: string;
+ }
+ interface D1Database {
+ prepare(sql: string): any;
+ batch<T = unknown>(statements: any[]): Promise<any[]>;
+ exec(sql: string): Promise<any>;
+ }
+ /**
+ * Resync a single DID by triggering auto-backfill.
+ * Called by the Worker via Container service binding RPC.
+ */
+ declare function resync(did: string): Promise<void>;
+ /**
+ * Trigger a full re-enumeration backfill of all repos.
+ * Called by the Worker via Container service binding RPC.
+ */
+ declare function resyncAll(): Promise<void>;
+ /**
+ * Return basic status info about the Container.
+ * Called by the Worker for health checks / admin UI.
+ */
+ declare function getStatus(): {
+ initialized: boolean;
+ collections: string[];
+ uptimeMs: number;
+ };
+ /**
+ * Cloudflare Container entry point.
+ *
+ * Containers expose RPC methods that the Worker can call via the service binding.
+ * The Container also handles fetch requests routed from the Worker, but for hatk
+ * all Worker-to-Container communication uses the RPC methods above.
+ */
+ declare const _default: {
+ /**
+ * Container startup — called when the Container is first instantiated.
+ * Initializes the database, starts the firehose, and begins backfill.
+ */
+ start(env: Env): Promise<void>;
+ /**
+ * Handle fetch requests forwarded from the Worker.
+ * The Container doesn't serve HTTP — return 404 for any direct requests.
+ */
+ fetch(request: Request, env: Env): Promise<Response>;
+ resync: typeof resync;
+ resyncAll: typeof resyncAll;
+ getStatus: typeof getStatus;
+ };
+ export default _default;
+ //# sourceMappingURL=container.d.ts.map
package/dist/cloudflare/container.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"container.d.ts","sourceRoot":"","sources":["../../src/cloudflare/container.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AA0BH,MAAM,WAAW,GAAG;IAClB,qCAAqC;IACrC,EAAE,EAAE,UAAU,CAAA;IAGd,UAAU,EAAE,MAAM,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,gBAAgB,CAAC,EAAE,MAAM,CAAA;IAGzB,yBAAyB,CAAC,EAAE,MAAM,CAAA;IAClC,2BAA2B,CAAC,EAAE,MAAM,CAAA;IACpC,yBAAyB,CAAC,EAAE,MAAM,CAAA;IAClC,0BAA0B,CAAC,EAAE,MAAM,CAAA;IACnC,mBAAmB,CAAC,EAAE,MAAM,CAAA;IAC5B,gCAAgC,CAAC,EAAE,MAAM,CAAA;CAC1C;AAGD,UAAU,UAAU;IAClB,OAAO,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;IACzB,KAAK,CAAC,CAAC,GAAG,OAAO,EAAE,UAAU,EAAE,GAAG,EAAE,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;IACrD,IAAI,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAAA;CAChC;AAoLD;;;GAGG;AACH,iBAAe,MAAM,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAEhD;AAED;;;GAGG;AACH,iBAAe,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC,CAExC;AAED;;;GAGG;AACH,iBAAS,SAAS,IAAI;IAAE,WAAW,EAAE,OAAO,CAAC;IAAC,WAAW,EAAE,MAAM,EAAE,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAA;CAAE,CAMtF;AAID;;;;;;GAMG;;IAED;;;OAGG;eACc,GAAG,GAAG,OAAO,CAAC,IAAI,CAAC;IAIpC;;;OAGG;mBACkB,OAAO,OAAO,GAAG,GAAG,OAAO,CAAC,QAAQ,CAAC;;;;;AAb5D,wBA0BC"}
package/dist/cloudflare/container.js ADDED
@@ -0,0 +1,232 @@
+ /**
+ * Cloudflare Container entry point for hatk.
+ *
+ * Runs as a long-lived Node.js process alongside the Worker. Handles the
+ * firehose indexer and backfill loop — the Worker delegates resync requests
+ * here via RPC (Cloudflare Container service bindings).
+ *
+ * No HTTP server — all communication is via the Container RPC interface.
+ */
+ import { D1Adapter } from "../database/adapters/d1.js";
+ import { initDatabase, getCursor, migrateSchema } from "../database/db.js";
+ import { storeLexicons, discoverCollections, buildSchemas } from "../database/schema.js";
+ import { discoverViews } from "../views.js";
+ import { getDialect } from "../database/dialect.js";
+ import { setSearchPort } from "../database/fts.js";
+ import { rebuildAllIndexes } from "../database/fts.js";
+ import { registerCoreHandlers } from "../server.js";
+ import { configureRelay } from "../xrpc.js";
+ import { startIndexer, triggerAutoBackfill } from "../indexer.js";
+ import { runBackfill } from "../backfill.js";
+ import { relayHttpUrl } from "../config.js";
+ import { validateLexicons } from '@bigmoves/lexicon';
+ import { log } from "../logger.js";
+ // ---------- Container state ----------
+ let initialized = false;
+ let initPromise = null;
+ let collections = [];
+ let collectionSet = new Set();
+ let backfillOpts = null;
+ let startedAt = 0;
+ /**
+ * One-time initialization. Mirrors main.ts startup minus the HTTP server.
+ */
+ async function initialize(env) {
+ startedAt = Date.now();
+ // 1. Parse config from env vars
+ const relay = env.HATK_RELAY || 'wss://bsky.network';
+ const plc = env.HATK_PLC || 'https://plc.directory';
+ configureRelay(relay);
+ const admins = env.HATK_ADMINS ? env.HATK_ADMINS.split(',').map((s) => s.trim()) : [];
+ // 2. Load lexicons — injected at build time via virtual module
+ let lexicons;
+ try {
+ // @ts-expect-error — virtual module generated at build time
+ const lexiconModule = await import('virtual:hatk-lexicons');
+ lexicons = new Map(Object.entries(lexiconModule.default));
+ }
+ catch {
+ lexicons = new Map();
+ }
+ const lexiconErrors = validateLexicons([...lexicons.values()]);
+ if (lexiconErrors) {
+ for (const [nsid, errors] of Object.entries(lexiconErrors)) {
+ for (const err of errors) {
+ console.error(`[container] Invalid lexicon ${nsid}: ${err}`);
+ }
+ }
+ throw new Error('Invalid lexicons — check build output');
+ }
+ storeLexicons(lexicons);
+ // 3. Determine collections
+ collections = env.HATK_COLLECTIONS
+ ? env.HATK_COLLECTIONS.split(',').map((s) => s.trim())
+ : discoverCollections(lexicons);
+ collectionSet = new Set(collections);
+ if (collections.length === 0) {
+ log('[container] No record collections found — running in indexer-only mode');
+ }
+ log(`[container] Loaded config: ${collections.length} collections`);
+ // 4. Build schemas and init D1
+ discoverViews();
+ const engineDialect = getDialect('d1');
+ const { schemas, ddlStatements } = buildSchemas(lexicons, collections, engineDialect);
+ const adapter = new D1Adapter();
+ adapter.initWithBinding(env.DB);
+ setSearchPort(null); // D1 uses SQLite FTS natively
+ await initDatabase(adapter, ':memory:', schemas, ddlStatements);
+ // Auto-migrate schema if lexicons changed
+ const migrationChanges = await migrateSchema(schemas);
+ if (migrationChanges.length > 0) {
+ log(`[container] Applied ${migrationChanges.length} schema migration(s)`);
+ }
+ // 5. Init server directory handlers (feeds, labels, hooks, xrpc, setup)
+ // In Containers, we load these via the bundled virtual module like the Worker.
+ // The server/ directory scanning won't work in a Container since there's no filesystem
+ // layout matching the dev project. For now, register core handlers only.
+ // When build tooling (Task 7) bundles server handlers, they'll be imported here.
+ const oauthConfig = env.HATK_OAUTH_ISSUER
+ ? {
+ issuer: env.HATK_OAUTH_ISSUER,
+ scopes: env.HATK_OAUTH_SCOPES ? env.HATK_OAUTH_SCOPES.split(',').map((s) => s.trim()) : ['read', 'write'],
+ clients: [],
+ }
+ : null;
+ registerCoreHandlers(collections, oauthConfig);
+ // 6. Parse backfill config
+ const backfillConfig = {
+ fullNetwork: env.HATK_BACKFILL_FULL_NETWORK === 'true',
+ parallelism: env.HATK_BACKFILL_PARALLELISM ? parseInt(env.HATK_BACKFILL_PARALLELISM, 10) : 10,
+ fetchTimeout: env.HATK_BACKFILL_FETCH_TIMEOUT ? parseInt(env.HATK_BACKFILL_FETCH_TIMEOUT, 10) : 30,
+ maxRetries: env.HATK_BACKFILL_MAX_RETRIES ? parseInt(env.HATK_BACKFILL_MAX_RETRIES, 10) : 5,
+ repos: env.HATK_BACKFILL_REPOS ? env.HATK_BACKFILL_REPOS.split(',').map((s) => s.trim()) : undefined,
+ signalCollections: env.HATK_BACKFILL_SIGNAL_COLLECTIONS
+ ? env.HATK_BACKFILL_SIGNAL_COLLECTIONS.split(',').map((s) => s.trim())
+ : undefined,
+ };
+ backfillOpts = {
+ pdsUrl: relayHttpUrl(relay),
+ plcUrl: plc,
+ collections: collectionSet,
+ config: backfillConfig,
+ };
+ // 7. Start firehose indexer
+ const cursor = await getCursor('relay');
+ startIndexer({
+ relayUrl: relay,
+ collections: collectionSet,
+ signalCollections: backfillConfig.signalCollections ? new Set(backfillConfig.signalCollections) : undefined,
+ pinnedRepos: backfillConfig.repos ? new Set(backfillConfig.repos) : undefined,
+ cursor,
+ fetchTimeout: backfillConfig.fetchTimeout,
+ maxRetries: backfillConfig.maxRetries,
+ parallelism: backfillConfig.parallelism,
+ });
+ log('[container] Firehose indexer started');
+ // 8. Run backfill in background
+ runBackfillAndRestart();
+ initialized = true;
+ log('[container] Initialization complete');
+ }
+ /**
+ * Run backfill, rebuild FTS indexes, and restart the process if records
+ * were imported (to reclaim memory from CAR parsing). Mirrors main.ts behavior.
+ */
+ function runBackfillAndRestart() {
+ if (!backfillOpts)
+ return;
+ runBackfill(backfillOpts)
+ .then(async (recordCount) => {
+ log('[container] Backfill complete, building FTS indexes...');
+ await rebuildAllIndexes(collections);
+ log('[container] FTS indexes ready');
+ return recordCount;
+ })
+ .then((recordCount) => {
+ if (recordCount > 0) {
+ log('[container] Restarting to reclaim memory...');
+ process.exit(1);
+ }
+ })
+ .catch((err) => {
+ console.error('[container] Backfill error:', err.message);
+ });
+ }
+ /**
+ * Ensure initialization has completed. Uses a shared promise so concurrent
+ * RPC calls don't trigger multiple inits.
+ */
+ function ensureInit(env) {
+ if (initialized)
+ return Promise.resolve();
+ if (!initPromise) {
+ initPromise = initialize(env).catch((err) => {
+ initPromise = null;
+ throw err;
+ });
+ }
+ return initPromise;
+ }
+ // ---------- RPC methods ----------
+ /**
+ * Resync a single DID by triggering auto-backfill.
+ * Called by the Worker via Container service binding RPC.
+ */
+ async function resync(did) {
+ await triggerAutoBackfill(did);
+ }
+ /**
+ * Trigger a full re-enumeration backfill of all repos.
+ * Called by the Worker via Container service binding RPC.
+ */
+ async function resyncAll() {
+ runBackfillAndRestart();
+ }
+ /**
+ * Return basic status info about the Container.
+ * Called by the Worker for health checks / admin UI.
+ */
+ function getStatus() {
+ return {
+ initialized,
+ collections,
+ uptimeMs: startedAt > 0 ? Date.now() - startedAt : 0,
+ };
+ }
+ // ---------- Container export ----------
+ /**
+ * Cloudflare Container entry point.
+ *
+ * Containers expose RPC methods that the Worker can call via the service binding.
+ * The Container also handles fetch requests routed from the Worker, but for hatk
+ * all Worker-to-Container communication uses the RPC methods above.
+ */
+ export default {
+ /**
+ * Container startup — called when the Container is first instantiated.
+ * Initializes the database, starts the firehose, and begins backfill.
+ */
+ async start(env) {
+ await ensureInit(env);
+ },
+ /**
+ * Handle fetch requests forwarded from the Worker.
+ * The Container doesn't serve HTTP — return 404 for any direct requests.
+ */
+ async fetch(request, env) {
+ await ensureInit(env);
+ return new Response(JSON.stringify({ error: 'Container does not serve HTTP requests' }), {
+ status: 404,
+ headers: { 'Content-Type': 'application/json' },
+ });
+ },
+ // RPC methods exposed to the Worker via service binding
+ resync,
+ resyncAll,
+ getStatus,
+ };
+ // Graceful shutdown
+ process.on('SIGTERM', () => {
+ log('[container] Received SIGTERM, shutting down...');
+ process.exit(0);
+ });
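A sketch of how a Worker might surface `getStatus()` for a health check. Note this is an assumption: the `ContainerBinding` interface in worker.d.ts below only declares `resync()` and `fetch()`, so a status RPC over the service binding is not confirmed by this diff:

```ts
// Hypothetical Worker-side health check over the Container service binding.
interface ContainerStatus {
  initialized: boolean
  collections: string[]
  uptimeMs: number
}

async function health(env: { CONTAINER: { getStatus(): Promise<ContainerStatus> } }): Promise<Response> {
  const status = await env.CONTAINER.getStatus() // RPC methods return promises
  return new Response(JSON.stringify(status), {
    headers: { 'Content-Type': 'application/json' },
  })
}
```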
package/dist/cloudflare/hooks.d.ts ADDED
@@ -0,0 +1,33 @@
+ /**
+ * SvelteKit handle hook for Cloudflare Workers.
+ *
+ * Use with @sveltejs/adapter-cloudflare. Lazily initializes hatk on the first
+ * request using the D1 binding from platform.env, then intercepts hatk API
+ * routes before SvelteKit processes them.
+ *
+ * @example
+ * ```ts
+ * // app/hooks.server.ts
+ * import { createHandle } from '@hatk/hatk/cloudflare/hooks'
+ * export const handle = createHandle()
+ * ```
+ */
+ type MaybePromise<T> = T | Promise<T>;
+ /** Minimal SvelteKit Handle type to avoid depending on @sveltejs/kit. */
+ type Handle = (input: {
+ event: {
+ request: Request;
+ url: URL;
+ platform?: {
+ env?: Record<string, unknown>;
+ };
+ };
+ resolve: (event: any) => MaybePromise<Response>;
+ }) => MaybePromise<Response>;
+ /**
+ * Create a SvelteKit `handle` function that initializes hatk with D1
+ * and intercepts API routes.
+ */
+ export declare function createHandle(): Handle;
+ export {};
+ //# sourceMappingURL=hooks.d.ts.map
package/dist/cloudflare/hooks.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"hooks.d.ts","sourceRoot":"","sources":["../../src/cloudflare/hooks.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;GAaG;AAKH,KAAK,YAAY,CAAC,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAA;AAErC,yEAAyE;AACzE,KAAK,MAAM,GAAG,CAAC,KAAK,EAAE;IACpB,KAAK,EAAE;QACL,OAAO,EAAE,OAAO,CAAA;QAChB,GAAG,EAAE,GAAG,CAAA;QACR,QAAQ,CAAC,EAAE;YAAE,GAAG,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;SAAE,CAAA;KAC7C,CAAA;IACD,OAAO,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,YAAY,CAAC,QAAQ,CAAC,CAAA;CAChD,KAAK,YAAY,CAAC,QAAQ,CAAC,CAAA;AAE5B;;;GAGG;AACH,wBAAgB,YAAY,IAAI,MAAM,CAsBrC"}
package/dist/cloudflare/hooks.js ADDED
@@ -0,0 +1,40 @@
+ /**
+ * SvelteKit handle hook for Cloudflare Workers.
+ *
+ * Use with @sveltejs/adapter-cloudflare. Lazily initializes hatk on the first
+ * request using the D1 binding from platform.env, then intercepts hatk API
+ * routes before SvelteKit processes them.
+ *
+ * @example
+ * ```ts
+ * // app/hooks.server.ts
+ * import { createHandle } from '@hatk/hatk/cloudflare/hooks'
+ * export const handle = createHandle()
+ * ```
+ */
+ import { ensureInit, getHandler } from "./init.js";
+ import { isHatkRoute } from "../adapter.js";
+ /**
+ * Create a SvelteKit `handle` function that initializes hatk with D1
+ * and intercepts API routes.
+ */
+ export function createHandle() {
+ return async ({ event, resolve }) => {
+ const env = event.platform?.env;
+ if (!env || !env.DB) {
+ // Not running on Cloudflare (e.g. dev mode) — pass through
+ return resolve(event);
+ }
+ // Lazy init hatk with the D1 binding
+ await ensureInit(env);
+ // hatk API routes
+ if (isHatkRoute(event.url.pathname)) {
+ const handler = getHandler();
+ if (handler) {
+ return handler(event.request);
+ }
+ }
+ // Everything else → SvelteKit
+ return resolve(event);
+ };
+ }
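Since `createHandle()` passes non-hatk requests straight to `resolve`, it composes with other hooks. A sketch using SvelteKit's `sequence` (the second hook is a hypothetical app-side example):

```ts
// hooks.server.ts — hatk first, then app-specific hooks.
import type { Handle } from '@sveltejs/kit'
import { sequence } from '@sveltejs/kit/hooks'
import { createHandle } from '@hatk/hatk/cloudflare/hooks'

const withSecurityHeaders: Handle = async ({ event, resolve }) => {
  const response = await resolve(event)
  response.headers.set('X-Frame-Options', 'DENY')
  return response
}

export const handle = sequence(createHandle(), withSecurityHeaders)
```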
package/dist/cloudflare/init.d.ts ADDED
@@ -0,0 +1,27 @@
+ /**
+ * Shared Cloudflare initialization logic used by both the standalone Worker
+ * entry and the SvelteKit handle hook.
+ */
+ export interface CloudflareEnv {
+ DB: D1Database;
+ HATK_RELAY?: string;
+ HATK_PLC?: string;
+ HATK_OAUTH_ISSUER?: string;
+ HATK_OAUTH_SCOPES?: string;
+ HATK_ADMINS?: string;
+ HATK_COLLECTIONS?: string;
+ [key: string]: unknown;
+ }
+ interface D1Database {
+ prepare(sql: string): any;
+ batch<T = unknown>(statements: any[]): Promise<any[]>;
+ exec(sql: string): Promise<any>;
+ }
+ /**
+ * Ensure initialization has completed. Concurrent calls share the same promise.
+ */
+ export declare function ensureInit(env: CloudflareEnv): Promise<void>;
+ /** Get the hatk request handler (only valid after init). */
+ export declare function getHandler(): ((request: Request) => Promise<Response>) | null;
+ export {};
+ //# sourceMappingURL=init.d.ts.map
package/dist/cloudflare/init.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"init.d.ts","sourceRoot":"","sources":["../../src/cloudflare/init.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAgBH,MAAM,WAAW,aAAa;IAC5B,EAAE,EAAE,UAAU,CAAA;IACd,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,gBAAgB,CAAC,EAAE,MAAM,CAAA;IACzB,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAA;CACvB;AAED,UAAU,UAAU;IAClB,OAAO,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;IACzB,KAAK,CAAC,CAAC,GAAG,OAAO,EAAE,UAAU,EAAE,GAAG,EAAE,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;IACrD,IAAI,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAAA;CAChC;AAoFD;;GAEG;AACH,wBAAgB,UAAU,CAAC,GAAG,EAAE,aAAa,GAAG,OAAO,CAAC,IAAI,CAAC,CAS5D;AAED,4DAA4D;AAC5D,wBAAgB,UAAU,IAAI,CAAC,CAAC,OAAO,EAAE,OAAO,KAAK,OAAO,CAAC,QAAQ,CAAC,CAAC,GAAG,IAAI,CAE7E"}
package/dist/cloudflare/init.js ADDED
@@ -0,0 +1,103 @@
+ /**
+ * Shared Cloudflare initialization logic used by both the standalone Worker
+ * entry and the SvelteKit handle hook.
+ */
+ import { D1Adapter } from "../database/adapters/d1.js";
+ import { initDatabase, migrateSchema } from "../database/db.js";
+ import { storeLexicons, discoverCollections, buildSchemas } from "../database/schema.js";
+ import { discoverViews } from "../views.js";
+ import { getDialect } from "../database/dialect.js";
+ import { setSearchPort } from "../database/fts.js";
+ import { initOAuth } from "../oauth/server.js";
+ import { parseSessionCookie, getSessionCookieName } from "../oauth/session.js";
+ import { createHandler, registerCoreHandlers } from "../server.js";
+ import { configureRelay } from "../xrpc.js";
+ import { callXrpc } from "../xrpc.js";
+ import { validateLexicons } from '@bigmoves/lexicon';
+ let handler = null;
+ let initPromise = null;
+ /**
+ * One-time initialization. Sets up D1 adapter, database schemas, XRPC
+ * handlers, OAuth, and the globalThis bridge for SvelteKit SSR.
+ */
+ async function initialize(env) {
+ const relay = env.HATK_RELAY || 'wss://bsky.network';
+ const plc = env.HATK_PLC || 'https://plc.directory';
+ configureRelay(relay);
+ const admins = env.HATK_ADMINS ? env.HATK_ADMINS.split(',').map((s) => s.trim()) : [];
+ // Load lexicons — injected at build time via virtual module
+ let lexicons;
+ try {
+ // @ts-expect-error — virtual module generated at build time
+ const lexiconModule = await import('virtual:hatk-lexicons');
+ lexicons = new Map(Object.entries(lexiconModule.default));
+ }
+ catch {
+ lexicons = new Map();
+ }
+ const lexiconErrors = validateLexicons([...lexicons.values()]);
+ if (lexiconErrors) {
+ for (const [nsid, errors] of Object.entries(lexiconErrors)) {
+ for (const err of errors) {
+ console.error(`[hatk] Invalid lexicon ${nsid}: ${err}`);
+ }
+ }
+ throw new Error('Invalid lexicons — check build output');
+ }
+ storeLexicons(lexicons);
+ const collections = env.HATK_COLLECTIONS
+ ? env.HATK_COLLECTIONS.split(',').map((s) => s.trim())
+ : discoverCollections(lexicons);
+ // Build schemas and init D1
+ discoverViews();
+ const engineDialect = getDialect('d1');
+ const { schemas, ddlStatements } = buildSchemas(lexicons, collections, engineDialect);
+ const adapter = new D1Adapter();
+ adapter.initWithBinding(env.DB);
+ setSearchPort(null);
+ await initDatabase(adapter, ':memory:', schemas, ddlStatements);
+ await migrateSchema(schemas);
+ // Register core XRPC handlers
+ const oauthConfig = env.HATK_OAUTH_ISSUER
+ ? {
+ issuer: env.HATK_OAUTH_ISSUER,
+ scopes: env.HATK_OAUTH_SCOPES ? env.HATK_OAUTH_SCOPES.split(',').map((s) => s.trim()) : ['read', 'write'],
+ clients: [],
+ }
+ : null;
+ registerCoreHandlers(collections, oauthConfig);
+ if (oauthConfig) {
+ await initOAuth(oauthConfig, plc, relay);
+ }
+ // Expose bridge for SvelteKit SSR
+ ;
+ globalThis.__hatk_callXrpc = callXrpc;
+ globalThis.__hatk_parseSessionCookie = parseSessionCookie;
+ globalThis.__hatk_sessionCookieName = getSessionCookieName();
+ // Create the request handler
+ handler = createHandler({
+ collections,
+ publicDir: null,
+ oauth: oauthConfig,
+ admins,
+ onResync: undefined,
+ });
+ }
+ /**
+ * Ensure initialization has completed. Concurrent calls share the same promise.
+ */
+ export function ensureInit(env) {
+ if (handler)
+ return Promise.resolve();
+ if (!initPromise) {
+ initPromise = initialize(env).catch((err) => {
+ initPromise = null;
+ throw err;
+ });
+ }
+ return initPromise;
+ }
+ /** Get the hatk request handler (only valid after init). */
+ export function getHandler() {
+ return handler;
+ }
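The `ensureInit` pattern here (and in container.js above) is the usual init-once promise with retry on failure; in isolation it looks like this sketch:

```ts
// Concurrent callers share one promise; a failed init clears it so the
// next request can retry instead of caching the error forever.
let initPromise: Promise<void> | null = null

function ensureInitOnce(doInit: () => Promise<void>): Promise<void> {
  if (!initPromise) {
    initPromise = doInit().catch((err) => {
      initPromise = null // allow retry on the next call
      throw err
    })
  }
  return initPromise
}
```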
package/dist/cloudflare/worker.d.ts ADDED
@@ -0,0 +1,27 @@
+ /**
+ * Cloudflare Worker entry point for hatk.
+ *
+ * Handles HTTP requests via the Workers fetch handler. The firehose indexer
+ * and backfill run in a companion Container — the Worker only serves the
+ * API and web UI.
+ *
+ * For SvelteKit apps, prefer using the handle hook from
+ * '@hatk/hatk/cloudflare/hooks' with adapter-cloudflare instead.
+ */
+ import { type CloudflareEnv } from './init.ts';
+ interface ContainerBinding {
+ resync(did: string): Promise<void>;
+ fetch(request: Request): Promise<Response>;
+ }
+ interface Env extends CloudflareEnv {
+ CONTAINER: ContainerBinding;
+ }
+ interface ExecutionContext {
+ waitUntil(promise: Promise<unknown>): void;
+ passThroughOnException(): void;
+ }
+ declare const _default: {
+ fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response>;
+ };
+ export default _default;
+ //# sourceMappingURL=worker.d.ts.map
package/dist/cloudflare/worker.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"worker.d.ts","sourceRoot":"","sources":["../../src/cloudflare/worker.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH,OAAO,EAA0B,KAAK,aAAa,EAAE,MAAM,WAAW,CAAA;AAGtE,UAAU,gBAAgB;IACxB,MAAM,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAClC,KAAK,CAAC,OAAO,EAAE,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAA;CAC3C;AAED,UAAU,GAAI,SAAQ,aAAa;IACjC,SAAS,EAAE,gBAAgB,CAAA;CAC5B;AAED,UAAU,gBAAgB;IACxB,SAAS,CAAC,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,GAAG,IAAI,CAAA;IAC1C,sBAAsB,IAAI,IAAI,CAAA;CAC/B;;mBAGsB,OAAO,OAAO,GAAG,OAAO,gBAAgB,GAAG,OAAO,CAAC,QAAQ,CAAC;;AADnF,wBA2CC"}
package/dist/cloudflare/worker.js ADDED
@@ -0,0 +1,54 @@
+ /**
+ * Cloudflare Worker entry point for hatk.
+ *
+ * Handles HTTP requests via the Workers fetch handler. The firehose indexer
+ * and backfill run in a companion Container — the Worker only serves the
+ * API and web UI.
+ *
+ * For SvelteKit apps, prefer using the handle hook from
+ * '@hatk/hatk/cloudflare/hooks' with adapter-cloudflare instead.
+ */
+ import { ensureInit, getHandler } from "./init.js";
+ import { isHatkRoute } from "../adapter.js";
+ export default {
+ async fetch(request, env, ctx) {
+ try {
+ await ensureInit(env);
+ }
+ catch (err) {
+ return new Response(JSON.stringify({ error: 'Initialization failed', detail: err.message }), {
+ status: 500,
+ headers: { 'Content-Type': 'application/json' },
+ });
+ }
+ const url = new URL(request.url);
+ const handler = getHandler();
+ // Intercept admin resync to delegate to the Container via RPC
+ if (url.pathname === '/admin/repos/resync' && request.method === 'POST') {
+ try {
+ const body = await request.text();
+ const { dids } = body ? JSON.parse(body) : {};
+ if (Array.isArray(dids)) {
+ for (const did of dids) {
+ ctx.waitUntil(env.CONTAINER.resync(did));
+ }
+ return new Response(JSON.stringify({ resyncing: dids.length }), {
+ headers: { 'Content-Type': 'application/json', 'Access-Control-Allow-Origin': '*' },
+ });
+ }
+ }
+ catch (err) {
+ return new Response(JSON.stringify({ error: err.message }), {
+ status: 500,
+ headers: { 'Content-Type': 'application/json' },
+ });
+ }
+ }
+ // hatk routes → handler
+ if (isHatkRoute(url.pathname)) {
+ return handler(request);
+ }
+ // Everything else → 404 (use cloudflare/hooks for SvelteKit integration)
+ return new Response('Not found', { status: 404 });
+ },
+ };
package/dist/database/adapters/d1.d.ts ADDED
@@ -0,0 +1,56 @@
+ import type { DatabasePort, BulkInserter, Dialect } from '../ports.ts';
+ /**
+ * D1 database adapter for Cloudflare Workers/Containers.
+ *
+ * D1 is SQLite under the hood but accessed via an HTTP-based binding API.
+ * Key differences from the SQLite adapter:
+ * - No raw transactions — uses d1.batch() for atomic multi-statement execution
+ * - No prepared statement reuse — each query is a fresh prepare+bind
+ * - Bulk inserts use batched INSERT statements instead of native appenders
+ */
+ /** Minimal D1 type definitions (matches Cloudflare's D1Database binding) */
+ interface D1Database {
+ prepare(sql: string): D1PreparedStatement;
+ batch<T = unknown>(statements: D1PreparedStatement[]): Promise<D1Result<T>[]>;
+ exec(sql: string): Promise<D1ExecResult>;
+ }
+ interface D1PreparedStatement {
+ bind(...values: unknown[]): D1PreparedStatement;
+ all<T = Record<string, unknown>>(): Promise<D1Result<T>>;
+ run(): Promise<D1Result>;
+ first<T = Record<string, unknown>>(column?: string): Promise<T | null>;
+ }
+ interface D1Result<T = unknown> {
+ results: T[];
+ success: boolean;
+ meta: Record<string, unknown>;
+ }
+ interface D1ExecResult {
+ count: number;
+ duration: number;
+ }
+ export declare class D1Adapter implements DatabasePort {
+ dialect: Dialect;
+ private db;
+ private txBuffer;
+ /**
+ * Initialize with an existing D1 binding (from env.DB in Worker/Container).
+ * The `path` argument is ignored — D1 bindings are configured in wrangler.jsonc.
+ */
+ open(_path: string): Promise<void>;
+ /** Set the D1 binding directly (called before open). */
+ initWithBinding(db: D1Database): void;
+ close(): void;
+ query<T = Record<string, unknown>>(sql: string, params?: unknown[]): Promise<T[]>;
+ execute(sql: string, params?: unknown[]): Promise<void>;
+ executeMultiple(sql: string): Promise<void>;
+ beginTransaction(): Promise<void>;
+ commit(): Promise<void>;
+ rollback(): Promise<void>;
+ createBulkInserter(table: string, columns: string[], options?: {
+ onConflict?: 'ignore' | 'replace';
+ batchSize?: number;
+ }): Promise<BulkInserter>;
+ }
+ export {};
+ //# sourceMappingURL=d1.d.ts.map
package/dist/database/adapters/d1.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"d1.d.ts","sourceRoot":"","sources":["../../../src/database/adapters/d1.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,YAAY,EAAE,OAAO,EAAE,MAAM,aAAa,CAAA;AAEtE;;;;;;;;GAQG;AAEH,4EAA4E;AAC5E,UAAU,UAAU;IAClB,OAAO,CAAC,GAAG,EAAE,MAAM,GAAG,mBAAmB,CAAA;IACzC,KAAK,CAAC,CAAC,GAAG,OAAO,EAAE,UAAU,EAAE,mBAAmB,EAAE,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,CAAA;IAC7E,IAAI,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,YAAY,CAAC,CAAA;CACzC;AAED,UAAU,mBAAmB;IAC3B,IAAI,CAAC,GAAG,MAAM,EAAE,OAAO,EAAE,GAAG,mBAAmB,CAAA;IAC/C,GAAG,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;IACxD,GAAG,IAAI,OAAO,CAAC,QAAQ,CAAC,CAAA;IACxB,KAAK,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,CAAC,GAAG,IAAI,CAAC,CAAA;CACvE;AAED,UAAU,QAAQ,CAAC,CAAC,GAAG,OAAO;IAC5B,OAAO,EAAE,CAAC,EAAE,CAAA;IACZ,OAAO,EAAE,OAAO,CAAA;IAChB,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;CAC9B;AAED,UAAU,YAAY;IACpB,KAAK,EAAE,MAAM,CAAA;IACb,QAAQ,EAAE,MAAM,CAAA;CACjB;AAmBD,qBAAa,SAAU,YAAW,YAAY;IAC5C,OAAO,EAAE,OAAO,CAAO;IAEvB,OAAO,CAAC,EAAE,CAAa;IACvB,OAAO,CAAC,QAAQ,CAAqC;IAErD;;;OAGG;IACG,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAQxC,wDAAwD;IACxD,eAAe,CAAC,EAAE,EAAE,UAAU,GAAG,IAAI;IAIrC,KAAK,IAAI,IAAI;IAIP,KAAK,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,GAAE,OAAO,EAAO,GAAG,OAAO,CAAC,CAAC,EAAE,CAAC;IAOrF,OAAO,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,GAAE,OAAO,EAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAa3D,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAa3C,gBAAgB,IAAI,OAAO,CAAC,IAAI,CAAC;IAIjC,MAAM,IAAI,OAAO,CAAC,IAAI,CAAC;IASvB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAIzB,kBAAkB,CACtB,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,MAAM,EAAE,EACjB,OAAO,CAAC,EAAE;QAAE,UAAU,CAAC,EAAE,QAAQ,GAAG,SAAS,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GAClE,OAAO,CAAC,YAAY,CAAC;CA+BzB"}
package/dist/database/adapters/d1.js ADDED
@@ -0,0 +1,108 @@
+ /**
+ * Translate DuckDB-style $1, $2 placeholders to ? placeholders.
+ * Same logic as the SQLite adapter — D1 uses ? style.
+ */
+ function translateParams(sql, params) {
+ if (params.length === 0)
+ return { sql, params };
+ const expandedParams = [];
+ const translated = sql.replace(/\$(\d+)/g, (_match, numStr) => {
+ const idx = parseInt(numStr) - 1;
+ expandedParams.push(params[idx]);
+ return '?';
+ });
+ return { sql: translated, params: expandedParams };
+ }
+ export class D1Adapter {
+ dialect = 'd1';
+ db;
+ txBuffer = null;
+ /**
+ * Initialize with an existing D1 binding (from env.DB in Worker/Container).
+ * The `path` argument is ignored — D1 bindings are configured in wrangler.jsonc.
+ */
+ async open(_path) {
+ // D1 binding is injected via initWithBinding(), not opened by path.
+ // This is a no-op if already initialized.
+ if (!this.db) {
+ throw new Error('D1Adapter requires initWithBinding(db) before use');
+ }
+ }
+ /** Set the D1 binding directly (called before open). */
+ initWithBinding(db) {
+ this.db = db;
+ }
+ close() {
+ // D1 bindings don't need explicit cleanup
+ }
+ async query(sql, params = []) {
+ const t = translateParams(sql, params);
+ const stmt = t.params.length > 0 ? this.db.prepare(t.sql).bind(...t.params) : this.db.prepare(t.sql);
+ const result = await stmt.all();
+ return result.results;
+ }
+ async execute(sql, params = []) {
+ const t = translateParams(sql, params);
+ const stmt = t.params.length > 0 ? this.db.prepare(t.sql).bind(...t.params) : this.db.prepare(t.sql);
+ // If inside a transaction, buffer instead of executing
+ if (this.txBuffer !== null) {
+ this.txBuffer.push(stmt);
+ return;
+ }
+ await stmt.run();
+ }
+ async executeMultiple(sql) {
+ // D1's exec() can be unreliable with multi-statement SQL.
+ // Split on semicolons and run each statement via prepare().run().
+ const statements = sql
+ .split(';')
+ .map((s) => s.trim())
+ .filter((s) => s.length > 0);
+ for (const stmt of statements) {
+ await this.db.prepare(stmt).run();
+ }
+ }
+ async beginTransaction() {
+ this.txBuffer = [];
+ }
+ async commit() {
+ if (this.txBuffer === null)
+ return;
+ const statements = this.txBuffer;
+ this.txBuffer = null;
+ if (statements.length > 0) {
+ await this.db.batch(statements);
+ }
+ }
+ async rollback() {
+ this.txBuffer = null;
+ }
+ async createBulkInserter(table, columns, options) {
+ const placeholders = columns.map(() => '?').join(', ');
+ const conflict = options?.onConflict === 'ignore' ? ' OR IGNORE' : options?.onConflict === 'replace' ? ' OR REPLACE' : '';
+ const sqlTemplate = `INSERT${conflict} INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
+ const buffer = [];
+ const batchSize = options?.batchSize ?? 200; // smaller batches for D1 CPU limits
+ const db = this.db;
+ const flush = async () => {
+ if (buffer.length > 0) {
+ await db.batch(buffer);
+ buffer.length = 0;
+ }
+ };
+ return {
+ append(values) {
+ buffer.push(db.prepare(sqlTemplate).bind(...values));
+ if (buffer.length >= batchSize) {
+ flush();
+ }
+ },
+ async flush() {
+ await flush();
+ },
+ async close() {
+ await flush();
+ },
+ };
+ }
+ }
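To make the `translateParams` behavior above concrete (the table and column names in the SQL string are made up for illustration):

```ts
// Every $n becomes a ?, and params are expanded in order of appearance,
// so a repeated $1 is bound twice.
const { sql, params } = translateParams(
  'SELECT * FROM records WHERE did = $1 AND indexed_at > $2 AND did = $1',
  ['did:plc:abc123', '2024-01-01T00:00:00Z'],
)
// sql    === 'SELECT * FROM records WHERE did = ? AND indexed_at > ? AND did = ?'
// params === ['did:plc:abc123', '2024-01-01T00:00:00Z', 'did:plc:abc123']
```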
@@ -479,7 +479,7 @@ export function buildInsertOp(collection, uri, cid, authorDid, record) {
  if (rawValue === undefined || rawValue === null) {
  values.push(null);
  }
- else if (col.sqlType === 'JSON') {
+ else if (col.isJson) {
  values.push(JSON.stringify(rawValue));
  }
  else {
@@ -514,7 +514,7 @@ export async function insertRecord(collection, uri, cid, authorDid, record) {
  if (raw === undefined || raw === null) {
  values.push(null);
  }
- else if (col.sqlType === 'JSON') {
+ else if (col.isJson) {
  values.push(JSON.stringify(raw));
  }
  else {
@@ -553,7 +553,7 @@ export async function insertRecord(collection, uri, cid, authorDid, record) {
  if (raw === undefined || raw === null) {
  values.push(null);
  }
- else if (col.sqlType === 'JSON') {
+ else if (col.isJson) {
  values.push(JSON.stringify(raw));
  }
  else {
@@ -577,7 +577,7 @@ export async function insertRecord(collection, uri, cid, authorDid, record) {
  if (raw === undefined || raw === null) {
  values.push(null);
  }
- else if (col.sqlType === 'JSON') {
+ else if (col.isJson) {
  values.push(JSON.stringify(raw));
  }
  else {
@@ -1267,7 +1267,7 @@ export function reshapeRow(row, childData, unionData) {
  if (schema) {
  for (const col of schema.columns) {
  nameMap.set(col.name, col.originalName);
- if (col.sqlType === 'JSON')
+ if (col.isJson)
  jsonCols.add(col.name);
  }
  }
@@ -106,7 +106,7 @@ function computeFtsSchema(collection) {
  selectExprs.push(`t.${col.name}`);
  searchColNames.push(col.name);
  }
- else if ((col.sqlType === 'JSON' || col.sqlType === 'TEXT') && record?.properties) {
+ else if (col.isJson && record?.properties) {
  const prop = record.properties[col.originalName];
  if (prop?.type === 'blob')
  continue; // skip blobs
package/dist/database/schema.d.ts CHANGED
@@ -5,6 +5,7 @@ export interface ColumnDef {
  sqlType: string;
  notNull: boolean;
  isRef: boolean;
+ isJson: boolean;
  }
  export interface UnionBranchSchema {
  type: string;
@@ -1 +1 @@
- {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../../src/database/schema.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAG9C,MAAM,WAAW,SAAS;IACxB,IAAI,EAAE,MAAM,CAAA;IACZ,YAAY,EAAE,MAAM,CAAA;IACpB,OAAO,EAAE,MAAM,CAAA;IACf,OAAO,EAAE,OAAO,CAAA;IAChB,KAAK,EAAE,OAAO,CAAA;CACf;AAED,MAAM,WAAW,iBAAiB;IAChC,IAAI,EAAE,MAAM,CAAA;IACZ,UAAU,EAAE,MAAM,CAAA;IAClB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;IACpB,OAAO,EAAE,OAAO,CAAA;IAChB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,YAAY,CAAC,EAAE,MAAM,CAAA;CACtB;AAED,MAAM,WAAW,gBAAgB;IAC/B,SAAS,EAAE,MAAM,CAAA;IACjB,QAAQ,EAAE,iBAAiB,EAAE,CAAA;CAC9B;AAED,MAAM,WAAW,WAAW;IAC1B,UAAU,EAAE,MAAM,CAAA;IAClB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;IACpB,UAAU,EAAE,MAAM,EAAE,CAAA;IACpB,QAAQ,EAAE,gBAAgB,EAAE,CAAA;IAC5B,MAAM,EAAE,gBAAgB,EAAE,CAAA;CAC3B;AAED,MAAM,WAAW,gBAAgB;IAC/B,gBAAgB,EAAE,MAAM,CAAA;IACxB,SAAS,EAAE,MAAM,CAAA;IACjB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;CACrB;AAGD,wBAAgB,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAE/C;AA8CD,wBAAgB,YAAY,CAAC,WAAW,EAAE,MAAM,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CASlE;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,MAAM,EAAE,CASxE;AAID,wBAAgB,aAAa,CAAC,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,CAI9D;AAED,wBAAgB,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,GAAG,GAAG,SAAS,CAExD;AAED,wBAAgB,cAAc,IAAI,KAAK,CAAC;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,GAAG,CAAA;CAAE,CAAC,CAEtE;AAED,iFAAiF;AACjF,wBAAgB,eAAe,IAAI,GAAG,EAAE,CAEvC;AAwHD,wBAAgB,mBAAmB,CACjC,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,GAAG,EACZ,QAAQ,CAAC,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,EAC3B,OAAO,GAAE,UAA2B,GACnC,WAAW,CA0Gb;AAGD,wBAAgB,sBAAsB,CAAC,MAAM,EAAE,WAAW,EAAE,OAAO,GAAE,UAA2B,GAAG,MAAM,CAoExG;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAC1B,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,EAC1B,WAAW,EAAE,MAAM,EAAE,EACrB,OAAO,GAAE,UAA2B,GACnC;IAAE,OAAO,EAAE,WAAW,EAAE,CAAC;IAAC,aAAa,EAAE,MAAM,EAAE,CAAA;CAAE,CA2BrD"}
+ {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../../src/database/schema.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAG9C,MAAM,WAAW,SAAS;IACxB,IAAI,EAAE,MAAM,CAAA;IACZ,YAAY,EAAE,MAAM,CAAA;IACpB,OAAO,EAAE,MAAM,CAAA;IACf,OAAO,EAAE,OAAO,CAAA;IAChB,KAAK,EAAE,OAAO,CAAA;IACd,MAAM,EAAE,OAAO,CAAA;CAChB;AAED,MAAM,WAAW,iBAAiB;IAChC,IAAI,EAAE,MAAM,CAAA;IACZ,UAAU,EAAE,MAAM,CAAA;IAClB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;IACpB,OAAO,EAAE,OAAO,CAAA;IAChB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,YAAY,CAAC,EAAE,MAAM,CAAA;CACtB;AAED,MAAM,WAAW,gBAAgB;IAC/B,SAAS,EAAE,MAAM,CAAA;IACjB,QAAQ,EAAE,iBAAiB,EAAE,CAAA;CAC9B;AAED,MAAM,WAAW,WAAW;IAC1B,UAAU,EAAE,MAAM,CAAA;IAClB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;IACpB,UAAU,EAAE,MAAM,EAAE,CAAA;IACpB,QAAQ,EAAE,gBAAgB,EAAE,CAAA;IAC5B,MAAM,EAAE,gBAAgB,EAAE,CAAA;CAC3B;AAED,MAAM,WAAW,gBAAgB;IAC/B,gBAAgB,EAAE,MAAM,CAAA;IACxB,SAAS,EAAE,MAAM,CAAA;IACjB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;CACrB;AAGD,wBAAgB,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAE/C;AA+CD,wBAAgB,YAAY,CAAC,WAAW,EAAE,MAAM,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CASlE;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,MAAM,EAAE,CASxE;AAID,wBAAgB,aAAa,CAAC,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,CAI9D;AAED,wBAAgB,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,GAAG,GAAG,SAAS,CAExD;AAED,wBAAgB,cAAc,IAAI,KAAK,CAAC;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,GAAG,CAAA;CAAE,CAAC,CAEtE;AAED,iFAAiF;AACjF,wBAAgB,eAAe,IAAI,GAAG,EAAE,CAEvC;AAyHD,wBAAgB,mBAAmB,CACjC,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,GAAG,EACZ,QAAQ,CAAC,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,EAC3B,OAAO,GAAE,UAA2B,GACnC,WAAW,CA+Gb;AAGD,wBAAgB,sBAAsB,CAAC,MAAM,EAAE,WAAW,EAAE,OAAO,GAAE,UAA2B,GAAG,MAAM,CAoExG;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAC1B,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,EAC1B,WAAW,EAAE,MAAM,EAAE,EACrB,OAAO,GAAE,UAA2B,GACnC;IAAE,OAAO,EAAE,WAAW,EAAE,CAAC;IAAC,aAAa,EAAE,MAAM,EAAE,CAAA;CAAE,CA2BrD"}
@@ -8,36 +8,36 @@ export function toSnakeCase(str) {
8
8
  function mapType(prop, dialect) {
9
9
  if (prop.type === 'string') {
10
10
  if (prop.format === 'datetime')
11
- return { sqlType: dialect.typeMap.timestamp, isRef: false };
11
+ return { sqlType: dialect.typeMap.timestamp, isRef: false, isJson: false };
12
12
  if (prop.format === 'at-uri')
13
- return { sqlType: dialect.typeMap.text, isRef: true };
14
- return { sqlType: dialect.typeMap.text, isRef: false };
13
+ return { sqlType: dialect.typeMap.text, isRef: true, isJson: false };
14
+ return { sqlType: dialect.typeMap.text, isRef: false, isJson: false };
15
15
  }
16
16
  if (prop.type === 'integer')
17
- return { sqlType: dialect.typeMap.integer, isRef: false };
17
+ return { sqlType: dialect.typeMap.integer, isRef: false, isJson: false };
18
18
  if (prop.type === 'boolean')
19
- return { sqlType: dialect.typeMap.boolean, isRef: false };
19
+ return { sqlType: dialect.typeMap.boolean, isRef: false, isJson: false };
20
20
  if (prop.type === 'bytes')
21
- return { sqlType: dialect.typeMap.blob, isRef: false };
21
+ return { sqlType: dialect.typeMap.blob, isRef: false, isJson: false };
22
22
  if (prop.type === 'cid-link')
23
- return { sqlType: dialect.typeMap.text, isRef: false };
23
+ return { sqlType: dialect.typeMap.text, isRef: false, isJson: false };
24
24
  if (prop.type === 'array')
25
- return { sqlType: dialect.jsonType, isRef: false };
25
+ return { sqlType: dialect.jsonType, isRef: false, isJson: true };
26
26
  if (prop.type === 'blob')
27
- return { sqlType: dialect.jsonType, isRef: false };
27
+ return { sqlType: dialect.jsonType, isRef: false, isJson: true };
28
28
  if (prop.type === 'union')
29
- return { sqlType: dialect.jsonType, isRef: false };
29
+ return { sqlType: dialect.jsonType, isRef: false, isJson: true };
30
30
  if (prop.type === 'unknown')
31
- return { sqlType: dialect.jsonType, isRef: false };
31
+ return { sqlType: dialect.jsonType, isRef: false, isJson: true };
32
32
  if (prop.type === 'object')
33
- return { sqlType: dialect.jsonType, isRef: false };
33
+ return { sqlType: dialect.jsonType, isRef: false, isJson: true };
34
34
  if (prop.type === 'ref') {
35
35
  // strongRef contains { uri, cid } — handled specially in generateTableSchema
36
36
  if (prop.ref === 'com.atproto.repo.strongRef')
37
- return { sqlType: 'STRONG_REF', isRef: true };
38
- return { sqlType: dialect.jsonType, isRef: false };
37
+ return { sqlType: 'STRONG_REF', isRef: true, isJson: false };
38
+ return { sqlType: dialect.jsonType, isRef: false, isJson: true };
39
39
  }
40
- return { sqlType: dialect.typeMap.text, isRef: false };
40
+ return { sqlType: dialect.typeMap.text, isRef: false, isJson: false };
41
41
  }
42
42
  // Recursively find all .json files in a directory
43
43
  function findJsonFiles(dir) {
@@ -182,7 +182,7 @@ function resolveUnionBranch(ref, collection, fieldName, defs, lexicons, dialect)
  const tableName = `"${collection}__${snakeField}_${branchName}"`;
  const columns = [];
  for (const [propName, prop] of Object.entries(propSource)) {
- const { sqlType, isRef } = mapType(prop, dialect);
+ const { sqlType, isRef, isJson } = mapType(prop, dialect);
  // Skip STRONG_REF expansion in branch tables — treat as JSON
  const finalType = sqlType === 'STRONG_REF' ? dialect.jsonType : sqlType;
  columns.push({
@@ -190,7 +190,8 @@ function resolveUnionBranch(ref, collection, fieldName, defs, lexicons, dialect)
  originalName: propName,
  sqlType: finalType,
  notNull: branchRequired.has(propName),
- isRef: finalType !== 'JSON' && isRef,
+ isRef: finalType !== dialect.jsonType && isRef,
+ isJson: isJson || sqlType === 'STRONG_REF',
  });
  }
  return { type: fullType, branchName, tableName, columns, isArray, arrayField, wrapperField };
@@ -229,6 +230,7 @@ export function generateTableSchema(nsid, lexicon, lexicons, dialect = DUCKDB_DI
  sqlType: dialect.jsonType,
  notNull: required.has(fieldName),
  isRef: false,
+ isJson: true,
  });
  continue;
  }
@@ -239,13 +241,14 @@ export function generateTableSchema(nsid, lexicon, lexicons, dialect = DUCKDB_DI
  const childColumns = [];
  const itemRequired = new Set(p.items?.required || lexicon.defs?.[p.items?.ref?.slice(1)]?.required || []);
  for (const [itemField, itemProp] of Object.entries(itemProps)) {
- const { sqlType, isRef } = mapType(itemProp, dialect);
+ const { sqlType, isRef, isJson } = mapType(itemProp, dialect);
  childColumns.push({
  name: toSnakeCase(itemField),
  originalName: itemField,
  sqlType,
  notNull: itemRequired.has(itemField),
  isRef,
+ isJson,
  });
  }
  const snakeField = toSnakeCase(fieldName);
@@ -258,7 +261,7 @@ export function generateTableSchema(nsid, lexicon, lexicons, dialect = DUCKDB_DI
  continue;
  }
  }
- const { sqlType, isRef } = mapType(p, dialect);
+ const { sqlType, isRef, isJson } = mapType(p, dialect);
  if (sqlType === 'STRONG_REF') {
  // Expand strongRef into two columns: {name}_uri and {name}_cid
  columns.push({
@@ -267,6 +270,7 @@ export function generateTableSchema(nsid, lexicon, lexicons, dialect = DUCKDB_DI
  sqlType: dialect.typeMap.text,
  notNull: required.has(fieldName),
  isRef: true,
+ isJson: false,
  });
  columns.push({
  name: toSnakeCase(fieldName) + '_cid',
@@ -274,6 +278,7 @@ export function generateTableSchema(nsid, lexicon, lexicons, dialect = DUCKDB_DI
  sqlType: dialect.typeMap.text,
  notNull: required.has(fieldName),
  isRef: false,
+ isJson: false,
  });
  }
  else {
@@ -283,6 +288,7 @@ export function generateTableSchema(nsid, lexicon, lexicons, dialect = DUCKDB_DI
  sqlType,
  notNull: required.has(fieldName),
  isRef,
+ isJson,
  });
  }
  }
@@ -331,7 +337,7 @@ export function generateCreateTableSQL(schema, dialect = DUCKDB_DIALECT) {
  childDDL.push(`CREATE INDEX IF NOT EXISTS idx_${childPrefix}_parent ON ${child.tableName}(parent_uri);`);
  childDDL.push(`CREATE INDEX IF NOT EXISTS idx_${childPrefix}_did ON ${child.tableName}(parent_did);`);
  for (const col of child.columns) {
- if (col.sqlType === 'JSON' || col.sqlType === 'BLOB')
+ if (col.isJson || col.sqlType === 'BLOB')
  continue;
  childDDL.push(`CREATE INDEX IF NOT EXISTS idx_${childPrefix}_${col.name} ON ${child.tableName}(${col.name});`);
  }
@@ -349,7 +355,7 @@ export function generateCreateTableSQL(schema, dialect = DUCKDB_DIALECT) {
  childDDL.push(`CREATE INDEX IF NOT EXISTS idx_${branchPrefix}_parent ON ${branch.tableName}(parent_uri);`);
  childDDL.push(`CREATE INDEX IF NOT EXISTS idx_${branchPrefix}_did ON ${branch.tableName}(parent_did);`);
  for (const col of branch.columns) {
- if (col.sqlType === 'JSON' || col.sqlType === 'BLOB')
+ if (col.isJson || col.sqlType === 'BLOB')
  continue;
  childDDL.push(`CREATE INDEX IF NOT EXISTS idx_${branchPrefix}_${col.name} ON ${branch.tableName}(${col.name});`);
  }
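Note on the hunks above: the old code decided whether a column was indexable by comparing `col.sqlType` to the literal string 'JSON', which only holds for dialects whose `jsonType` is literally JSON; the new `isJson` flag is recorded at the point where the schema generator assigns `dialect.jsonType`, so the decision survives any type mapping. A minimal TypeScript sketch of the failure mode, using a hypothetical dialect and column shape modeled on the code above (not the package's exact internals):

```typescript
// Hypothetical column shape, modeled on the objects pushed above.
interface Column { name: string; sqlType: string; isJson: boolean }

// A dialect that stores JSON in TEXT columns (SQLite-style) never
// produces the literal SQL type 'JSON'.
const sqliteLike = { jsonType: 'TEXT' };

const embed: Column = { name: 'embed', sqlType: sqliteLike.jsonType, isJson: true };

// Old check: compares against the literal 'JSON', so JSON-in-TEXT
// columns slip through and get useless indexes created on them.
const oldSkip = (col: Column) => col.sqlType === 'JSON';

// New check: trusts the flag recorded when dialect.jsonType was
// assigned, independent of what SQL type it mapped to.
const newSkip = (col: Column) => col.isJson;

console.log(oldSkip(embed)); // false -> index would wrongly be created
console.log(newSkip(embed)); // true  -> index correctly skipped
```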
package/dist/labels.d.ts CHANGED
@@ -17,7 +17,7 @@ export interface LabelModule {
  definition?: LabelDefinition;
  evaluate?: (ctx: LabelRuleContext) => Promise<string[]>;
  }
- export declare function defineLabels(module: LabelModule): {
+ export declare function defineLabel(module: LabelModule): {
  definition?: LabelDefinition;
  evaluate?: (ctx: LabelRuleContext) => Promise<string[]>;
  __type: "labels";
package/dist/labels.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"labels.d.ts","sourceRoot":"","sources":["../src/labels.ts"],"names":[],"mappings":"AA8BA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;AAIlD,wDAAwD;AACxD,MAAM,WAAW,gBAAgB;IAC/B,EAAE,EAAE;QACF,KAAK,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;QACtD,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;KACtD,CAAA;IACD,MAAM,EAAE;QACN,GAAG,EAAE,MAAM,CAAA;QACX,GAAG,EAAE,MAAM,CAAA;QACX,GAAG,EAAE,MAAM,CAAA;QACX,UAAU,EAAE,MAAM,CAAA;QAClB,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAA;KAC3B,CAAA;CACF;AAED,MAAM,WAAW,WAAW;IAC1B,UAAU,CAAC,EAAE,eAAe,CAAA;IAC5B,QAAQ,CAAC,EAAE,CAAC,GAAG,EAAE,gBAAgB,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;CACxD;AAED,wBAAgB,YAAY,CAAC,MAAM,EAAE,WAAW;iBAJjC,eAAe;eACjB,CAAC,GAAG,EAAE,gBAAgB,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC;;EAKxD;AAYD;;;;;;;;GAQG;AACH,wBAAsB,UAAU,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAmCjE;AAED,oEAAoE;AACpE,wBAAgB,mBAAmB,CACjC,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE;IAAE,UAAU,CAAC,EAAE,eAAe,CAAC;IAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,EAAE,gBAAgB,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;CAAE,GAClG,IAAI,CAON;AAED;;;GAGG;AACH,wBAAsB,aAAa,CAAC,MAAM,EAAE;IAC1C,GAAG,EAAE,MAAM,CAAA;IACX,GAAG,EAAE,MAAM,CAAA;IACX,GAAG,EAAE,MAAM,CAAA;IACX,UAAU,EAAE,MAAM,CAAA;IAClB,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAA;CAC3B,GAAG,OAAO,CAAC,IAAI,CAAC,CAyBhB;AAED;;;;;GAKG;AACH,wBAAsB,YAAY,CAAC,WAAW,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC;IAAE,OAAO,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,MAAM,CAAA;CAAE,CAAC,CAuCvG;AAED,yEAAyE;AACzE,wBAAgB,mBAAmB,IAAI,eAAe,EAAE,CAEvD"}
+ {"version":3,"file":"labels.d.ts","sourceRoot":"","sources":["../src/labels.ts"],"names":[],"mappings":"AA8BA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;AAIlD,wDAAwD;AACxD,MAAM,WAAW,gBAAgB;IAC/B,EAAE,EAAE;QACF,KAAK,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;QACtD,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;KACtD,CAAA;IACD,MAAM,EAAE;QACN,GAAG,EAAE,MAAM,CAAA;QACX,GAAG,EAAE,MAAM,CAAA;QACX,GAAG,EAAE,MAAM,CAAA;QACX,UAAU,EAAE,MAAM,CAAA;QAClB,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAA;KAC3B,CAAA;CACF;AAED,MAAM,WAAW,WAAW;IAC1B,UAAU,CAAC,EAAE,eAAe,CAAA;IAC5B,QAAQ,CAAC,EAAE,CAAC,GAAG,EAAE,gBAAgB,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;CACxD;AAED,wBAAgB,WAAW,CAAC,MAAM,EAAE,WAAW;iBAJhC,eAAe;eACjB,CAAC,GAAG,EAAE,gBAAgB,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC;;EAKxD;AAYD;;;;;;;;GAQG;AACH,wBAAsB,UAAU,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAmCjE;AAED,oEAAoE;AACpE,wBAAgB,mBAAmB,CACjC,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE;IAAE,UAAU,CAAC,EAAE,eAAe,CAAC;IAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,EAAE,gBAAgB,KAAK,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;CAAE,GAClG,IAAI,CAON;AAED;;;GAGG;AACH,wBAAsB,aAAa,CAAC,MAAM,EAAE;IAC1C,GAAG,EAAE,MAAM,CAAA;IACX,GAAG,EAAE,MAAM,CAAA;IACX,GAAG,EAAE,MAAM,CAAA;IACX,UAAU,EAAE,MAAM,CAAA;IAClB,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAA;CAC3B,GAAG,OAAO,CAAC,IAAI,CAAC,CAyBhB;AAED;;;;;GAKG;AACH,wBAAsB,YAAY,CAAC,WAAW,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC;IAAE,OAAO,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,MAAM,CAAA;CAAE,CAAC,CAuCvG;AAED,yEAAyE;AACzE,wBAAgB,mBAAmB,IAAI,eAAe,EAAE,CAEvD"}
package/dist/labels.js CHANGED
@@ -38,7 +38,7 @@ import { resolve } from 'node:path';
  import { readdirSync } from 'node:fs';
  import { querySQL, runSQL, insertLabels, getSchema } from "./database/db.js";
  import { log, emit } from "./logger.js";
- export function defineLabels(module) {
+ export function defineLabel(module) {
  return { __type: 'labels', ...module };
  }
  const rules = [];
@@ -144,7 +144,7 @@ export async function rescanLabels(collections) {
  let v = row[col.name];
  if (v === null || v === undefined)
  continue;
- if (col.sqlType === 'JSON' && typeof v === 'string') {
+ if (col.isJson && typeof v === 'string') {
  try {
  v = JSON.parse(v);
  }
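Note: `defineLabels` is renamed to `defineLabel` in both the declaration file and the implementation above, so alpha.36 call sites importing the old name will break. A minimal sketch of a call site after the rename; only the import name and the `evaluate` signature come from this diff, and the rule body is an invented placeholder:

```typescript
import { defineLabel } from '@hatk/hatk/labels';

// Hypothetical label module. LabelRuleContext's fields are not visible
// in this diff, so the rule body emits no labels.
export default defineLabel({
  evaluate: async (_ctx) => {
    // Return zero or more label identifiers for the record under review.
    return [];
  },
});
```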
package/dist/server.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"server.d.ts","sourceRoot":"","sources":["../src/server.ts"],"names":[],"mappings":"AAqDA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AA0B9C;;;GAGG;AACH,wBAAgB,oBAAoB,CAAC,WAAW,EAAE,MAAM,EAAE,EAAE,KAAK,EAAE,WAAW,GAAG,IAAI,GAAG,IAAI,CAwH3F;AAED,MAAM,WAAW,aAAa;IAC5B,WAAW,EAAE,MAAM,EAAE,CAAA;IACrB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAA;IACxB,KAAK,EAAE,WAAW,GAAG,IAAI,CAAA;IACzB,MAAM,EAAE,MAAM,EAAE,CAAA;IAChB,QAAQ,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,KAAK,OAAO,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,IAAI,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC,CAAA;IACxF,aAAa,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK;QAAE,GAAG,EAAE,MAAM,CAAA;KAAE,GAAG,IAAI,CAAA;IAC5D,QAAQ,CAAC,EAAE,MAAM,IAAI,CAAA;CACtB;AAED;;;GAGG;AACH,wBAAgB,aAAa,CAAC,MAAM,EAAE,aAAa,GAAG,CAAC,OAAO,EAAE,OAAO,KAAK,OAAO,CAAC,QAAQ,CAAC,CAyxB5F;AAGD,wBAAgB,WAAW,CACzB,IAAI,EAAE,MAAM,EACZ,WAAW,EAAE,MAAM,EAAE,EACrB,SAAS,EAAE,MAAM,GAAG,IAAI,EACxB,KAAK,EAAE,WAAW,GAAG,IAAI,EACzB,MAAM,GAAE,MAAM,EAAO,EACrB,aAAa,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK;IAAE,GAAG,EAAE,MAAM,CAAA;CAAE,GAAG,IAAI,EAC5D,QAAQ,CAAC,EAAE,MAAM,IAAI,GACpB,OAAO,WAAW,EAAE,MAAM,CAG5B"}
+ {"version":3,"file":"server.d.ts","sourceRoot":"","sources":["../src/server.ts"],"names":[],"mappings":"AAqDA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AA0B9C;;;GAGG;AACH,wBAAgB,oBAAoB,CAAC,WAAW,EAAE,MAAM,EAAE,EAAE,KAAK,EAAE,WAAW,GAAG,IAAI,GAAG,IAAI,CAwH3F;AAED,MAAM,WAAW,aAAa;IAC5B,WAAW,EAAE,MAAM,EAAE,CAAA;IACrB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAA;IACxB,KAAK,EAAE,WAAW,GAAG,IAAI,CAAA;IACzB,MAAM,EAAE,MAAM,EAAE,CAAA;IAChB,QAAQ,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,KAAK,OAAO,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,IAAI,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC,CAAA;IACxF,aAAa,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK;QAAE,GAAG,EAAE,MAAM,CAAA;KAAE,GAAG,IAAI,CAAA;IAC5D,QAAQ,CAAC,EAAE,MAAM,IAAI,CAAA;CACtB;AAED;;;GAGG;AACH,wBAAgB,aAAa,CAAC,MAAM,EAAE,aAAa,GAAG,CAAC,OAAO,EAAE,OAAO,KAAK,OAAO,CAAC,QAAQ,CAAC,CA8xB5F;AAGD,wBAAgB,WAAW,CACzB,IAAI,EAAE,MAAM,EACZ,WAAW,EAAE,MAAM,EAAE,EACrB,SAAS,EAAE,MAAM,GAAG,IAAI,EACxB,KAAK,EAAE,WAAW,GAAG,IAAI,EACzB,MAAM,GAAE,MAAM,EAAO,EACrB,aAAa,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK;IAAE,GAAG,EAAE,MAAM,CAAA;CAAE,GAAG,IAAI,EAC5D,QAAQ,CAAC,EAAE,MAAM,IAAI,GACpB,OAAO,WAAW,EAAE,MAAM,CAG5B"}
package/dist/server.js CHANGED
@@ -514,10 +514,16 @@ export function createHandler(config) {
  else {
  repoList = await listActiveRepoDids();
  }
+ const isTargeted = Array.isArray(dids) && dids.length > 0;
  for (const did of repoList) {
  await setRepoStatus(did, 'pending');
  }
- if (config.onResync) {
+ if (isTargeted) {
+ for (const did of repoList) {
+ triggerAutoBackfill(did);
+ }
+ }
+ else if (config.onResync) {
  config.onResync();
  }
  else {
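Note: the resync handler now distinguishes a targeted request (an explicit dids list) from a full resync. Targeted repos are marked pending and then backfilled immediately via triggerAutoBackfill, instead of routing through the app-level onResync callback. A condensed TypeScript sketch of the resulting control flow; the function names come from the hunk above, while the surrounding signature and the stand-in declarations are assumptions:

```typescript
// Stand-ins for the calls visible in the hunk above.
declare function setRepoStatus(did: string, status: string): Promise<void>;
declare function triggerAutoBackfill(did: string): void;
declare function listActiveRepoDids(): Promise<string[]>;

async function resync(dids: string[] | undefined, onResync?: () => void) {
  // Explicit dids means a targeted resync; otherwise resync every active repo.
  const isTargeted = Array.isArray(dids) && dids.length > 0;
  const repoList = isTargeted ? dids! : await listActiveRepoDids();

  for (const did of repoList) await setRepoStatus(did, 'pending');

  if (isTargeted) {
    // New in alpha.37: kick off backfill for each targeted repo directly.
    for (const did of repoList) triggerAutoBackfill(did);
  } else if (onResync) {
    onResync();
  }
  // else: full-resync fallback (elided in the hunk above)
}
```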
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@hatk/hatk",
- "version": "0.0.1-alpha.36",
+ "version": "0.0.1-alpha.37",
  "license": "MIT",
  "bin": {
  "hatk": "dist/cli.js"