@hatk/hatk 0.0.1-alpha.6 → 0.0.1-alpha.61

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (163) hide show
  1. package/dist/adapter.d.ts +19 -0
  2. package/dist/adapter.d.ts.map +1 -0
  3. package/dist/adapter.js +108 -0
  4. package/dist/backfill.d.ts +2 -2
  5. package/dist/backfill.d.ts.map +1 -1
  6. package/dist/backfill.js +83 -41
  7. package/dist/car.d.ts +42 -10
  8. package/dist/car.d.ts.map +1 -1
  9. package/dist/car.js +154 -14
  10. package/dist/cli.js +243 -1043
  11. package/dist/config.d.ts +31 -1
  12. package/dist/config.d.ts.map +1 -1
  13. package/dist/config.js +40 -9
  14. package/dist/database/adapter-factory.d.ts +6 -0
  15. package/dist/database/adapter-factory.d.ts.map +1 -0
  16. package/dist/database/adapter-factory.js +20 -0
  17. package/dist/database/adapters/duckdb-search.d.ts +12 -0
  18. package/dist/database/adapters/duckdb-search.d.ts.map +1 -0
  19. package/dist/database/adapters/duckdb-search.js +27 -0
  20. package/dist/database/adapters/duckdb.d.ts +25 -0
  21. package/dist/database/adapters/duckdb.d.ts.map +1 -0
  22. package/dist/database/adapters/duckdb.js +161 -0
  23. package/dist/database/adapters/sqlite-search.d.ts +23 -0
  24. package/dist/database/adapters/sqlite-search.d.ts.map +1 -0
  25. package/dist/database/adapters/sqlite-search.js +74 -0
  26. package/dist/database/adapters/sqlite.d.ts +18 -0
  27. package/dist/database/adapters/sqlite.d.ts.map +1 -0
  28. package/dist/database/adapters/sqlite.js +88 -0
  29. package/dist/{db.d.ts → database/db.d.ts} +57 -6
  30. package/dist/database/db.d.ts.map +1 -0
  31. package/dist/{db.js → database/db.js} +730 -549
  32. package/dist/database/dialect.d.ts +45 -0
  33. package/dist/database/dialect.d.ts.map +1 -0
  34. package/dist/database/dialect.js +72 -0
  35. package/dist/{fts.d.ts → database/fts.d.ts} +7 -0
  36. package/dist/database/fts.d.ts.map +1 -0
  37. package/dist/{fts.js → database/fts.js} +116 -32
  38. package/dist/database/index.d.ts +7 -0
  39. package/dist/database/index.d.ts.map +1 -0
  40. package/dist/database/index.js +6 -0
  41. package/dist/database/ports.d.ts +50 -0
  42. package/dist/database/ports.d.ts.map +1 -0
  43. package/dist/database/ports.js +1 -0
  44. package/dist/{schema.d.ts → database/schema.d.ts} +14 -3
  45. package/dist/database/schema.d.ts.map +1 -0
  46. package/dist/{schema.js → database/schema.js} +81 -41
  47. package/dist/dev-entry.d.ts +8 -0
  48. package/dist/dev-entry.d.ts.map +1 -0
  49. package/dist/dev-entry.js +113 -0
  50. package/dist/feeds.d.ts +12 -8
  51. package/dist/feeds.d.ts.map +1 -1
  52. package/dist/feeds.js +51 -6
  53. package/dist/hooks.d.ts +85 -0
  54. package/dist/hooks.d.ts.map +1 -0
  55. package/dist/hooks.js +161 -0
  56. package/dist/hydrate.d.ts +7 -6
  57. package/dist/hydrate.d.ts.map +1 -1
  58. package/dist/hydrate.js +4 -16
  59. package/dist/indexer.d.ts +23 -0
  60. package/dist/indexer.d.ts.map +1 -1
  61. package/dist/indexer.js +181 -34
  62. package/dist/labels.d.ts +36 -0
  63. package/dist/labels.d.ts.map +1 -1
  64. package/dist/labels.js +71 -6
  65. package/dist/lexicon-resolve.d.ts.map +1 -1
  66. package/dist/lexicon-resolve.js +27 -112
  67. package/dist/lexicons/com/atproto/label/defs.json +75 -0
  68. package/dist/lexicons/com/atproto/moderation/defs.json +30 -0
  69. package/dist/lexicons/com/atproto/repo/strongRef.json +24 -0
  70. package/dist/lexicons/dev/hatk/applyWrites.json +87 -0
  71. package/dist/lexicons/dev/hatk/createRecord.json +40 -0
  72. package/dist/lexicons/dev/hatk/createReport.json +48 -0
  73. package/dist/lexicons/dev/hatk/deleteRecord.json +25 -0
  74. package/dist/lexicons/dev/hatk/describeCollections.json +41 -0
  75. package/dist/lexicons/dev/hatk/describeFeeds.json +29 -0
  76. package/dist/lexicons/dev/hatk/describeLabels.json +45 -0
  77. package/dist/lexicons/dev/hatk/getFeed.json +30 -0
  78. package/dist/lexicons/dev/hatk/getPreferences.json +19 -0
  79. package/dist/lexicons/dev/hatk/getRecord.json +26 -0
  80. package/dist/lexicons/dev/hatk/getRecords.json +32 -0
  81. package/dist/lexicons/dev/hatk/putPreference.json +28 -0
  82. package/dist/lexicons/dev/hatk/putRecord.json +41 -0
  83. package/dist/lexicons/dev/hatk/searchRecords.json +32 -0
  84. package/dist/lexicons/dev/hatk/uploadBlob.json +23 -0
  85. package/dist/logger.d.ts +29 -0
  86. package/dist/logger.d.ts.map +1 -1
  87. package/dist/logger.js +29 -0
  88. package/dist/main.js +138 -67
  89. package/dist/mst.d.ts +18 -1
  90. package/dist/mst.d.ts.map +1 -1
  91. package/dist/mst.js +19 -8
  92. package/dist/oauth/db.d.ts +3 -1
  93. package/dist/oauth/db.d.ts.map +1 -1
  94. package/dist/oauth/db.js +48 -19
  95. package/dist/oauth/server.d.ts +24 -0
  96. package/dist/oauth/server.d.ts.map +1 -1
  97. package/dist/oauth/server.js +198 -22
  98. package/dist/oauth/session.d.ts +11 -0
  99. package/dist/oauth/session.d.ts.map +1 -0
  100. package/dist/oauth/session.js +65 -0
  101. package/dist/opengraph.d.ts +10 -0
  102. package/dist/opengraph.d.ts.map +1 -1
  103. package/dist/opengraph.js +80 -40
  104. package/dist/pds-proxy.d.ts +60 -0
  105. package/dist/pds-proxy.d.ts.map +1 -0
  106. package/dist/pds-proxy.js +277 -0
  107. package/dist/push.d.ts +34 -0
  108. package/dist/push.d.ts.map +1 -0
  109. package/dist/push.js +184 -0
  110. package/dist/renderer.d.ts +27 -0
  111. package/dist/renderer.d.ts.map +1 -0
  112. package/dist/renderer.js +46 -0
  113. package/dist/resolve-hatk.d.ts +6 -0
  114. package/dist/resolve-hatk.d.ts.map +1 -0
  115. package/dist/resolve-hatk.js +20 -0
  116. package/dist/response.d.ts +16 -0
  117. package/dist/response.d.ts.map +1 -0
  118. package/dist/response.js +69 -0
  119. package/dist/scanner.d.ts +21 -0
  120. package/dist/scanner.d.ts.map +1 -0
  121. package/dist/scanner.js +88 -0
  122. package/dist/seed.d.ts +19 -0
  123. package/dist/seed.d.ts.map +1 -1
  124. package/dist/seed.js +43 -4
  125. package/dist/server-init.d.ts +8 -0
  126. package/dist/server-init.d.ts.map +1 -0
  127. package/dist/server-init.js +62 -0
  128. package/dist/server.d.ts +26 -3
  129. package/dist/server.d.ts.map +1 -1
  130. package/dist/server.js +629 -635
  131. package/dist/setup.d.ts +28 -1
  132. package/dist/setup.d.ts.map +1 -1
  133. package/dist/setup.js +50 -3
  134. package/dist/templates/feed.tpl +14 -0
  135. package/dist/templates/hook.tpl +5 -0
  136. package/dist/templates/label.tpl +15 -0
  137. package/dist/templates/og.tpl +17 -0
  138. package/dist/templates/seed.tpl +11 -0
  139. package/dist/templates/setup.tpl +5 -0
  140. package/dist/templates/test-feed.tpl +19 -0
  141. package/dist/templates/test-xrpc.tpl +19 -0
  142. package/dist/templates/xrpc.tpl +41 -0
  143. package/dist/test.d.ts +1 -1
  144. package/dist/test.d.ts.map +1 -1
  145. package/dist/test.js +39 -32
  146. package/dist/views.js +1 -1
  147. package/dist/vite-plugin.d.ts +1 -1
  148. package/dist/vite-plugin.d.ts.map +1 -1
  149. package/dist/vite-plugin.js +254 -66
  150. package/dist/xrpc.d.ts +75 -11
  151. package/dist/xrpc.d.ts.map +1 -1
  152. package/dist/xrpc.js +189 -39
  153. package/package.json +14 -7
  154. package/public/admin.html +133 -54
  155. package/dist/db.d.ts.map +0 -1
  156. package/dist/fts.d.ts.map +0 -1
  157. package/dist/oauth/hooks.d.ts +0 -10
  158. package/dist/oauth/hooks.d.ts.map +0 -1
  159. package/dist/oauth/hooks.js +0 -40
  160. package/dist/schema.d.ts.map +0 -1
  161. package/dist/test-browser.d.ts +0 -14
  162. package/dist/test-browser.d.ts.map +0 -1
  163. package/dist/test-browser.js +0 -26
@@ -0,0 +1 @@
1
+ {"version":3,"file":"hooks.d.ts","sourceRoot":"","sources":["../src/hooks.ts"],"names":[],"mappings":"AAmCA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AAK9C,OAAO,EAAoB,KAAK,WAAW,EAAE,MAAM,cAAc,CAAA;AACjE,OAAO,EAAqC,KAAK,aAAa,EAAE,MAAM,WAAW,CAAA;AAEjF,0EAA0E;AAC1E,MAAM,MAAM,UAAU,GAAG,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,GAAG;IACjD,0CAA0C;IAC1C,GAAG,EAAE,MAAM,CAAA;IACX,gDAAgD;IAChD,EAAE,EAAE;QACF,KAAK,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,EAAE,KAAK,OAAO,CAAC,OAAO,EAAE,CAAC,CAAA;QAC9D,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;KACxD,CAAA;IACD,gEAAgE;IAChE,UAAU,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;IAC1C,0DAA0D;IAC1D,YAAY,EAAE,CACZ,UAAU,EAAE,MAAM,EAClB,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAC/B,IAAI,CAAC,EAAE;QAAE,IAAI,CAAC,EAAE,MAAM,CAAA;KAAE,KACrB,OAAO,CAAC;QAAE,GAAG,CAAC,EAAE,MAAM,CAAC;QAAC,GAAG,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC,CAAA;IAC5C,qEAAqE;IACrE,SAAS,EAAE,CACT,UAAU,EAAE,MAAM,EAClB,IAAI,EAAE,MAAM,EACZ,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAC5B,OAAO,CAAC;QAAE,GAAG,CAAC,EAAE,MAAM,CAAC;QAAC,GAAG,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC,CAAA;IAC5C,2DAA2D;IAC3D,YAAY,EAAE,CACZ,UAAU,EAAE,MAAM,EAClB,IAAI,EAAE,MAAM,KACT,OAAO,CAAC,IAAI,CAAC,CAAA;CACnB,CAAA;AAED,mEAAmE;AACnE,MAAM,MAAM,WAAW,GAAG;IACxB,iDAAiD;IACjD,MAAM,EAAE,QAAQ,GAAG,QAAQ,CAAA;IAC3B,wCAAwC;IACxC,UAAU,EAAE,MAAM,CAAA;IAClB,2CAA2C;IAC3C,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,CAAA;IAClC,mCAAmC;IACnC,IAAI,EAAE,MAAM,CAAA;IACZ,4BAA4B;IAC5B,GAAG,EAAE,MAAM,CAAA;IACX,wCAAwC;IACxC,EAAE,EAAE;QACF,KAAK,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,EAAE,KAAK,OAAO,CAAC,OAAO,EAAE,CAAC,CAAA;QAC9D,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;KACxD,CAAA;IACD,iDAAiD;IACjD,MAAM,EAAE,WAAW,CAAC,QAAQ,CAAC,CAAA;IAC7B,kCAAkC;IAClC,IAAI,EAAE,aAAa,CAAA;CACpB,CAAA;AAQD,wBAAgB,UAAU,CACxB,KAAK,EAAE,UAAU,EACjB,OAAO,EAAE,CAAC,GAAG,EAAE,UAAU,KAAK,OAAO,CAAC,IAAI,CAAC,GAC1C;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,KAAK,EAAE,UAAU,CAAC;I
AAC,OAAO,EAAE,CAAC,GAAG,EAAE,UAAU,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;CAAE,CAAA;AACrF,wBAAgB,UAAU,CACxB,KAAK,EAAE,WAAW,EAClB,OAAO,EAAE;IAAE,WAAW,EAAE,MAAM,EAAE,CAAA;CAAE,EAClC,OAAO,EAAE,CAAC,GAAG,EAAE,WAAW,KAAK,OAAO,CAAC,IAAI,CAAC,GAC3C;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,KAAK,EAAE,WAAW,CAAC;IAAC,WAAW,EAAE,MAAM,EAAE,CAAC;IAAC,OAAO,EAAE,CAAC,GAAG,EAAE,WAAW,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;CAAE,CAAA;AAkB9G;;;GAGG;AACH,wBAAsB,eAAe,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAQrE;AASD,qDAAqD;AACrD,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,OAAO,CAAC,EAAE,GAAG,GAAG,IAAI,CASlF;AAED,iFAAiF;AACjF,wBAAsB,eAAe,CAAC,GAAG,EAAE,MAAM,EAAE,WAAW,EAAE,WAAW,GAAG,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CA8BjG;AAED;;;GAGG;AACH,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,KAAK,CAAC;IAC7C,MAAM,EAAE,QAAQ,GAAG,QAAQ,CAAA;IAC3B,UAAU,EAAE,MAAM,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,SAAS,EAAE,MAAM,CAAA;IACjB,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,CAAA;CACnC,CAAC,GAAG,IAAI,CA2BR"}
package/dist/hooks.js ADDED
@@ -0,0 +1,161 @@
1
+ var __rewriteRelativeImportExtension = (this && this.__rewriteRelativeImportExtension) || function (path, preserveJsx) {
2
+ if (typeof path === "string" && /^\.\.?\//.test(path)) {
3
+ return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) {
4
+ return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." + cm.toLowerCase() + "js");
5
+ });
6
+ }
7
+ return path;
8
+ };
9
+ /**
10
+ * Lifecycle hooks that run in response to server events.
11
+ *
12
+ * Place hook modules in the `hooks/` directory. Currently supported hooks:
13
+ *
14
+ * - `on-login.ts` — called after each successful OAuth login
15
+ * - `on-commit-*.ts` — called after records are indexed from the firehose
16
+ *
17
+ * Each hook default-exports the result of `defineHook()`.
18
+ *
19
+ * @example
20
+ * ```ts
21
+ * // hooks/on-login.ts
22
+ * import { defineHook } from '$hatk'
23
+ *
24
+ * export default defineHook("on-login", async (ctx) => {
25
+ * await ctx.ensureRepo(ctx.did)
26
+ * })
27
+ * ```
28
+ *
29
+ * @example
30
+ * ```ts
31
+ * // hooks/on-commit-favorite.ts
32
+ * import { defineHook } from '$hatk'
33
+ *
34
+ * export default defineHook("on-commit", { collections: ["social.grain.favorite"] },
35
+ * async ({ action, collection, record, repo, uri, db, lookup, push }) => {
36
+ * if (action !== "create") return
37
+ * // send push notification, etc.
38
+ * }
39
+ * )
40
+ * ```
41
+ */
42
+ import { existsSync } from 'node:fs';
43
+ import { resolve } from 'node:path';
44
+ import { pdsCreateRecord, pdsPutRecord, pdsDeleteRecord } from "./pds-proxy.js";
45
+ import { log, emit } from "./logger.js";
46
+ import { setRepoStatus, runSQL } from "./database/db.js";
47
+ import { triggerAutoBackfill, awaitBackfill } from "./indexer.js";
48
+ import { buildBaseContext } from "./hydrate.js";
49
+ import { buildPushInterface, isPushEnabled } from "./push.js";
50
+ export function defineHook(event, ...args) {
51
+ if (event === 'on-login') {
52
+ return { __type: 'hook', event, handler: args[0] };
53
+ }
54
+ if (event === 'on-commit') {
55
+ const options = args[0];
56
+ const handler = args[1];
57
+ return { __type: 'hook', event, collections: options.collections, handler };
58
+ }
59
+ throw new Error(`Unknown hook event: ${event}`);
60
+ }
61
+ let onLoginHook = null;
62
+ const onCommitHooks = [];
63
+ /**
64
+ * Discover and load the on-login hook from the project's `hooks/` directory.
65
+ * Looks for `on-login.ts` or `on-login.js`. Safe to call if no hook exists.
66
+ */
67
+ export async function loadOnLoginHook(hooksDir) {
68
+ const tsPath = resolve(hooksDir, 'on-login.ts');
69
+ const jsPath = resolve(hooksDir, 'on-login.js');
70
+ const path = existsSync(tsPath) ? tsPath : existsSync(jsPath) ? jsPath : null;
71
+ if (!path)
72
+ return;
73
+ const mod = await import(__rewriteRelativeImportExtension(/* @vite-ignore */ `${path}?t=${Date.now()}`));
74
+ onLoginHook = mod.default;
75
+ log('[hooks] on-login hook loaded');
76
+ }
77
+ /** Mark a DID as pending, trigger auto-backfill, and wait for completion. */
78
+ async function ensureRepo(did) {
79
+ await setRepoStatus(did, 'pending');
80
+ triggerAutoBackfill(did);
81
+ await awaitBackfill(did);
82
+ }
83
+ /** Register a hook from a scanned server/ module. */
84
+ export function registerHook(event, handler, options) {
85
+ if (event === 'on-login') {
86
+ onLoginHook = handler;
87
+ log('[hooks] on-login hook registered');
88
+ }
89
+ else if (event === 'on-commit') {
90
+ const collections = new Set(options?.collections || []);
91
+ onCommitHooks.push({ collections, handler: handler });
92
+ log(`[hooks] on-commit hook registered (collections: ${[...collections].join(', ')})`);
93
+ }
94
+ }
95
+ /** Fire the on-login hook if loaded. Errors are logged but never block login. */
96
+ export async function fireOnLoginHook(did, oauthConfig) {
97
+ if (!onLoginHook)
98
+ return;
99
+ try {
100
+ const base = buildBaseContext({ did });
101
+ const viewer = { did };
102
+ const hookPromise = onLoginHook({
103
+ ...base,
104
+ did,
105
+ db: { query: base.db.query, run: runSQL },
106
+ ensureRepo,
107
+ createRecord: async (collection, record, opts) => {
108
+ if (!oauthConfig)
109
+ throw new Error('No OAuth config — cannot write to PDS');
110
+ return pdsCreateRecord(oauthConfig, viewer, { collection, record, rkey: opts?.rkey });
111
+ },
112
+ putRecord: async (collection, rkey, record) => {
113
+ if (!oauthConfig)
114
+ throw new Error('No OAuth config — cannot write to PDS');
115
+ return pdsPutRecord(oauthConfig, viewer, { collection, rkey, record });
116
+ },
117
+ deleteRecord: async (collection, rkey) => {
118
+ if (!oauthConfig)
119
+ throw new Error('No OAuth config — cannot write to PDS');
120
+ await pdsDeleteRecord(oauthConfig, viewer, { collection, rkey });
121
+ },
122
+ });
123
+ const timeout = new Promise((_, reject) => setTimeout(() => reject(new Error('on-login hook timed out after 30s')), 30_000));
124
+ await Promise.race([hookPromise, timeout]);
125
+ }
126
+ catch (err) {
127
+ emit('hooks', 'on_login_error', { did, error: err.message });
128
+ }
129
+ }
130
+ /**
131
+ * Fire on-commit hooks for a batch of indexed records.
132
+ * Runs async and non-blocking — errors are logged but never throw.
133
+ */
134
+ export function fireOnCommitHooks(items) {
135
+ if (onCommitHooks.length === 0)
136
+ return;
137
+ const base = buildBaseContext(null);
138
+ const push = isPushEnabled() ? buildPushInterface() : { send: async () => { } };
139
+ for (const item of items) {
140
+ for (const hook of onCommitHooks) {
141
+ if (hook.collections.size > 0 && !hook.collections.has(item.collection))
142
+ continue;
143
+ hook.handler({
144
+ action: item.action,
145
+ collection: item.collection,
146
+ record: item.record,
147
+ repo: item.authorDid,
148
+ uri: item.uri,
149
+ db: { query: base.db.query, run: runSQL },
150
+ lookup: base.lookup,
151
+ push,
152
+ }).catch((err) => {
153
+ emit('hooks', 'on_commit_error', {
154
+ collection: item.collection,
155
+ uri: item.uri,
156
+ error: err.message,
157
+ });
158
+ });
159
+ }
160
+ }
161
+ }
package/dist/hydrate.d.ts CHANGED
@@ -1,9 +1,9 @@
1
1
  import type { Row } from './lex-types.ts';
2
2
  export type { Row };
3
- export interface HydrateContext<T = unknown> {
4
- items: Row<T>[];
3
+ export interface BaseContext {
5
4
  viewer: {
6
5
  did: string;
6
+ handle?: string;
7
7
  } | null;
8
8
  db: {
9
9
  query: (sql: string, params?: unknown[]) => Promise<unknown[]>;
@@ -12,12 +12,13 @@ export interface HydrateContext<T = unknown> {
12
12
  lookup: <R = unknown>(collection: string, field: string, values: string[]) => Promise<Map<string, Row<R>>>;
13
13
  count: (collection: string, field: string, values: string[]) => Promise<Map<string, number>>;
14
14
  labels: (uris: string[]) => Promise<Map<string, unknown[]>>;
15
- blobUrl: (did: string, ref: unknown, preset?: 'avatar' | 'banner' | 'feed_thumbnail' | 'feed_fullsize') => string | undefined;
15
+ blobUrl: (did: string, ref: unknown, preset?: string) => string | undefined;
16
16
  }
17
17
  /** Fetch records for URIs, reshape them, and filter out taken-down DIDs. */
18
18
  export declare function resolveRecords(uris: string[]): Promise<Row<unknown>[]>;
19
- /** Build a HydrateContext for a feed's hydrate function. */
20
- export declare function buildHydrateContext(items: Row<unknown>[], viewer: {
19
+ /** Build a BaseContext for hydration. */
20
+ export declare function buildBaseContext(viewer: {
21
21
  did: string;
22
- } | null): HydrateContext;
22
+ handle?: string;
23
+ } | null): BaseContext;
23
24
  //# sourceMappingURL=hydrate.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"hydrate.d.ts","sourceRoot":"","sources":["../src/hydrate.ts"],"names":[],"mappings":"AAUA,OAAO,KAAK,EAAE,GAAG,EAAE,MAAM,gBAAgB,CAAA;AAEzC,YAAY,EAAE,GAAG,EAAE,CAAA;AAInB,MAAM,WAAW,cAAc,CAAC,CAAC,GAAG,OAAO;IACzC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC,EAAE,CAAA;IACf,MAAM,EAAE;QAAE,GAAG,EAAE,MAAM,CAAA;KAAE,GAAG,IAAI,CAAA;IAC9B,EAAE,EAAE;QAAE,KAAK,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,EAAE,KAAK,OAAO,CAAC,OAAO,EAAE,CAAC,CAAA;KAAE,CAAA;IACtE,UAAU,EAAE,CAAC,CAAC,GAAG,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAC7F,MAAM,EAAE,CAAC,CAAC,GAAG,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAC1G,KAAK,EAAE,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,CAAA;IAC5F,MAAM,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC,CAAA;IAC3D,OAAO,EAAE,CACP,GAAG,EAAE,MAAM,EACX,GAAG,EAAE,OAAO,EACZ,MAAM,CAAC,EAAE,QAAQ,GAAG,QAAQ,GAAG,gBAAgB,GAAG,eAAe,KAC9D,MAAM,GAAG,SAAS,CAAA;CACxB;AAID,4EAA4E;AAC5E,wBAAsB,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,CAoC5E;AAID,4DAA4D;AAC5D,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,GAAG,CAAC,OAAO,CAAC,EAAE,EAAE,MAAM,EAAE;IAAE,GAAG,EAAE,MAAM,CAAA;CAAE,GAAG,IAAI,GAAG,cAAc,CA4BzG"}
1
+ {"version":3,"file":"hydrate.d.ts","sourceRoot":"","sources":["../src/hydrate.ts"],"names":[],"mappings":"AAWA,OAAO,KAAK,EAAE,GAAG,EAAE,MAAM,gBAAgB,CAAA;AAEzC,YAAY,EAAE,GAAG,EAAE,CAAA;AAInB,MAAM,WAAW,WAAW;IAC1B,MAAM,EAAE;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,MAAM,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,IAAI,CAAA;IAC/C,EAAE,EAAE;QAAE,KAAK,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,EAAE,KAAK,OAAO,CAAC,OAAO,EAAE,CAAC,CAAA;KAAE,CAAA;IACtE,UAAU,EAAE,CAAC,CAAC,GAAG,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAC7F,MAAM,EAAE,CAAC,CAAC,GAAG,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAC1G,KAAK,EAAE,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,CAAA;IAC5F,MAAM,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC,CAAA;IAC3D,OAAO,EAAE,CACP,GAAG,EAAE,MAAM,EACX,GAAG,EAAE,OAAO,EACZ,MAAM,CAAC,EAAE,MAAM,KACZ,MAAM,GAAG,SAAS,CAAA;CACxB;AAID,4EAA4E;AAC5E,wBAAsB,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,CAoC5E;AAID,yCAAyC;AACzC,wBAAgB,gBAAgB,CAAC,MAAM,EAAE;IAAE,GAAG,EAAE,MAAM,CAAC;IAAC,MAAM,CAAC,EAAE,MAAM,CAAA;CAAE,GAAG,IAAI,GAAG,WAAW,CAkB7F"}
package/dist/hydrate.js CHANGED
@@ -1,4 +1,4 @@
1
- import { getRecordsByUris, countByFieldBatch, lookupByFieldBatch, querySQL, reshapeRow, queryLabelsForUris, filterTakendownDids, } from "./db.js";
1
+ import { getRecordsMap, countByFieldBatch, lookupByFieldBatch, querySQL, queryLabelsForUris, filterTakendownDids, getRecordsByUris, reshapeRow, } from "./database/db.js";
2
2
  import { blobUrl } from "./xrpc.js";
3
3
  // --- Record Resolution ---
4
4
  /** Fetch records for URIs, reshape them, and filter out taken-down DIDs. */
@@ -39,24 +39,12 @@ export async function resolveRecords(uris) {
39
39
  .filter((r) => r != null);
40
40
  }
41
41
  // --- Context Builder ---
42
- /** Build a HydrateContext for a feed's hydrate function. */
43
- export function buildHydrateContext(items, viewer) {
42
+ /** Build a BaseContext for hydration. */
43
+ export function buildBaseContext(viewer) {
44
44
  return {
45
- items,
46
45
  viewer,
47
46
  db: { query: querySQL },
48
- getRecords: async (collection, uris) => {
49
- if (uris.length === 0)
50
- return new Map();
51
- const records = await getRecordsByUris(collection, uris);
52
- const map = new Map();
53
- for (const r of records) {
54
- const shaped = reshapeRow(r, r?.__childData, r?.__unionData);
55
- if (shaped)
56
- map.set(shaped.uri, shaped);
57
- }
58
- return map;
59
- },
47
+ getRecords: getRecordsMap,
60
48
  lookup: async (collection, field, values) => {
61
49
  if (values.length === 0)
62
50
  return new Map();
package/dist/indexer.d.ts CHANGED
@@ -1,14 +1,37 @@
1
+ /**
2
+ * Auto-backfill a DID's repo when first seen on the firehose.
3
+ *
4
+ * Fetches the full repo via CAR export, inserts all records, then replays any
5
+ * firehose events that arrived during the backfill. Concurrency is capped at
6
+ * `maxConcurrentBackfills`. Failed backfills retry with exponential delay up
7
+ * to `maxRetries`.
8
+ */
9
+ /** Wait for a DID's backfill to complete if one is in flight. */
10
+ export declare function awaitBackfill(did: string): Promise<void>;
1
11
  export declare function triggerAutoBackfill(did: string, attempt?: number): Promise<void>;
12
+ /** Configuration for the firehose indexer. */
2
13
  interface IndexerOpts {
3
14
  relayUrl: string;
15
+ plcUrl: string;
4
16
  collections: Set<string>;
5
17
  signalCollections?: Set<string>;
6
18
  pinnedRepos?: Set<string>;
7
19
  cursor?: string | null;
8
20
  fetchTimeout: number;
9
21
  maxRetries: number;
22
+ parallelism?: number;
10
23
  ftsRebuildInterval?: number;
11
24
  }
25
+ /**
26
+ * Connect to the AT Protocol relay firehose and begin indexing.
27
+ *
28
+ * Opens a WebSocket to `subscribeRepos`, processes commit messages synchronously
29
+ * on the event loop to minimize backpressure, and batches writes through
30
+ * {@link flushBuffer}. New DIDs trigger auto-backfill via {@link triggerAutoBackfill}.
31
+ * Reconnects automatically on disconnect after a 3s delay.
32
+ *
33
+ * @returns The WebSocket connection (for shutdown coordination)
34
+ */
12
35
  export declare function startIndexer(opts: IndexerOpts): Promise<WebSocket>;
13
36
  export {};
14
37
  //# sourceMappingURL=indexer.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"indexer.d.ts","sourceRoot":"","sources":["../src/indexer.ts"],"names":[],"mappings":"AAkIA,wBAAsB,mBAAmB,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,SAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAsDjF;AAED,UAAU,WAAW;IACnB,QAAQ,EAAE,MAAM,CAAA;IAChB,WAAW,EAAE,GAAG,CAAC,MAAM,CAAC,CAAA;IACxB,iBAAiB,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,CAAA;IAC/B,WAAW,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,CAAA;IACzB,MAAM,CAAC,EAAE,MAAM,GAAG,IAAI,CAAA;IACtB,YAAY,EAAE,MAAM,CAAA;IACpB,UAAU,EAAE,MAAM,CAAA;IAClB,kBAAkB,CAAC,EAAE,MAAM,CAAA;CAC5B;AAyBD,wBAAsB,YAAY,CAAC,IAAI,EAAE,WAAW,GAAG,OAAO,CAAC,SAAS,CAAC,CAkDxE"}
1
+ {"version":3,"file":"indexer.d.ts","sourceRoot":"","sources":["../src/indexer.ts"],"names":[],"mappings":"AAoKA;;;;;;;GAOG;AACH,iEAAiE;AACjE,wBAAgB,aAAa,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAGxD;AAED,wBAAsB,mBAAmB,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,SAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CA4EjF;AAED,8CAA8C;AAC9C,UAAU,WAAW;IACnB,QAAQ,EAAE,MAAM,CAAA;IAChB,MAAM,EAAE,MAAM,CAAA;IACd,WAAW,EAAE,GAAG,CAAC,MAAM,CAAC,CAAA;IACxB,iBAAiB,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,CAAA;IAC/B,WAAW,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,CAAA;IACzB,MAAM,CAAC,EAAE,MAAM,GAAG,IAAI,CAAA;IACtB,YAAY,EAAE,MAAM,CAAA;IACpB,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,kBAAkB,CAAC,EAAE,MAAM,CAAA;CAC5B;AAyBD;;;;;;;;;GASG;AACH,wBAAsB,YAAY,CAAC,IAAI,EAAE,WAAW,GAAG,OAAO,CAAC,SAAS,CAAC,CAoDxE"}
package/dist/indexer.js CHANGED
@@ -1,11 +1,12 @@
1
1
  import { cborDecode } from "./cbor.js";
2
2
  import { parseCarFrame } from "./car.js";
3
- import { insertRecord, deleteRecord, setCursor, setRepoStatus, getRepoRetryInfo, listAllRepoStatuses } from "./db.js";
3
+ import { insertRecord, deleteRecord, setCursor, setRepoStatus, getRepoRetryInfo, listAllRepoStatuses, getDatabasePort, updateRepoHandle, } from "./database/db.js";
4
4
  import { backfillRepo } from "./backfill.js";
5
- import { rebuildAllIndexes } from "./fts.js";
5
+ import { rebuildAllIndexes } from "./database/fts.js";
6
6
  import { log, emit, timer } from "./logger.js";
7
7
  import { runLabelRules } from "./labels.js";
8
- import { getLexiconArray } from "./schema.js";
8
+ import { fireOnCommitHooks } from "./hooks.js";
9
+ import { getLexiconArray } from "./database/schema.js";
9
10
  import { validateRecord } from '@bigmoves/lexicon';
10
11
  let buffer = [];
11
12
  let flushTimer = null;
@@ -18,7 +19,8 @@ let ftsRebuildInterval = 500;
18
19
  const pendingBuffers = new Map();
19
20
  // Track in-flight backfills to avoid duplicates
20
21
  const backfillInFlight = new Set();
21
- const MAX_CONCURRENT_BACKFILLS = 5;
22
+ const backfillPromises = new Map();
23
+ const pendingReschedule = new Set();
22
24
  // In-memory cache of repo status to avoid flooding the DB read queue
23
25
  const repoStatusCache = new Map();
24
26
  // Set by startIndexer
@@ -27,6 +29,13 @@ let indexerSignalCollections;
27
29
  let indexerPinnedRepos = null;
28
30
  let indexerFetchTimeout;
29
31
  let indexerMaxRetries;
32
+ let indexerPlcUrl;
33
+ let maxConcurrentBackfills = 3;
34
+ /**
35
+ * Flush the write buffer — insert all buffered records, update the relay cursor,
36
+ * run label rules on inserted records, and trigger FTS rebuilds when the write
37
+ * threshold is reached. Emits a wide event with batch stats.
38
+ */
30
39
  async function flushBuffer() {
31
40
  if (buffer.length === 0)
32
41
  return;
@@ -64,6 +73,14 @@ async function flushBuffer() {
64
73
  value: item.record,
65
74
  }).catch(() => { });
66
75
  }
76
+ // Fire on-commit hooks for inserted records (async, non-blocking)
77
+ fireOnCommitHooks(inserted.map((item) => ({
78
+ action: 'create',
79
+ collection: item.collection,
80
+ uri: item.uri,
81
+ authorDid: item.authorDid,
82
+ record: item.record,
83
+ })));
67
84
  // Aggregate collection counts and unique DIDs for wide event
68
85
  const collections = {};
69
86
  const dids = new Set();
@@ -86,9 +103,14 @@ async function flushBuffer() {
86
103
  writesSinceRebuild += batch.length;
87
104
  if (writesSinceRebuild >= ftsRebuildInterval) {
88
105
  writesSinceRebuild = 0;
89
- rebuildAllIndexes([...indexerCollections]).catch(() => { });
106
+ // Skip periodic full rebuild for SQLite — it uses incremental FTS updates
107
+ const port = getDatabasePort();
108
+ if (port.dialect !== 'sqlite') {
109
+ rebuildAllIndexes([...indexerCollections]).catch(() => { });
110
+ }
90
111
  }
91
112
  }
113
+ /** Schedule a flush after FLUSH_INTERVAL_MS if one isn't already pending. */
92
114
  function scheduleFlush() {
93
115
  if (flushTimer)
94
116
  return;
@@ -97,6 +119,7 @@ function scheduleFlush() {
97
119
  await flushBuffer();
98
120
  }, FLUSH_INTERVAL_MS);
99
121
  }
122
+ /** Add a record to the write buffer. Flushes immediately if BATCH_SIZE is reached. */
100
123
  function bufferWrite(item) {
101
124
  buffer.push(item);
102
125
  if (buffer.length >= BATCH_SIZE) {
@@ -110,11 +133,39 @@ function bufferWrite(item) {
110
133
  scheduleFlush();
111
134
  }
112
135
  }
136
+ /**
137
+ * Auto-backfill a DID's repo when first seen on the firehose.
138
+ *
139
+ * Fetches the full repo via CAR export, inserts all records, then replays any
140
+ * firehose events that arrived during the backfill. Concurrency is capped at
141
+ * `maxConcurrentBackfills`. Failed backfills retry with exponential delay up
142
+ * to `maxRetries`.
143
+ */
144
+ /** Wait for a DID's backfill to complete if one is in flight. */
145
+ export function awaitBackfill(did) {
146
+ const entry = backfillPromises.get(did);
147
+ return entry ? entry.promise : Promise.resolve();
148
+ }
113
149
  export async function triggerAutoBackfill(did, attempt = 0) {
114
150
  if (backfillInFlight.has(did))
115
151
  return;
152
+ if (backfillInFlight.size >= maxConcurrentBackfills) {
153
+ if (!pendingReschedule.has(did)) {
154
+ pendingReschedule.add(did);
155
+ setTimeout(() => {
156
+ pendingReschedule.delete(did);
157
+ triggerAutoBackfill(did, attempt);
158
+ }, 10_000);
159
+ }
160
+ return;
161
+ }
116
162
  backfillInFlight.add(did);
117
163
  pendingBuffers.set(did, []);
164
+ if (!backfillPromises.has(did)) {
165
+ let resolveBackfill;
166
+ const promise = new Promise((r) => { resolveBackfill = r; });
167
+ backfillPromises.set(did, { promise, resolve: resolveBackfill });
168
+ }
118
169
  if (attempt === 0)
119
170
  await setRepoStatus(did, 'pending');
120
171
  const elapsed = timer();
@@ -154,6 +205,12 @@ export async function triggerAutoBackfill(did, attempt = 0) {
154
205
  error,
155
206
  retry_count: currentRetryCount,
156
207
  });
208
+ // Resolve awaiting callers (e.g. on-login hooks)
209
+ const entry = backfillPromises.get(did);
210
+ if (entry) {
211
+ entry.resolve();
212
+ backfillPromises.delete(did);
213
+ }
157
214
  if (status === 'error' && currentRetryCount < indexerMaxRetries) {
158
215
  const delaySecs = Math.min(currentRetryCount * 60, 3600);
159
216
  const delayMs = Math.max(delaySecs, 60) * 1000;
@@ -162,7 +219,7 @@ export async function triggerAutoBackfill(did, attempt = 0) {
162
219
  }, delayMs);
163
220
  }
164
221
  }
165
- // Periodic memory diagnostics
222
+ /** Emit a memory diagnostics wide event every 30s for observability. */
166
223
  function startMemoryDiagnostics() {
167
224
  setInterval(() => {
168
225
  const mem = process.memoryUsage();
@@ -184,6 +241,16 @@ function startMemoryDiagnostics() {
184
241
  });
185
242
  }, 30_000);
186
243
  }
244
+ /**
245
+ * Connect to the AT Protocol relay firehose and begin indexing.
246
+ *
247
+ * Opens a WebSocket to `subscribeRepos`, processes commit messages synchronously
248
+ * on the event loop to minimize backpressure, and batches writes through
249
+ * {@link flushBuffer}. New DIDs trigger auto-backfill via {@link triggerAutoBackfill}.
250
+ * Reconnects automatically on disconnect after a 3s delay.
251
+ *
252
+ * @returns The WebSocket connection (for shutdown coordination)
253
+ */
187
254
  export async function startIndexer(opts) {
188
255
  const { relayUrl, collections, cursor, fetchTimeout } = opts;
189
256
  if (opts.ftsRebuildInterval != null)
@@ -193,6 +260,8 @@ export async function startIndexer(opts) {
193
260
  indexerPinnedRepos = opts.pinnedRepos || null;
194
261
  indexerFetchTimeout = fetchTimeout;
195
262
  indexerMaxRetries = opts.maxRetries;
263
+ indexerPlcUrl = opts.plcUrl;
264
+ maxConcurrentBackfills = opts.parallelism ?? 3;
196
265
  // Pre-populate repo status cache from DB so non-signal updates
197
266
  // (e.g. profile changes) are processed for already-tracked DIDs
198
267
  if (repoStatusCache.size === 0) {
@@ -202,7 +271,7 @@ export async function startIndexer(opts) {
202
271
  }
203
272
  log(`[indexer] Warmed repo status cache with ${statuses.length} entries`);
204
273
  }
205
- startMemoryDiagnostics();
274
+ // startMemoryDiagnostics()
206
275
  let wsUrl = `${relayUrl}/xrpc/com.atproto.sync.subscribeRepos`;
207
276
  if (cursor) {
208
277
  wsUrl += `?cursor=${cursor}`;
@@ -220,8 +289,8 @@ export async function startIndexer(opts) {
220
289
  const bytes = new Uint8Array(event.data);
221
290
  processMessage(bytes, collections);
222
291
  }
223
- catch {
224
- // Skip unparseable firehose messages silently
292
+ catch (err) {
293
+ emit('indexer', 'decode_error', { error: err instanceof Error ? err.message : String(err) });
225
294
  }
226
295
  });
227
296
  ws.addEventListener('open', () => log('[indexer] Connected to relay'));
@@ -231,9 +300,77 @@ export async function startIndexer(opts) {
231
300
  });
232
301
  return ws;
233
302
  }
303
/**
 * React to a `#identity` firehose event for a single DID.
 *
 * Per the lexicon the event's `handle` field is optional, and some relays
 * omit it as a "please re-resolve" signal. When it is missing we query the
 * PLC directory ourselves so handle renames still propagate. Only DIDs
 * already present in `repoStatusCache` are processed, so we never write
 * rows for repos we do not track.
 *
 * @param {string} did - DID the identity event refers to.
 * @param {string | undefined} payloadHandle - Handle carried on the event, if any.
 */
async function handleIdentityEvent(did, payloadHandle) {
    // Ignore DIDs we are not indexing.
    if (!repoStatusCache.has(did))
        return;
    const payloadHadHandle = payloadHandle !== undefined;
    let handle = payloadHandle;
    if (!handle) {
        try {
            // Time-bound the PLC lookup: the caller is fire-and-forget, so a
            // slow plc.directory must not pile up unbounded pending promises
            // during a burst of identity events.
            const res = await fetch(`${indexerPlcUrl}/${did}`, {
                signal: AbortSignal.timeout(indexerFetchTimeout * 1000),
            });
            if (!res.ok) {
                emit('indexer', 'identity_resolve_error', { did, status: res.status });
            }
            else {
                const doc = (await res.json());
                // The first at:// alias is the canonical handle (matches the
                // @atproto/identity convention).
                const alias = doc.alsoKnownAs?.find((u) => u.startsWith('at://'));
                handle = alias ? alias.slice('at://'.length) : undefined;
            }
        }
        catch (err) {
            emit('indexer', 'identity_resolve_error', {
                did,
                error: err instanceof Error ? err.message : String(err),
            });
        }
    }
    if (!handle) {
        // Neither the payload nor PLC gave us a usable handle.
        emit('indexer', 'identity_no_handle', { did, payload_had_handle: payloadHadHandle });
        return;
    }
    try {
        await updateRepoHandle(did, handle);
        emit('indexer', 'identity_handle_update', { did, handle, payload_had_handle: payloadHadHandle });
    }
    catch (err) {
        emit('indexer', 'identity_update_error', {
            did,
            handle,
            error: err instanceof Error ? err.message : String(err),
        });
    }
}
357
+ /**
358
+ * Process a single firehose message. Decodes the CBOR header/body, filters
359
+ * for relevant collections, validates records against lexicons, and routes
360
+ * writes to the buffer (or pending buffer if the DID is mid-backfill).
361
+ */
234
362
  function processMessage(bytes, collections) {
235
363
  const header = cborDecode(bytes, 0);
236
364
  const body = cborDecode(bytes, header.offset);
365
+ // Handle identity events (handle changes). Fire-and-forget — keeps
366
+ // processMessage synchronous so the WS event loop drains without backpressure.
367
+ if (header.value.t === '#identity') {
368
+ const did = typeof body.value.did === 'string' ? body.value.did : undefined;
369
+ const handle = typeof body.value.handle === 'string' ? body.value.handle : undefined;
370
+ if (did)
371
+ handleIdentityEvent(did, handle);
372
+ return;
373
+ }
237
374
  if (header.value.op !== 1 || header.value.t !== '#commit')
238
375
  return;
239
376
  if (!body.value.blocks || !body.value.ops)
@@ -264,7 +401,7 @@ function processMessage(bytes, collections) {
264
401
  repoStatusCache.set(did, 'unknown');
265
402
  }
266
403
  if (hasSignalOp && (!indexerPinnedRepos || indexerPinnedRepos.has(did))) {
267
- if (repoStatus === null && backfillInFlight.size < MAX_CONCURRENT_BACKFILLS) {
404
+ if (repoStatus === null && backfillInFlight.size < maxConcurrentBackfills) {
268
405
  repoStatusCache.set(did, 'pending');
269
406
  triggerAutoBackfill(did);
270
407
  }
@@ -283,34 +420,44 @@ function processMessage(bytes, collections) {
283
420
  const uri = `at://${did}/${op.path}`;
284
421
  if (op.action === 'delete') {
285
422
  deleteRecord(collection, uri);
423
+ fireOnCommitHooks([{
424
+ action: 'delete',
425
+ collection,
426
+ uri,
427
+ authorDid: did,
428
+ record: null,
429
+ }]);
286
430
  continue;
287
431
  }
288
- for (const [cid, data] of blocks) {
289
- try {
290
- const { value: record } = cborDecode(data);
291
- if (record?.$type === collection) {
292
- const validationError = validateRecord(getLexiconArray(), collection, record);
293
- if (validationError) {
294
- emit('indexer', 'validation_skip', {
295
- uri,
296
- collection,
297
- path: validationError.path,
298
- error: validationError.message,
299
- });
300
- break;
301
- }
302
- const item = { collection, uri, cid, authorDid: did, record };
303
- // If DID is mid-backfill, buffer instead of writing directly
304
- if (pendingBuffers.has(did)) {
305
- pendingBuffers.get(did).push(item);
306
- }
307
- else {
308
- bufferWrite(item);
309
- }
310
- break;
432
+ const opCid = typeof op.cid === 'string' ? op.cid : op.cid?.$link;
433
+ if (!opCid)
434
+ continue;
435
+ const data = blocks.get(opCid);
436
+ if (!data)
437
+ continue;
438
+ try {
439
+ const { value: record } = cborDecode(data);
440
+ if (record?.$type === collection) {
441
+ const validationError = validateRecord(getLexiconArray(), collection, record);
442
+ if (validationError) {
443
+ emit('indexer', 'validation_skip', {
444
+ uri,
445
+ collection,
446
+ path: validationError.path,
447
+ error: validationError.message,
448
+ });
449
+ continue;
450
+ }
451
+ const item = { collection, uri, cid: opCid, authorDid: did, record };
452
+ // If DID is mid-backfill, buffer instead of writing directly
453
+ if (pendingBuffers.has(did)) {
454
+ pendingBuffers.get(did).push(item);
455
+ }
456
+ else {
457
+ bufferWrite(item);
311
458
  }
312
459
  }
313
- catch { }
314
460
  }
461
+ catch { }
315
462
  }
316
463
  }