@hatk/hatk 0.0.1-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109)
  1. package/dist/backfill.d.ts +11 -0
  2. package/dist/backfill.d.ts.map +1 -0
  3. package/dist/backfill.js +328 -0
  4. package/dist/car.d.ts +5 -0
  5. package/dist/car.d.ts.map +1 -0
  6. package/dist/car.js +52 -0
  7. package/dist/cbor.d.ts +7 -0
  8. package/dist/cbor.d.ts.map +1 -0
  9. package/dist/cbor.js +89 -0
  10. package/dist/cid.d.ts +4 -0
  11. package/dist/cid.d.ts.map +1 -0
  12. package/dist/cid.js +39 -0
  13. package/dist/cli.d.ts +3 -0
  14. package/dist/cli.d.ts.map +1 -0
  15. package/dist/cli.js +1663 -0
  16. package/dist/config.d.ts +47 -0
  17. package/dist/config.d.ts.map +1 -0
  18. package/dist/config.js +43 -0
  19. package/dist/db.d.ts +134 -0
  20. package/dist/db.d.ts.map +1 -0
  21. package/dist/db.js +1361 -0
  22. package/dist/feeds.d.ts +95 -0
  23. package/dist/feeds.d.ts.map +1 -0
  24. package/dist/feeds.js +144 -0
  25. package/dist/fts.d.ts +20 -0
  26. package/dist/fts.d.ts.map +1 -0
  27. package/dist/fts.js +762 -0
  28. package/dist/hydrate.d.ts +23 -0
  29. package/dist/hydrate.d.ts.map +1 -0
  30. package/dist/hydrate.js +75 -0
  31. package/dist/indexer.d.ts +14 -0
  32. package/dist/indexer.d.ts.map +1 -0
  33. package/dist/indexer.js +316 -0
  34. package/dist/labels.d.ts +29 -0
  35. package/dist/labels.d.ts.map +1 -0
  36. package/dist/labels.js +111 -0
  37. package/dist/lex-types.d.ts +401 -0
  38. package/dist/lex-types.d.ts.map +1 -0
  39. package/dist/lex-types.js +4 -0
  40. package/dist/lexicon-resolve.d.ts +14 -0
  41. package/dist/lexicon-resolve.d.ts.map +1 -0
  42. package/dist/lexicon-resolve.js +280 -0
  43. package/dist/logger.d.ts +4 -0
  44. package/dist/logger.d.ts.map +1 -0
  45. package/dist/logger.js +23 -0
  46. package/dist/main.d.ts +3 -0
  47. package/dist/main.d.ts.map +1 -0
  48. package/dist/main.js +148 -0
  49. package/dist/mst.d.ts +6 -0
  50. package/dist/mst.d.ts.map +1 -0
  51. package/dist/mst.js +30 -0
  52. package/dist/oauth/client.d.ts +16 -0
  53. package/dist/oauth/client.d.ts.map +1 -0
  54. package/dist/oauth/client.js +54 -0
  55. package/dist/oauth/crypto.d.ts +28 -0
  56. package/dist/oauth/crypto.d.ts.map +1 -0
  57. package/dist/oauth/crypto.js +101 -0
  58. package/dist/oauth/db.d.ts +47 -0
  59. package/dist/oauth/db.d.ts.map +1 -0
  60. package/dist/oauth/db.js +139 -0
  61. package/dist/oauth/discovery.d.ts +22 -0
  62. package/dist/oauth/discovery.d.ts.map +1 -0
  63. package/dist/oauth/discovery.js +50 -0
  64. package/dist/oauth/dpop.d.ts +11 -0
  65. package/dist/oauth/dpop.d.ts.map +1 -0
  66. package/dist/oauth/dpop.js +56 -0
  67. package/dist/oauth/hooks.d.ts +10 -0
  68. package/dist/oauth/hooks.d.ts.map +1 -0
  69. package/dist/oauth/hooks.js +40 -0
  70. package/dist/oauth/server.d.ts +86 -0
  71. package/dist/oauth/server.d.ts.map +1 -0
  72. package/dist/oauth/server.js +572 -0
  73. package/dist/opengraph.d.ts +34 -0
  74. package/dist/opengraph.d.ts.map +1 -0
  75. package/dist/opengraph.js +198 -0
  76. package/dist/schema.d.ts +51 -0
  77. package/dist/schema.d.ts.map +1 -0
  78. package/dist/schema.js +358 -0
  79. package/dist/seed.d.ts +29 -0
  80. package/dist/seed.d.ts.map +1 -0
  81. package/dist/seed.js +86 -0
  82. package/dist/server.d.ts +6 -0
  83. package/dist/server.d.ts.map +1 -0
  84. package/dist/server.js +1024 -0
  85. package/dist/setup.d.ts +8 -0
  86. package/dist/setup.d.ts.map +1 -0
  87. package/dist/setup.js +48 -0
  88. package/dist/test-browser.d.ts +14 -0
  89. package/dist/test-browser.d.ts.map +1 -0
  90. package/dist/test-browser.js +26 -0
  91. package/dist/test.d.ts +47 -0
  92. package/dist/test.d.ts.map +1 -0
  93. package/dist/test.js +256 -0
  94. package/dist/views.d.ts +40 -0
  95. package/dist/views.d.ts.map +1 -0
  96. package/dist/views.js +178 -0
  97. package/dist/vite-plugin.d.ts +5 -0
  98. package/dist/vite-plugin.d.ts.map +1 -0
  99. package/dist/vite-plugin.js +86 -0
  100. package/dist/xrpc-client.d.ts +18 -0
  101. package/dist/xrpc-client.d.ts.map +1 -0
  102. package/dist/xrpc-client.js +54 -0
  103. package/dist/xrpc.d.ts +53 -0
  104. package/dist/xrpc.d.ts.map +1 -0
  105. package/dist/xrpc.js +139 -0
  106. package/fonts/Inter-Regular.woff +0 -0
  107. package/package.json +41 -0
  108. package/public/admin-auth.js +320 -0
  109. package/public/admin.html +2166 -0
package/dist/lexicon-resolve.js ADDED
@@ -0,0 +1,280 @@
1
+ // Lexicon resolver — fetches lexicons from the AT Protocol registry via DNS → DID → PDS chain
2
+ // and recursively resolves all $ref dependencies.
3
+ import { isValidDid } from '@bigmoves/lexicon';
4
+ // --- Authority ---
5
// Derive the DNS authority domain for an NSID per the AT Protocol
// lexicon-resolution spec: all segments except the final name, reversed.
// e.g. 'app.bsky.feed.post' -> 'feed.bsky.app'.
function nsidToDomain(nsid) {
    // BUG FIX: previously only the first two segments were kept, which broke
    // resolution for NSIDs whose authority has more than two segments.
    const parts = nsid.split('.');
    return parts.slice(0, -1).reverse().join('.');
}
9
// DNS name at which a domain publishes its lexicon TXT record.
function domainToLexiconDns(domain) {
    return '_lexicon.' + domain;
}
12
+ // --- DNS ---
13
// Extract a DID from a `did=...` TXT record payload. Returns null unless
// the value parses and is a structurally valid DID.
function parseDidFromTxt(txt) {
    if (!txt) {
        return null;
    }
    const stripped = txt.replace(/^"|"$/g, '');
    const m = /^did=(.+)$/.exec(stripped);
    if (m === null) {
        return null;
    }
    const candidate = m[1];
    if (isValidDid(candidate)) {
        return candidate;
    }
    return null;
}
23
+ async function lookupTxt(domain, opts = {}) {
24
+ const dohUrl = opts.dohUrl ?? 'https://cloudflare-dns.com/dns-query';
25
+ const fetchFn = opts.fetch ?? globalThis.fetch;
26
+ try {
27
+ const url = `${dohUrl}?name=${encodeURIComponent(domain)}&type=TXT`;
28
+ const response = await fetchFn(url, {
29
+ headers: { Accept: 'application/dns-json' },
30
+ });
31
+ if (!response.ok)
32
+ return [];
33
+ const data = await response.json();
34
+ if (!data.Answer)
35
+ return [];
36
+ return data.Answer.filter((record) => record.type === 16).map((record) => record.data?.replace(/^"|"$/g, '') ?? '');
37
+ }
38
+ catch {
39
+ return [];
40
+ }
41
+ }
42
+ // --- DID ---
43
// Pull the PDS service endpoint out of a resolved DID document, matching
// either the conventional service id or the service type.
function extractPdsEndpoint(didDoc) {
    const services = didDoc?.service;
    if (!Array.isArray(services)) {
        return null;
    }
    for (const svc of services) {
        if (svc.id === '#atproto_pds' || svc.type === 'AtprotoPersonalDataServer') {
            return svc.serviceEndpoint ?? null;
        }
    }
    return null;
}
49
/**
 * Resolve a DID to its PDS service endpoint.
 * Supports did:plc (via the PLC directory) and did:web (per the W3C
 * did:web method spec). Returns null on any failure.
 */
async function resolveDid(did, opts = {}) {
    const plcUrl = opts.plcUrl ?? 'https://plc.directory';
    const fetchFn = opts.fetch ?? globalThis.fetch;
    try {
        let didDocUrl;
        if (did.startsWith('did:plc:')) {
            didDocUrl = `${plcUrl}/${did}`;
        }
        else if (did.startsWith('did:web:')) {
            // BUG FIX: did:web method-specific ids use ':' as a path separator and
            // percent-encoding (e.g. %3A for a port colon). The old code treated the
            // whole suffix as a bare domain, producing broken URLs for
            // did:web:example.com:user:alice.
            const segments = did.slice('did:web:'.length).split(':').map(decodeURIComponent);
            const host = segments[0];
            if (!host) {
                return null;
            }
            if (segments.length === 1) {
                // Bare domain: document lives at the well-known location.
                didDocUrl = `https://${host}/.well-known/did.json`;
            }
            else {
                // Path-based did:web: did:web:example.com:user:alice
                // -> https://example.com/user/alice/did.json (no .well-known).
                didDocUrl = `https://${host}/${segments.slice(1).join('/')}/did.json`;
            }
        }
        else {
            // Unsupported DID method.
            return null;
        }
        const response = await fetchFn(didDocUrl);
        if (!response.ok) {
            return null;
        }
        const didDoc = await response.json();
        return extractPdsEndpoint(didDoc);
    }
    catch {
        return null;
    }
}
74
+ // --- Built-in core schemas (not published via DNS) ---
75
// Built-in core atproto schemas, bundled because they are not published
// via _lexicon DNS TXT records and so cannot be resolved over the network.
// Keyed by NSID; values are complete lexicon documents.
const coreSchemas = {
    'com.atproto.repo.strongRef': {
        lexicon: 1,
        id: 'com.atproto.repo.strongRef',
        description: 'A URI with a content-hash fingerprint.',
        defs: {
            main: {
                type: 'object',
                required: ['uri', 'cid'],
                properties: { uri: { type: 'string', format: 'at-uri' }, cid: { type: 'string', format: 'cid' } },
            },
        },
    },
    'com.atproto.label.defs': {
        lexicon: 1,
        id: 'com.atproto.label.defs',
        defs: {
            label: {
                type: 'object',
                description: 'Metadata tag on an atproto resource (eg, repo or record).',
                required: ['src', 'uri', 'val', 'cts'],
                properties: {
                    ver: { type: 'integer' },
                    src: { type: 'string', format: 'did' },
                    uri: { type: 'string', format: 'uri' },
                    cid: { type: 'string', format: 'cid' },
                    val: { type: 'string', maxLength: 128 },
                    neg: { type: 'boolean' },
                    cts: { type: 'string', format: 'datetime' },
                    exp: { type: 'string', format: 'datetime' },
                    sig: { type: 'bytes' },
                },
            },
            selfLabels: {
                type: 'object',
                description: 'Metadata tags on an atproto record, published by the author within the record.',
                required: ['values'],
                properties: { values: { type: 'array', items: { type: 'ref', ref: '#selfLabel' }, maxLength: 10 } },
            },
            selfLabel: { type: 'object', required: ['val'], properties: { val: { type: 'string', maxLength: 128 } } },
            labelValueDefinition: {
                type: 'object',
                description: 'Declares a label value and its expected interpretations and behaviors.',
                required: ['identifier', 'severity', 'blurs', 'locales'],
                properties: {
                    identifier: { type: 'string', maxLength: 100 },
                    severity: { type: 'string', knownValues: ['inform', 'alert', 'none'] },
                    blurs: { type: 'string', knownValues: ['content', 'media', 'none'] },
                    defaultSetting: { type: 'string', knownValues: ['ignore', 'warn', 'hide'] },
                    adultOnly: { type: 'boolean' },
                    locales: { type: 'array', items: { type: 'ref', ref: '#labelValueDefinitionStrings' } },
                },
            },
            labelValueDefinitionStrings: {
                type: 'object',
                required: ['lang', 'name', 'description'],
                properties: {
                    lang: { type: 'string', format: 'language' },
                    name: { type: 'string', maxLength: 640 },
                    description: { type: 'string', maxLength: 100000 },
                },
            },
            labelValue: {
                type: 'string',
                knownValues: [
                    '!hide',
                    '!no-promote',
                    '!warn',
                    '!no-unauthenticated',
                    'dmca-violation',
                    'doxxing',
                    'porn',
                    'sexual',
                    'nudity',
                    'nsfl',
                    'gore',
                ],
            },
        },
    },
    'com.atproto.moderation.defs': {
        lexicon: 1,
        id: 'com.atproto.moderation.defs',
        defs: {
            reasonType: {
                type: 'string',
                knownValues: [
                    'com.atproto.moderation.defs#reasonSpam',
                    'com.atproto.moderation.defs#reasonViolation',
                    'com.atproto.moderation.defs#reasonMisleading',
                    'com.atproto.moderation.defs#reasonSexual',
                    'com.atproto.moderation.defs#reasonRude',
                    'com.atproto.moderation.defs#reasonOther',
                    'com.atproto.moderation.defs#reasonAppeal',
                ],
            },
            reasonSpam: { type: 'token', description: 'Spam: frequent unwanted promotion, replies, mentions.' },
            reasonViolation: { type: 'token', description: 'Direct violation of server rules, laws, terms of service.' },
            reasonMisleading: { type: 'token', description: 'Misleading identity, affiliation, or content.' },
            reasonSexual: { type: 'token', description: 'Unwanted or mislabeled sexual content.' },
            reasonRude: { type: 'token', description: 'Rude, harassing, explicit, or otherwise unwelcoming behavior.' },
            reasonOther: { type: 'token', description: 'Reports not falling under another report category.' },
            reasonAppeal: { type: 'token', description: 'Appeal a previously taken moderation action.' },
            subjectType: {
                type: 'string',
                description: 'Tag describing a type of subject that might be reported.',
                knownValues: ['account', 'record', 'chat'],
            },
        },
    },
};
186
// Normalize a lexicon $ref (optionally 'lex:'-prefixed, optionally with a
// '#fragment') to a bare NSID. Local refs such as '#selfLabel' (fewer than
// three dotted segments) yield null.
function refToNsid(ref) {
    let base = ref;
    if (base.startsWith('lex:')) {
        base = base.slice(4);
    }
    const hash = base.indexOf('#');
    if (hash >= 0) {
        base = base.slice(0, hash);
    }
    if (base.split('.').length < 3) {
        return null;
    }
    return base;
}
194
// Collect every external NSID referenced anywhere in a lexicon schema by
// walking the whole document for `ref` and `union` nodes. Local '#fragment'
// refs are excluded (refToNsid returns null for them). Deduplicated, in
// first-seen (depth-first) order.
function extractRefs(schema) {
    const collected = new Set();
    const addRef = (candidate) => {
        if (typeof candidate === 'string') {
            const nsid = refToNsid(candidate);
            if (nsid) {
                collected.add(nsid);
            }
        }
    };
    const descend = (node) => {
        if (!node || typeof node !== 'object') {
            return;
        }
        if (Array.isArray(node)) {
            node.forEach(descend);
            return;
        }
        if (node.type === 'ref') {
            addRef(node.ref);
        }
        if (node.type === 'union' && Array.isArray(node.refs)) {
            node.refs.forEach(addRef);
        }
        // Recurse into every property value, whatever its shape.
        Object.values(node).forEach(descend);
    };
    descend(schema);
    return [...collected];
}
225
// Fetch a single lexicon record for an NSID via the full resolution chain:
// DNS TXT -> DID -> PDS -> com.atproto.repo.getRecord. Returns the record
// value, or null if any step fails to resolve.
async function fetchLexicon(nsid) {
    const dnsName = domainToLexiconDns(nsidToDomain(nsid));
    const txtRecords = await lookupTxt(dnsName);
    let did = null;
    for (const record of txtRecords) {
        const parsed = parseDidFromTxt(record);
        if (parsed) {
            did = parsed;
            break;
        }
    }
    if (!did) {
        return null;
    }
    const pdsEndpoint = await resolveDid(did);
    if (!pdsEndpoint) {
        return null;
    }
    const recordUrl = `${pdsEndpoint}/xrpc/com.atproto.repo.getRecord?repo=${encodeURIComponent(did)}&collection=com.atproto.lexicon.schema&rkey=${encodeURIComponent(nsid)}`;
    const response = await fetch(recordUrl);
    if (!response.ok) {
        return null;
    }
    const data = await response.json();
    return data.value ?? null;
}
247
/**
 * Resolve a lexicon by NSID from the AT Protocol registry,
 * recursively fetching all referenced lexicons.
 * Returns a map of NSID → Lexicon for all resolved schemas.
 */
export async function resolveLexicon(nsid) {
    const resolved = new Map();
    const visit = async (target) => {
        // Already resolved (also breaks reference cycles).
        if (resolved.has(target)) {
            return;
        }
        // Built-in core schemas short-circuit network resolution.
        const builtin = coreSchemas[target];
        if (builtin) {
            console.log(` ${target} (built-in)`);
            resolved.set(target, builtin);
            for (const ref of extractRefs(builtin)) {
                await visit(ref);
            }
            return;
        }
        console.log(` resolving ${target}...`);
        const lexicon = await fetchLexicon(target);
        if (!lexicon) {
            // Unresolvable refs are logged and skipped, not fatal.
            console.log(` could not resolve ${target}`);
            return;
        }
        resolved.set(target, lexicon);
        for (const ref of extractRefs(lexicon)) {
            await visit(ref);
        }
    };
    await visit(nsid);
    return resolved;
}
@@ -0,0 +1,4 @@
1
/** Console-style logging; suppressed entirely when the DEBUG env var is '0'. */
export declare function log(...args: unknown[]): void;
/** Write one structured JSON log line (ts/module/op plus defined fields) to stdout; suppressed when DEBUG='0'. */
export declare function emit(module: string, op: string, fields: Record<string, unknown>): void;
/** Start a stopwatch; the returned function reports elapsed whole milliseconds. */
export declare function timer(): () => number;
//# sourceMappingURL=logger.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../src/logger.ts"],"names":[],"mappings":"AAAA,wBAAgB,GAAG,CAAC,GAAG,IAAI,EAAE,OAAO,EAAE,GAAG,IAAI,CAG5C;AAED,wBAAgB,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,IAAI,CAWtF;AAED,wBAAgB,KAAK,IAAI,MAAM,MAAM,CAGpC"}
package/dist/logger.js ADDED
@@ -0,0 +1,23 @@
1
+ export function log(...args) {
2
+ if (process.env.DEBUG === '0')
3
+ return;
4
+ console.log(...args);
5
+ }
6
+ export function emit(module, op, fields) {
7
+ if (process.env.DEBUG === '0')
8
+ return;
9
+ const entry = {
10
+ ts: new Date().toISOString(),
11
+ module,
12
+ op,
13
+ };
14
+ for (const [k, v] of Object.entries(fields)) {
15
+ if (v !== undefined)
16
+ entry[k] = v;
17
+ }
18
+ process.stdout.write(JSON.stringify(entry) + '\n');
19
+ }
20
// Start a stopwatch. The returned thunk yields whole milliseconds elapsed
// since the call to timer().
export function timer() {
    const startedAt = performance.now();
    return function elapsed() {
        return Math.round(performance.now() - startedAt);
    };
}
package/dist/main.d.ts ADDED
@@ -0,0 +1,3 @@
1
+ #!/usr/bin/env node
2
+ export {};
3
+ //# sourceMappingURL=main.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"main.d.ts","sourceRoot":"","sources":["../src/main.ts"],"names":[],"mappings":""}
package/dist/main.js ADDED
@@ -0,0 +1,148 @@
1
#!/usr/bin/env node
// Application entrypoint: loads config and lexicons, derives the DuckDB
// table schema per collection, wires up feeds/xrpc/opengraph/labels/oauth,
// starts the HTTP server immediately, then runs the firehose indexer and a
// background backfill (FTS indexes rebuild once backfill completes).
import { mkdirSync } from 'node:fs';
import { dirname, resolve } from 'node:path';
import { log } from "./logger.js";
import { loadConfig } from "./config.js";
import { loadLexicons, storeLexicons, discoverCollections, generateTableSchema, generateCreateTableSQL, } from "./schema.js";
import { discoverViews } from "./views.js";
import { initDatabase, getCursor, querySQL, backfillChildTables } from "./db.js";
import { initFeeds, listFeeds } from "./feeds.js";
import { initXrpc, listXrpc, configureRelay } from "./xrpc.js";
import { initOpengraph } from "./opengraph.js";
import { initLabels, getLabelDefinitions } from "./labels.js";
import { startIndexer } from "./indexer.js";
import { rebuildAllIndexes } from "./fts.js";
import { startServer } from "./server.js";
import { validateLexicons } from '@bigmoves/lexicon';
import { relayHttpUrl } from "./config.js";
import { runBackfill } from "./backfill.js";
import { initOAuth } from "./oauth/server.js";
import { loadOnLoginHook } from "./oauth/hooks.js";
import { initSetup } from "./setup.js";
// Config path comes from argv; all sibling directories (lexicons/, feeds/,
// xrpc/, ...) are resolved relative to the config file's directory.
const configPath = process.argv[2] || 'config.yaml';
const configDir = dirname(resolve(configPath));
// 1. Load config
const config = loadConfig(configPath);
configureRelay(config.relay);
// 2. Load lexicons, validate schemas, and discover collections
const lexicons = loadLexicons(resolve(configDir, 'lexicons'));
const lexiconErrors = validateLexicons([...lexicons.values()]);
if (lexiconErrors) {
    // Invalid lexicons are fatal: report every error, then exit non-zero.
    for (const [nsid, errors] of Object.entries(lexiconErrors)) {
        for (const err of errors) {
            console.error(`[main] Invalid lexicon ${nsid}: ${err}`);
        }
    }
    process.exit(1);
}
storeLexicons(lexicons);
// Auto-discover collections from record-type lexicons, fall back to config
const collections = config.collections.length > 0 ? config.collections : discoverCollections(lexicons);
if (collections.length === 0) {
    log(`[main] No record collections found — running in API-only mode. Add record lexicons to start indexing.`);
}
log(`[main] Loaded config: ${collections.length} collections`);
// Discover view defs from lexicons
discoverViews();
await loadOnLoginHook(resolve(configDir, 'hooks'));
const schemas = [];
const ddlStatements = [];
for (const nsid of collections) {
    const lexicon = lexicons.get(nsid);
    if (!lexicon) {
        // No lexicon: store records as opaque JSON with minimal indexes.
        log(`[main] No lexicon found for ${nsid}, using generic JSON storage`);
        const genericDDL = `CREATE TABLE IF NOT EXISTS "${nsid}" (
uri TEXT PRIMARY KEY,
cid TEXT,
did TEXT NOT NULL,
indexed_at TIMESTAMP NOT NULL,
data JSON
);
CREATE INDEX IF NOT EXISTS idx_${nsid.replace(/\./g, '_')}_indexed ON "${nsid}"(indexed_at DESC);
CREATE INDEX IF NOT EXISTS idx_${nsid.replace(/\./g, '_')}_author ON "${nsid}"(did);`;
        schemas.push({ collection: nsid, tableName: `"${nsid}"`, columns: [], refColumns: [], children: [], unions: [] });
        ddlStatements.push(genericDDL);
        continue;
    }
    const schema = generateTableSchema(nsid, lexicon, lexicons);
    schemas.push(schema);
    ddlStatements.push(generateCreateTableSQL(schema));
    log(`[main] Schema for ${nsid}: ${schema.columns.length} columns, ${schema.unions.length} unions`);
}
// 3. Ensure data directory exists and initialize DuckDB
if (config.database !== ':memory:') {
    mkdirSync(dirname(config.database), { recursive: true });
}
await initDatabase(config.database, schemas, ddlStatements);
log(`[main] DuckDB initialized (${config.database === ':memory:' ? 'in-memory' : config.database})`);
// 3a. Backfill child tables for decomposed arrays (one-time migration)
await backfillChildTables();
// 3b. Run setup hooks (after DB init, before server)
await initSetup(resolve(configDir, 'setup'));
// Detect orphaned tables
// (dotted table names with no matching collection; warning only — best
// effort, errors are deliberately swallowed)
try {
    const existingTables = await querySQL(`SELECT table_name FROM information_schema.tables WHERE table_schema = 'main' AND table_name NOT LIKE '\\_%' ESCAPE '\\'`);
    for (const row of existingTables) {
        const tableName = row.table_name;
        const isChildTable = collections.some((c) => tableName.startsWith(c + '__'));
        if (tableName.includes('.') && !collections.includes(tableName) && !isChildTable) {
            console.warn(`[warn] Table "${tableName}" exists but has no lexicon. Run 'hatk destroy collection ${tableName}' to clean up.`);
        }
    }
}
catch { }
// 4. Initialize feeds, xrpc handlers, og, labels from directories
await initFeeds(resolve(configDir, 'feeds'));
log(`[main] Feeds initialized: ${listFeeds()
    .map((f) => f.name)
    .join(', ') || 'none'}`);
await initXrpc(resolve(configDir, 'xrpc'));
log(`[main] XRPC handlers initialized: ${listXrpc().join(', ') || 'none'}`);
await initOpengraph(resolve(configDir, 'og'));
log(`[main] OpenGraph initialized`);
await initLabels(resolve(configDir, 'labels'));
log(`[main] Labels initialized: ${getLabelDefinitions().length} definitions`);
if (config.oauth) {
    await initOAuth(config.oauth, config.plc, config.relay);
    log(`[main] OAuth initialized (issuer: ${config.oauth.issuer})`);
}
// 5. Start server immediately (don't wait for backfill)
const collectionSet = new Set(collections);
startServer(config.port, collections, config.publicDir, config.oauth, config.admins);
log(`\nhatk running:`);
log(` Relay: ${config.relay}`);
log(` Database: ${config.database}`);
log(` API: http://localhost:${config.port}`);
log(` Collections: ${collections.join(', ')}`);
log(` Feeds: ${listFeeds()
    .map((f) => f.name)
    .join(', ')}`);
// 6. Start indexer with cursor
// (cursor persisted in the DB so restarts resume where the firehose left off)
const cursor = await getCursor('relay');
startIndexer({
    relayUrl: config.relay,
    collections: collectionSet,
    signalCollections: config.backfill.signalCollections ? new Set(config.backfill.signalCollections) : undefined,
    pinnedRepos: config.backfill.repos ? new Set(config.backfill.repos) : undefined,
    cursor,
    fetchTimeout: config.backfill.fetchTimeout,
    maxRetries: config.backfill.maxRetries,
    ftsRebuildInterval: config.ftsRebuildInterval,
});
// 7. Run backfill in background
// (fire-and-forget promise chain: errors are logged, never crash the server)
runBackfill({
    pdsUrl: relayHttpUrl(config.relay),
    plcUrl: config.plc,
    collections: collectionSet,
    config: config.backfill,
})
    .then(() => {
    log('[main] Backfill complete, rebuilding FTS indexes...');
    return rebuildAllIndexes(collections);
})
    .then(() => {
    log('[main] FTS indexes ready');
})
    .catch((err) => {
    console.error('[main] Backfill error:', err.message);
});
package/dist/mst.d.ts ADDED
@@ -0,0 +1,6 @@
1
/** One leaf entry recovered from a Merkle Search Tree walk. */
export interface MstEntry {
    /** Full (prefix-decompressed) record key path. */
    path: string;
    /** CID of the record block this key points at. */
    cid: string;
}
/**
 * Walk an MST starting at rootCid and return every (path, cid) leaf entry.
 * `blocks` maps CID string -> raw CBOR-encoded node bytes; nodes whose CID
 * is missing from the map are skipped.
 */
export declare function walkMst(blocks: Map<string, Uint8Array>, rootCid: string): MstEntry[];
//# sourceMappingURL=mst.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"mst.d.ts","sourceRoot":"","sources":["../src/mst.ts"],"names":[],"mappings":"AAEA,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,MAAM,CAAA;IACZ,GAAG,EAAE,MAAM,CAAA;CACZ;AAED,wBAAgB,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,EAAE,UAAU,CAAC,EAAE,OAAO,EAAE,MAAM,GAAG,QAAQ,EAAE,CAiCpF"}
package/dist/mst.js ADDED
@@ -0,0 +1,30 @@
1
+ import { cborDecode } from "./cbor.js";
2
// Walk a Merkle Search Tree from rootCid, collecting every leaf (path, cid)
// in key order. Keys are stored prefix-compressed: each entry's `p` is the
// number of leading characters shared with the previous key in traversal
// order, and `k` is the remaining suffix.
export function walkMst(blocks, rootCid) {
    const entries = [];
    // In-order DFS. Returns the last full key produced in this subtree so the
    // caller can continue prefix decompression from it.
    function visit(cid, prefix) {
        const data = blocks.get(cid);
        if (!data)
            return prefix; // block missing: skip subtree, keep current prefix
        const { value: node } = cborDecode(data);
        // Visit left subtree
        if (node.l?.$link)
            visit(node.l.$link, prefix);
        let lastKey = prefix;
        for (const entry of node.e || []) {
            // Key suffix may be raw bytes or already a string in decoded CBOR.
            const keySuffix = entry.k instanceof Uint8Array ? new TextDecoder().decode(entry.k) : entry.k;
            const prefixLen = entry.p || 0;
            const fullKey = lastKey.substring(0, prefixLen) + keySuffix;
            lastKey = fullKey;
            // `v` links to a record block — emit a leaf entry.
            if (entry.v?.$link) {
                entries.push({ path: fullKey, cid: entry.v.$link });
            }
            // Visit right subtree
            if (entry.t?.$link) {
                visit(entry.t.$link, lastKey);
            }
        }
        return lastKey;
    }
    visit(rootCid, '');
    return entries;
}
@@ -0,0 +1,16 @@
1
+ import type { OAuthClientConfig } from '../config.ts';
2
/** OAuth client metadata as served/consumed by the authorization server. */
export interface ClientMetadata {
    client_id: string;
    client_name: string;
    /** Exact-match allow-list of redirect URIs (loopback clients are matched by origin instead). */
    redirect_uris: string[];
    grant_types: string[];
    response_types: string[];
    token_endpoint_auth_method: string;
    dpop_bound_access_tokens: boolean;
    scope: string;
}
/** True when clientId parses as a URL whose host is localhost, 127.0.0.1, or [::1]. */
export declare function isLoopbackClient(clientId: string): boolean;
/** Synthesize metadata for a loopback client (the client_id doubles as the sole redirect URI). */
export declare function getLoopbackClientMetadata(clientId: string): ClientMetadata;
/** Resolve metadata for a client_id: loopback clients are synthesized, others must be registered; null if unknown. */
export declare function resolveClient(clientId: string, registeredClients: OAuthClientConfig[]): ClientMetadata | null;
/** Validate a redirect_uri: origin match for loopback clients, exact allow-list match otherwise. */
export declare function validateRedirectUri(clientMetadata: ClientMetadata, redirectUri: string): boolean;
//# sourceMappingURL=client.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../../src/oauth/client.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,cAAc,CAAA;AAErD,MAAM,WAAW,cAAc;IAC7B,SAAS,EAAE,MAAM,CAAA;IACjB,WAAW,EAAE,MAAM,CAAA;IACnB,aAAa,EAAE,MAAM,EAAE,CAAA;IACvB,WAAW,EAAE,MAAM,EAAE,CAAA;IACrB,cAAc,EAAE,MAAM,EAAE,CAAA;IACxB,0BAA0B,EAAE,MAAM,CAAA;IAClC,wBAAwB,EAAE,OAAO,CAAA;IACjC,KAAK,EAAE,MAAM,CAAA;CACd;AAED,wBAAgB,gBAAgB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAQ1D;AAED,wBAAgB,yBAAyB,CAAC,QAAQ,EAAE,MAAM,GAAG,cAAc,CAW1E;AAED,wBAAgB,aAAa,CAAC,QAAQ,EAAE,MAAM,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,GAAG,cAAc,GAAG,IAAI,CAgB7G;AAED,wBAAgB,mBAAmB,CAAC,cAAc,EAAE,cAAc,EAAE,WAAW,EAAE,MAAM,GAAG,OAAO,CAYhG"}
@@ -0,0 +1,54 @@
1
+ // packages/hatk/src/oauth/client.ts
2
// A client_id is a "loopback" client when it parses as a URL whose host is
// a local loopback address (localhost, 127.0.0.1, or IPv6 ::1).
export function isLoopbackClient(clientId) {
    let parsed;
    try {
        parsed = new URL(clientId);
    }
    catch {
        return false; // not a URL at all
    }
    const host = parsed.hostname.toLowerCase();
    return ['localhost', '127.0.0.1', '[::1]'].includes(host);
}
12
// Synthesize standard metadata for a loopback client: the client_id itself
// doubles as the only redirect URI; public client (no token auth), DPoP-bound.
export function getLoopbackClientMetadata(clientId) {
    const metadata = {
        client_id: clientId,
        client_name: 'Loopback Client',
        redirect_uris: [clientId],
        grant_types: ['authorization_code', 'refresh_token'],
        response_types: ['code'],
        token_endpoint_auth_method: 'none',
        dpop_bound_access_tokens: true,
        scope: 'atproto',
    };
    return metadata;
}
24
// Look up client metadata for a client_id. Loopback clients are synthesized
// on the fly; any other client must appear in the registered-clients config
// (its grant/response/auth settings are normalized here). Returns null for
// unknown clients.
export function resolveClient(clientId, registeredClients) {
    if (isLoopbackClient(clientId)) {
        return getLoopbackClientMetadata(clientId);
    }
    for (const candidate of registeredClients) {
        if (candidate.client_id === clientId) {
            return {
                client_id: candidate.client_id,
                client_name: candidate.client_name,
                redirect_uris: candidate.redirect_uris,
                grant_types: ['authorization_code', 'refresh_token'],
                response_types: ['code'],
                token_endpoint_auth_method: 'none',
                dpop_bound_access_tokens: true,
                scope: 'atproto',
            };
        }
    }
    return null;
}
41
// Check a redirect_uri against client metadata. Loopback clients match by
// origin only; registered clients require an exact allow-list match.
export function validateRedirectUri(clientMetadata, redirectUri) {
    if (!isLoopbackClient(clientMetadata.client_id)) {
        return clientMetadata.redirect_uris.includes(redirectUri);
    }
    try {
        const clientOrigin = new URL(clientMetadata.client_id).origin;
        return clientOrigin === new URL(redirectUri).origin;
    }
    catch {
        return false; // unparseable URL can never match
    }
}
@@ -0,0 +1,28 @@
1
/** Encode bytes as a base64url string. */
export declare function base64UrlEncode(bytes: Uint8Array): string;
/** Decode a base64url string back to bytes. */
export declare function base64UrlDecode(str: string): Uint8Array;
/** Generate a fresh asymmetric key pair, returned as JWK objects. */
export declare function generateKeyPair(): Promise<{
    privateJwk: JsonWebKey;
    publicJwk: JsonWebKey;
}>;
/** Import a private JWK as a WebCrypto CryptoKey. */
export declare function importPrivateKey(jwk: JsonWebKey): Promise<CryptoKey>;
/** Import a public JWK as a WebCrypto CryptoKey. */
export declare function importPublicKey(jwk: JsonWebKey): Promise<CryptoKey>;
/** Sign data with an ES256 private key; resolves to the raw signature bytes. */
export declare function signEs256(privateKey: CryptoKey, data: Uint8Array): Promise<Uint8Array>;
/** Verify an ES256 signature over data against a public key. */
export declare function verifyEs256(publicKey: CryptoKey, signature: Uint8Array, data: Uint8Array): Promise<boolean>;
/** Compute a JWK thumbprint from EC key parameters (kty/crv/x/y — cf. RFC 7638). */
export declare function computeJwkThumbprint(jwk: {
    kty?: string;
    crv?: string;
    x?: string;
    y?: string;
}): Promise<string>;
/** SHA-256 digest of the input string. */
export declare function sha256(data: string): Promise<Uint8Array>;
/** Assemble a compact JWT string from header, payload, and a precomputed signature. */
export declare function createJwt(header: Record<string, unknown>, payload: Record<string, unknown>, signature: Uint8Array): string;
/** Split and decode a compact JWT WITHOUT verifying it; signatureInput is the signed `header.payload` bytes. */
export declare function parseJwt(token: string): {
    header: any;
    payload: any;
    signatureInput: Uint8Array;
    signature: Uint8Array;
};
/** Sign header+payload with the private key and return the compact JWT. */
export declare function signJwt(header: Record<string, unknown>, payload: Record<string, unknown>, privateKey: CryptoKey): Promise<string>;
/** Generate `length` cryptographically random bytes. */
export declare function randomBytes(length: number): Uint8Array;
/** Generate a fresh random token string. */
export declare function randomToken(): string;
//# sourceMappingURL=crypto.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"crypto.d.ts","sourceRoot":"","sources":["../../src/oauth/crypto.ts"],"names":[],"mappings":"AAKA,wBAAgB,eAAe,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAKzD;AAED,wBAAgB,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,UAAU,CAQvD;AAeD,wBAAsB,eAAe,IAAI,OAAO,CAAC;IAAE,UAAU,EAAE,UAAU,CAAC;IAAC,SAAS,EAAE,UAAU,CAAA;CAAE,CAAC,CAKlG;AAED,wBAAsB,gBAAgB,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,CAAC,SAAS,CAAC,CAE1E;AAED,wBAAsB,eAAe,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,CAAC,SAAS,CAAC,CAEzE;AAED,wBAAsB,SAAS,CAAC,UAAU,EAAE,SAAS,EAAE,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC,CAe5F;AAED,wBAAsB,WAAW,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,UAAU,EAAE,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,OAAO,CAAC,CAOjH;AAED,wBAAsB,oBAAoB,CAAC,GAAG,EAAE;IAC9C,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,CAAC,CAAC,EAAE,MAAM,CAAA;IACV,CAAC,CAAC,EAAE,MAAM,CAAA;CACX,GAAG,OAAO,CAAC,MAAM,CAAC,CAIlB;AAED,wBAAsB,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAE9D;AAED,wBAAgB,SAAS,CACvB,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAC/B,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAChC,SAAS,EAAE,UAAU,GACpB,MAAM,CAKR;AAED,wBAAgB,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG;IACvC,MAAM,EAAE,GAAG,CAAA;IACX,OAAO,EAAE,GAAG,CAAA;IACZ,cAAc,EAAE,UAAU,CAAA;IAC1B,SAAS,EAAE,UAAU,CAAA;CACtB,CAQA;AAED,wBAAsB,OAAO,CAC3B,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAC/B,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAChC,UAAU,EAAE,SAAS,GACpB,OAAO,CAAC,MAAM,CAAC,CAMjB;AAED,wBAAgB,WAAW,CAAC,MAAM,EAAE,MAAM,GAAG,UAAU,CAEtD;AAED,wBAAgB,WAAW,IAAI,MAAM,CAEpC"}