@hatk/hatk 0.0.1-alpha.4 → 0.0.1-alpha.41

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150)
  1. package/dist/adapter.d.ts +19 -0
  2. package/dist/adapter.d.ts.map +1 -0
  3. package/dist/adapter.js +107 -0
  4. package/dist/backfill.d.ts +60 -1
  5. package/dist/backfill.d.ts.map +1 -1
  6. package/dist/backfill.js +167 -33
  7. package/dist/car.d.ts +59 -1
  8. package/dist/car.d.ts.map +1 -1
  9. package/dist/car.js +179 -7
  10. package/dist/cbor.d.ts +37 -0
  11. package/dist/cbor.d.ts.map +1 -1
  12. package/dist/cbor.js +36 -3
  13. package/dist/cid.d.ts +37 -0
  14. package/dist/cid.d.ts.map +1 -1
  15. package/dist/cid.js +38 -3
  16. package/dist/cli.js +417 -133
  17. package/dist/cloudflare/container.d.ts +73 -0
  18. package/dist/cloudflare/container.d.ts.map +1 -0
  19. package/dist/cloudflare/container.js +232 -0
  20. package/dist/cloudflare/hooks.d.ts +33 -0
  21. package/dist/cloudflare/hooks.d.ts.map +1 -0
  22. package/dist/cloudflare/hooks.js +40 -0
  23. package/dist/cloudflare/init.d.ts +27 -0
  24. package/dist/cloudflare/init.d.ts.map +1 -0
  25. package/dist/cloudflare/init.js +103 -0
  26. package/dist/cloudflare/worker.d.ts +27 -0
  27. package/dist/cloudflare/worker.d.ts.map +1 -0
  28. package/dist/cloudflare/worker.js +54 -0
  29. package/dist/config.d.ts +12 -1
  30. package/dist/config.d.ts.map +1 -1
  31. package/dist/config.js +36 -9
  32. package/dist/database/adapter-factory.d.ts +6 -0
  33. package/dist/database/adapter-factory.d.ts.map +1 -0
  34. package/dist/database/adapter-factory.js +20 -0
  35. package/dist/database/adapters/d1.d.ts +56 -0
  36. package/dist/database/adapters/d1.d.ts.map +1 -0
  37. package/dist/database/adapters/d1.js +108 -0
  38. package/dist/database/adapters/duckdb-search.d.ts +12 -0
  39. package/dist/database/adapters/duckdb-search.d.ts.map +1 -0
  40. package/dist/database/adapters/duckdb-search.js +27 -0
  41. package/dist/database/adapters/duckdb.d.ts +25 -0
  42. package/dist/database/adapters/duckdb.d.ts.map +1 -0
  43. package/dist/database/adapters/duckdb.js +161 -0
  44. package/dist/database/adapters/sqlite-search.d.ts +23 -0
  45. package/dist/database/adapters/sqlite-search.d.ts.map +1 -0
  46. package/dist/database/adapters/sqlite-search.js +74 -0
  47. package/dist/database/adapters/sqlite.d.ts +18 -0
  48. package/dist/database/adapters/sqlite.d.ts.map +1 -0
  49. package/dist/database/adapters/sqlite.js +87 -0
  50. package/dist/database/db.d.ts +159 -0
  51. package/dist/database/db.d.ts.map +1 -0
  52. package/dist/database/db.js +1445 -0
  53. package/dist/database/dialect.d.ts +45 -0
  54. package/dist/database/dialect.d.ts.map +1 -0
  55. package/dist/database/dialect.js +72 -0
  56. package/dist/database/fts.d.ts +27 -0
  57. package/dist/database/fts.d.ts.map +1 -0
  58. package/dist/database/fts.js +846 -0
  59. package/dist/database/index.d.ts +7 -0
  60. package/dist/database/index.d.ts.map +1 -0
  61. package/dist/database/index.js +6 -0
  62. package/dist/database/ports.d.ts +50 -0
  63. package/dist/database/ports.d.ts.map +1 -0
  64. package/dist/database/ports.js +1 -0
  65. package/dist/database/schema.d.ts +61 -0
  66. package/dist/database/schema.d.ts.map +1 -0
  67. package/dist/database/schema.js +394 -0
  68. package/dist/db.d.ts +1 -1
  69. package/dist/db.d.ts.map +1 -1
  70. package/dist/db.js +4 -38
  71. package/dist/dev-entry.d.ts +8 -0
  72. package/dist/dev-entry.d.ts.map +1 -0
  73. package/dist/dev-entry.js +111 -0
  74. package/dist/feeds.d.ts +12 -8
  75. package/dist/feeds.d.ts.map +1 -1
  76. package/dist/feeds.js +45 -6
  77. package/dist/fts.d.ts.map +1 -1
  78. package/dist/fts.js +5 -0
  79. package/dist/hooks.d.ts +43 -0
  80. package/dist/hooks.d.ts.map +1 -0
  81. package/dist/hooks.js +102 -0
  82. package/dist/hydrate.d.ts +6 -5
  83. package/dist/hydrate.d.ts.map +1 -1
  84. package/dist/hydrate.js +4 -16
  85. package/dist/indexer.d.ts +22 -0
  86. package/dist/indexer.d.ts.map +1 -1
  87. package/dist/indexer.js +70 -7
  88. package/dist/labels.d.ts +34 -0
  89. package/dist/labels.d.ts.map +1 -1
  90. package/dist/labels.js +66 -6
  91. package/dist/logger.d.ts +29 -0
  92. package/dist/logger.d.ts.map +1 -1
  93. package/dist/logger.js +29 -0
  94. package/dist/main.js +135 -67
  95. package/dist/mst.d.ts +18 -1
  96. package/dist/mst.d.ts.map +1 -1
  97. package/dist/mst.js +19 -8
  98. package/dist/oauth/db.d.ts.map +1 -1
  99. package/dist/oauth/db.js +43 -17
  100. package/dist/oauth/server.d.ts +2 -0
  101. package/dist/oauth/server.d.ts.map +1 -1
  102. package/dist/oauth/server.js +103 -8
  103. package/dist/oauth/session.d.ts +11 -0
  104. package/dist/oauth/session.d.ts.map +1 -0
  105. package/dist/oauth/session.js +65 -0
  106. package/dist/opengraph.d.ts +10 -0
  107. package/dist/opengraph.d.ts.map +1 -1
  108. package/dist/opengraph.js +73 -39
  109. package/dist/pds-proxy.d.ts +42 -0
  110. package/dist/pds-proxy.d.ts.map +1 -0
  111. package/dist/pds-proxy.js +189 -0
  112. package/dist/renderer.d.ts +27 -0
  113. package/dist/renderer.d.ts.map +1 -0
  114. package/dist/renderer.js +46 -0
  115. package/dist/resolve-hatk.d.ts +6 -0
  116. package/dist/resolve-hatk.d.ts.map +1 -0
  117. package/dist/resolve-hatk.js +20 -0
  118. package/dist/response.d.ts +16 -0
  119. package/dist/response.d.ts.map +1 -0
  120. package/dist/response.js +69 -0
  121. package/dist/scanner.d.ts +21 -0
  122. package/dist/scanner.d.ts.map +1 -0
  123. package/dist/scanner.js +88 -0
  124. package/dist/schema.d.ts +8 -0
  125. package/dist/schema.d.ts.map +1 -1
  126. package/dist/schema.js +29 -0
  127. package/dist/seed.d.ts +19 -0
  128. package/dist/seed.d.ts.map +1 -1
  129. package/dist/seed.js +43 -4
  130. package/dist/server-init.d.ts +8 -0
  131. package/dist/server-init.d.ts.map +1 -0
  132. package/dist/server-init.js +61 -0
  133. package/dist/server.d.ts +26 -3
  134. package/dist/server.d.ts.map +1 -1
  135. package/dist/server.js +528 -635
  136. package/dist/setup.d.ts +28 -1
  137. package/dist/setup.d.ts.map +1 -1
  138. package/dist/setup.js +50 -3
  139. package/dist/test.d.ts +1 -1
  140. package/dist/test.d.ts.map +1 -1
  141. package/dist/test.js +38 -32
  142. package/dist/views.js +1 -1
  143. package/dist/vite-plugin.d.ts +1 -1
  144. package/dist/vite-plugin.d.ts.map +1 -1
  145. package/dist/vite-plugin.js +254 -66
  146. package/dist/xrpc.d.ts +60 -10
  147. package/dist/xrpc.d.ts.map +1 -1
  148. package/dist/xrpc.js +155 -39
  149. package/package.json +13 -6
  150. package/public/admin.html +0 -54
@@ -0,0 +1,27 @@
1
+ export interface SSRManifest {
2
+ getPreloadTags(url: string): string;
3
+ }
4
+ export interface RenderResult {
5
+ html: string;
6
+ head?: string;
7
+ }
8
+ export type RendererHandler = (request: Request, manifest: SSRManifest) => Promise<RenderResult>;
9
+ export declare function defineRenderer(handler: RendererHandler): {
10
+ __type: "renderer";
11
+ handler: RendererHandler;
12
+ };
13
+ export declare function registerRenderer(handler: RendererHandler): void;
14
+ export declare function setSSRManifest(manifest: SSRManifest): void;
15
+ export declare function getRenderer(): RendererHandler | null;
16
+ export declare function getSSRManifest(): SSRManifest | null;
17
+ /**
18
+ * Render an HTML page by calling the user's renderer and assembling the result
19
+ * into the index.html template.
20
+ *
21
+ * @param template - The index.html content (with <!--ssr-outlet--> placeholder)
22
+ * @param request - The incoming Request
23
+ * @param ogMeta - Optional OG meta tags to inject
24
+ * @returns Assembled HTML string, or null if no renderer is registered
25
+ */
26
+ export declare function renderPage(template: string, request: Request, ogMeta?: string | null): Promise<string | null>;
27
+ //# sourceMappingURL=renderer.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"renderer.d.ts","sourceRoot":"","sources":["../src/renderer.ts"],"names":[],"mappings":"AAEA,MAAM,WAAW,WAAW;IAC1B,cAAc,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;CACpC;AAED,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,CAAC,EAAE,MAAM,CAAA;CACd;AAED,MAAM,MAAM,eAAe,GAAG,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,WAAW,KAAK,OAAO,CAAC,YAAY,CAAC,CAAA;AAKhG,wBAAgB,cAAc,CAAC,OAAO,EAAE,eAAe;;;EAEtD;AAED,wBAAgB,gBAAgB,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI,CAG/D;AAED,wBAAgB,cAAc,CAAC,QAAQ,EAAE,WAAW,GAAG,IAAI,CAE1D;AAED,wBAAgB,WAAW,IAAI,eAAe,GAAG,IAAI,CAEpD;AAED,wBAAgB,cAAc,IAAI,WAAW,GAAG,IAAI,CAEnD;AAED;;;;;;;;GAQG;AACH,wBAAsB,UAAU,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAsBnH"}
@@ -0,0 +1,46 @@
1
+ import { log } from "./logger.js";
2
+ let renderer = null;
3
+ let ssrManifest = null;
4
+ export function defineRenderer(handler) {
5
+ return { __type: 'renderer', handler };
6
+ }
7
+ export function registerRenderer(handler) {
8
+ renderer = handler;
9
+ log('[renderer] SSR renderer registered');
10
+ }
11
+ export function setSSRManifest(manifest) {
12
+ ssrManifest = manifest;
13
+ }
14
+ export function getRenderer() {
15
+ return renderer;
16
+ }
17
+ export function getSSRManifest() {
18
+ return ssrManifest;
19
+ }
20
+ /**
21
+ * Render an HTML page by calling the user's renderer and assembling the result
22
+ * into the index.html template.
23
+ *
24
+ * @param template - The index.html content (with <!--ssr-outlet--> placeholder)
25
+ * @param request - The incoming Request
26
+ * @param ogMeta - Optional OG meta tags to inject
27
+ * @returns Assembled HTML string, or null if no renderer is registered
28
+ */
29
+ export async function renderPage(template, request, ogMeta) {
30
+ if (!renderer)
31
+ return null;
32
+ const manifest = ssrManifest || { getPreloadTags: () => '' };
33
+ const result = await renderer(request, manifest);
34
+ let html = template;
35
+ // Inject SSR head tags (preloads, styles)
36
+ if (result.head) {
37
+ html = html.replace('</head>', `${result.head}\n</head>`);
38
+ }
39
+ // Inject OG meta tags
40
+ if (ogMeta) {
41
+ html = html.replace('</head>', `${ogMeta}\n</head>`);
42
+ }
43
+ // Inject rendered HTML into the outlet
44
+ html = html.replace('<!--ssr-outlet-->', result.html);
45
+ return html;
46
+ }
@@ -0,0 +1,6 @@
1
+ /**
2
+ * Register a Node.js module resolve hook so dynamic import() of server files
3
+ * can resolve the $hatk alias to the generated entry points.
4
+ */
5
+ export declare function registerHatkResolveHook(): void;
6
+ //# sourceMappingURL=resolve-hatk.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"resolve-hatk.d.ts","sourceRoot":"","sources":["../src/resolve-hatk.ts"],"names":[],"mappings":"AAIA;;;GAGG;AACH,wBAAgB,uBAAuB,IAAI,IAAI,CAU9C"}
@@ -0,0 +1,20 @@
1
+ import { resolve } from 'node:path';
2
+ import { pathToFileURL } from 'node:url';
3
+ import { registerHooks } from 'node:module';
4
+ /**
5
+ * Register a Node.js module resolve hook so dynamic import() of server files
6
+ * can resolve the $hatk alias to the generated entry points.
7
+ */
8
+ export function registerHatkResolveHook() {
9
+ const hatkUrl = pathToFileURL(resolve('hatk.generated.ts')).href;
10
+ const hatkClientUrl = pathToFileURL(resolve('hatk.generated.client.ts')).href;
11
+ registerHooks({
12
+ resolve(specifier, context, nextResolve) {
13
+ if (specifier === '$hatk/client')
14
+ return { url: hatkClientUrl, shortCircuit: true };
15
+ if (specifier === '$hatk')
16
+ return { url: hatkUrl, shortCircuit: true };
17
+ return nextResolve(specifier, context);
18
+ },
19
+ });
20
+ }
@@ -0,0 +1,16 @@
1
+ /**
2
+ * Create a JSON Response with optional gzip compression.
3
+ * Mirrors the old jsonResponse/sendJson behavior.
4
+ */
5
+ export declare function json(data: unknown, status?: number, acceptEncoding?: string | null): Response;
6
+ /** Create a JSON error Response. */
7
+ export declare function jsonError(status: number, message: string, acceptEncoding?: string | null): Response;
8
+ /** CORS preflight Response. */
9
+ export declare function cors(): Response;
10
+ /** Add CORS headers to an existing Response. */
11
+ export declare function withCors(response: Response): Response;
12
+ /** Create a static file Response with correct MIME type. */
13
+ export declare function file(content: Buffer | Uint8Array, contentType: string, cacheControl?: string): Response;
14
+ /** 404 Not Found. */
15
+ export declare function notFound(): Response;
16
+ //# sourceMappingURL=response.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"response.d.ts","sourceRoot":"","sources":["../src/response.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,MAAM,SAAM,EAAE,cAAc,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,QAAQ,CAuB1F;AAED,oCAAoC;AACpC,wBAAgB,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,cAAc,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,QAAQ,CAEnG;AAED,+BAA+B;AAC/B,wBAAgB,IAAI,IAAI,QAAQ,CAS/B;AAED,gDAAgD;AAChD,wBAAgB,QAAQ,CAAC,QAAQ,EAAE,QAAQ,GAAG,QAAQ,CAUrD;AAED,4DAA4D;AAC5D,wBAAgB,IAAI,CAAC,OAAO,EAAE,MAAM,GAAG,UAAU,EAAE,WAAW,EAAE,MAAM,EAAE,YAAY,CAAC,EAAE,MAAM,GAAG,QAAQ,CAQvG;AAED,qBAAqB;AACrB,wBAAgB,QAAQ,IAAI,QAAQ,CAEnC"}
@@ -0,0 +1,69 @@
1
+ import { gzipSync } from 'node:zlib';
2
+ import { normalizeValue } from "./database/db.js";
3
+ /**
4
+ * Create a JSON Response with optional gzip compression.
5
+ * Mirrors the old jsonResponse/sendJson behavior.
6
+ */
7
+ export function json(data, status = 200, acceptEncoding) {
8
+ const body = Buffer.from(JSON.stringify(data, (_, v) => normalizeValue(v)));
9
+ if (body.length > 1024 && acceptEncoding && /\bgzip\b/.test(acceptEncoding)) {
10
+ const compressed = gzipSync(body);
11
+ return new Response(compressed, {
12
+ status,
13
+ headers: {
14
+ 'Content-Type': 'application/json',
15
+ 'Content-Encoding': 'gzip',
16
+ Vary: 'Accept-Encoding',
17
+ ...(status === 200 ? { 'Cache-Control': 'no-store' } : {}),
18
+ },
19
+ });
20
+ }
21
+ return new Response(body, {
22
+ status,
23
+ headers: {
24
+ 'Content-Type': 'application/json',
25
+ ...(status === 200 ? { 'Cache-Control': 'no-store' } : {}),
26
+ },
27
+ });
28
+ }
29
+ /** Create a JSON error Response. */
30
+ export function jsonError(status, message, acceptEncoding) {
31
+ return json({ error: message }, status, acceptEncoding);
32
+ }
33
+ /** CORS preflight Response. */
34
+ export function cors() {
35
+ return new Response(null, {
36
+ status: 200,
37
+ headers: {
38
+ 'Access-Control-Allow-Origin': '*',
39
+ 'Access-Control-Allow-Headers': '*',
40
+ 'Access-Control-Allow-Methods': 'GET, POST, OPTIONS',
41
+ },
42
+ });
43
+ }
44
+ /** Add CORS headers to an existing Response. */
45
+ export function withCors(response) {
46
+ const headers = new Headers(response.headers);
47
+ headers.set('Access-Control-Allow-Origin', '*');
48
+ headers.set('Access-Control-Allow-Headers', '*');
49
+ headers.set('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
50
+ return new Response(response.body, {
51
+ status: response.status,
52
+ statusText: response.statusText,
53
+ headers,
54
+ });
55
+ }
56
+ /** Create a static file Response with correct MIME type. */
57
+ export function file(content, contentType, cacheControl) {
58
+ return new Response(Buffer.from(content), {
59
+ status: 200,
60
+ headers: {
61
+ 'Content-Type': contentType,
62
+ ...(cacheControl ? { 'Cache-Control': cacheControl } : {}),
63
+ },
64
+ });
65
+ }
66
+ /** 404 Not Found. */
67
+ export function notFound() {
68
+ return new Response('Not Found', { status: 404 });
69
+ }
@@ -0,0 +1,21 @@
1
+ export interface ScannedModule {
2
+ path: string;
3
+ name: string;
4
+ mod: any;
5
+ }
6
+ export interface ScanResult {
7
+ feeds: ScannedModule[];
8
+ queries: ScannedModule[];
9
+ procedures: ScannedModule[];
10
+ hooks: ScannedModule[];
11
+ setup: ScannedModule[];
12
+ labels: ScannedModule[];
13
+ og: ScannedModule[];
14
+ renderer: ScannedModule | null;
15
+ }
16
+ /**
17
+ * Scan a directory for hatk server modules.
18
+ * Each file's default export is inspected for a `__type` tag.
19
+ */
20
+ export declare function scanServerDir(serverDir: string): Promise<ScanResult>;
21
+ //# sourceMappingURL=scanner.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scanner.d.ts","sourceRoot":"","sources":["../src/scanner.ts"],"names":[],"mappings":"AAIA,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,GAAG,EAAE,GAAG,CAAA;CACT;AAED,MAAM,WAAW,UAAU;IACzB,KAAK,EAAE,aAAa,EAAE,CAAA;IACtB,OAAO,EAAE,aAAa,EAAE,CAAA;IACxB,UAAU,EAAE,aAAa,EAAE,CAAA;IAC3B,KAAK,EAAE,aAAa,EAAE,CAAA;IACtB,KAAK,EAAE,aAAa,EAAE,CAAA;IACtB,MAAM,EAAE,aAAa,EAAE,CAAA;IACvB,EAAE,EAAE,aAAa,EAAE,CAAA;IACnB,QAAQ,EAAE,aAAa,GAAG,IAAI,CAAA;CAC/B;AAmBD;;;GAGG;AACH,wBAAsB,aAAa,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CA2D1E"}
@@ -0,0 +1,88 @@
1
+ var __rewriteRelativeImportExtension = (this && this.__rewriteRelativeImportExtension) || function (path, preserveJsx) {
2
+ if (typeof path === "string" && /^\.\.?\//.test(path)) {
3
+ return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) {
4
+ return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." + cm.toLowerCase() + "js");
5
+ });
6
+ }
7
+ return path;
8
+ };
9
+ import { resolve, relative } from 'node:path';
10
+ import { readdirSync, statSync, existsSync } from 'node:fs';
11
+ import { log } from "./logger.js";
12
+ /** Recursively collect .ts/.js files, skipping _ prefixed and dot files */
13
+ function walkDir(dir) {
14
+ const results = [];
15
+ try {
16
+ for (const entry of readdirSync(dir)) {
17
+ if (entry.startsWith('_') || entry.startsWith('.'))
18
+ continue;
19
+ const full = resolve(dir, entry);
20
+ if (statSync(full).isDirectory()) {
21
+ results.push(...walkDir(full));
22
+ }
23
+ else if (entry.endsWith('.ts') || entry.endsWith('.js')) {
24
+ results.push(full);
25
+ }
26
+ }
27
+ }
28
+ catch { }
29
+ return results.sort();
30
+ }
31
+ /**
32
+ * Scan a directory for hatk server modules.
33
+ * Each file's default export is inspected for a `__type` tag.
34
+ */
35
+ export async function scanServerDir(serverDir) {
36
+ const result = {
37
+ feeds: [],
38
+ queries: [],
39
+ procedures: [],
40
+ hooks: [],
41
+ setup: [],
42
+ labels: [],
43
+ og: [],
44
+ renderer: null,
45
+ };
46
+ if (!existsSync(serverDir))
47
+ return result;
48
+ const files = walkDir(serverDir);
49
+ for (const filePath of files) {
50
+ const name = relative(serverDir, filePath).replace(/\.(ts|js)$/, '');
51
+ const mod = await import(__rewriteRelativeImportExtension(/* @vite-ignore */ `${filePath}?t=${Date.now()}`));
52
+ const exported = mod.default;
53
+ if (!exported) {
54
+ log(`[scanner] ${name}: no default export, skipping`);
55
+ continue;
56
+ }
57
+ const entry = { path: filePath, name, mod: exported };
58
+ switch (exported.__type) {
59
+ case 'feed':
60
+ result.feeds.push(entry);
61
+ break;
62
+ case 'query':
63
+ result.queries.push(entry);
64
+ break;
65
+ case 'procedure':
66
+ result.procedures.push(entry);
67
+ break;
68
+ case 'hook':
69
+ result.hooks.push(entry);
70
+ break;
71
+ case 'setup':
72
+ result.setup.push(entry);
73
+ break;
74
+ case 'labels':
75
+ result.labels.push(entry);
76
+ break;
77
+ case 'og':
78
+ result.og.push(entry);
79
+ break;
80
+ case 'renderer':
81
+ result.renderer = entry;
82
+ break;
83
+ default:
84
+ log(`[scanner] ${name}: no recognized __type tag, skipping`);
85
+ }
86
+ }
87
+ return result;
88
+ }
package/dist/schema.d.ts CHANGED
@@ -48,4 +48,12 @@ export declare function getAllLexicons(): Array<{
48
48
  export declare function getLexiconArray(): any[];
49
49
  export declare function generateTableSchema(nsid: string, lexicon: any, lexicons?: Map<string, any>): TableSchema;
50
50
  export declare function generateCreateTableSQL(schema: TableSchema): string;
51
+ /**
52
+ * Build table schemas and DDL from lexicons and collections.
53
+ * Shared by main.ts (server boot) and cli.ts (hatk schema command).
54
+ */
55
+ export declare function buildSchemas(lexicons: Map<string, any>, collections: string[]): {
56
+ schemas: TableSchema[];
57
+ ddlStatements: string[];
58
+ };
51
59
  //# sourceMappingURL=schema.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../src/schema.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,SAAS;IACxB,IAAI,EAAE,MAAM,CAAA;IACZ,YAAY,EAAE,MAAM,CAAA;IACpB,UAAU,EAAE,MAAM,CAAA;IAClB,OAAO,EAAE,OAAO,CAAA;IAChB,KAAK,EAAE,OAAO,CAAA;CACf;AAED,MAAM,WAAW,iBAAiB;IAChC,IAAI,EAAE,MAAM,CAAA;IACZ,UAAU,EAAE,MAAM,CAAA;IAClB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;IACpB,OAAO,EAAE,OAAO,CAAA;IAChB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,YAAY,CAAC,EAAE,MAAM,CAAA;CACtB;AAED,MAAM,WAAW,gBAAgB;IAC/B,SAAS,EAAE,MAAM,CAAA;IACjB,QAAQ,EAAE,iBAAiB,EAAE,CAAA;CAC9B;AAED,MAAM,WAAW,WAAW;IAC1B,UAAU,EAAE,MAAM,CAAA;IAClB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;IACpB,UAAU,EAAE,MAAM,EAAE,CAAA;IACpB,QAAQ,EAAE,gBAAgB,EAAE,CAAA;IAC5B,MAAM,EAAE,gBAAgB,EAAE,CAAA;CAC3B;AAED,MAAM,WAAW,gBAAgB;IAC/B,gBAAgB,EAAE,MAAM,CAAA;IACxB,SAAS,EAAE,MAAM,CAAA;IACjB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;CACrB;AAGD,wBAAgB,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAE/C;AA8CD,wBAAgB,YAAY,CAAC,WAAW,EAAE,MAAM,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CASlE;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,MAAM,EAAE,CASxE;AAID,wBAAgB,aAAa,CAAC,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,CAI9D;AAED,wBAAgB,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,GAAG,GAAG,SAAS,CAExD;AAED,wBAAgB,cAAc,IAAI,KAAK,CAAC;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,GAAG,CAAA;CAAE,CAAC,CAEtE;AAED,iFAAiF;AACjF,wBAAgB,eAAe,IAAI,GAAG,EAAE,CAEvC;AAuHD,wBAAgB,mBAAmB,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,QAAQ,CAAC,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,WAAW,CA0GxG;AAGD,wBAAgB,sBAAsB,CAAC,MAAM,EAAE,WAAW,GAAG,MAAM,CAoElE"}
1
+ {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../src/schema.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,SAAS;IACxB,IAAI,EAAE,MAAM,CAAA;IACZ,YAAY,EAAE,MAAM,CAAA;IACpB,UAAU,EAAE,MAAM,CAAA;IAClB,OAAO,EAAE,OAAO,CAAA;IAChB,KAAK,EAAE,OAAO,CAAA;CACf;AAED,MAAM,WAAW,iBAAiB;IAChC,IAAI,EAAE,MAAM,CAAA;IACZ,UAAU,EAAE,MAAM,CAAA;IAClB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;IACpB,OAAO,EAAE,OAAO,CAAA;IAChB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,YAAY,CAAC,EAAE,MAAM,CAAA;CACtB;AAED,MAAM,WAAW,gBAAgB;IAC/B,SAAS,EAAE,MAAM,CAAA;IACjB,QAAQ,EAAE,iBAAiB,EAAE,CAAA;CAC9B;AAED,MAAM,WAAW,WAAW;IAC1B,UAAU,EAAE,MAAM,CAAA;IAClB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;IACpB,UAAU,EAAE,MAAM,EAAE,CAAA;IACpB,QAAQ,EAAE,gBAAgB,EAAE,CAAA;IAC5B,MAAM,EAAE,gBAAgB,EAAE,CAAA;CAC3B;AAED,MAAM,WAAW,gBAAgB;IAC/B,gBAAgB,EAAE,MAAM,CAAA;IACxB,SAAS,EAAE,MAAM,CAAA;IACjB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;CACrB;AAGD,wBAAgB,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAE/C;AA8CD,wBAAgB,YAAY,CAAC,WAAW,EAAE,MAAM,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CASlE;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,MAAM,EAAE,CASxE;AAID,wBAAgB,aAAa,CAAC,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,CAI9D;AAED,wBAAgB,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,GAAG,GAAG,SAAS,CAExD;AAED,wBAAgB,cAAc,IAAI,KAAK,CAAC;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,GAAG,CAAA;CAAE,CAAC,CAEtE;AAED,iFAAiF;AACjF,wBAAgB,eAAe,IAAI,GAAG,EAAE,CAEvC;AAuHD,wBAAgB,mBAAmB,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,QAAQ,CAAC,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,WAAW,CA0GxG;AAGD,wBAAgB,sBAAsB,CAAC,MAAM,EAAE,WAAW,GAAG,MAAM,CAoElE;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAC1B,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,EAC1B,WAAW,EAAE,MAAM,EAAE,GACpB;IAAE,OAAO,EAAE,WAAW,EAAE,CAAC;IAAC,aAAa,EAAE,MAAM,EAAE,CAAA;CAAE,CA2BrD"}
package/dist/schema.js CHANGED
@@ -356,3 +356,32 @@ export function generateCreateTableSQL(schema) {
356
356
  }
357
357
  return [createTable, ...indexes, ...childDDL].join('\n');
358
358
  }
359
+ /**
360
+ * Build table schemas and DDL from lexicons and collections.
361
+ * Shared by main.ts (server boot) and cli.ts (hatk schema command).
362
+ */
363
+ export function buildSchemas(lexicons, collections) {
364
+ const schemas = [];
365
+ const ddlStatements = [];
366
+ for (const nsid of collections) {
367
+ const lexicon = lexicons.get(nsid);
368
+ if (!lexicon) {
369
+ const genericDDL = `CREATE TABLE IF NOT EXISTS "${nsid}" (
370
+ uri TEXT PRIMARY KEY,
371
+ cid TEXT,
372
+ did TEXT NOT NULL,
373
+ indexed_at TIMESTAMP NOT NULL,
374
+ data JSON
375
+ );
376
+ CREATE INDEX IF NOT EXISTS idx_${nsid.replace(/\./g, '_')}_indexed ON "${nsid}"(indexed_at DESC);
377
+ CREATE INDEX IF NOT EXISTS idx_${nsid.replace(/\./g, '_')}_author ON "${nsid}"(did);`;
378
+ schemas.push({ collection: nsid, tableName: `"${nsid}"`, columns: [], refColumns: [], children: [], unions: [] });
379
+ ddlStatements.push(genericDDL);
380
+ continue;
381
+ }
382
+ const schema = generateTableSchema(nsid, lexicon, lexicons);
383
+ schemas.push(schema);
384
+ ddlStatements.push(generateCreateTableSQL(schema));
385
+ }
386
+ return { schemas, ddlStatements };
387
+ }
package/dist/seed.d.ts CHANGED
@@ -1,8 +1,10 @@
1
+ /** Authenticated PDS session — returned by {@link seed.createAccount}. */
1
2
  export type Session = {
2
3
  did: string;
3
4
  accessJwt: string;
4
5
  handle: string;
5
6
  };
7
+ /** AT Protocol blob reference, as returned by `com.atproto.repo.uploadBlob`. */
6
8
  export type BlobRef = {
7
9
  $type: 'blob';
8
10
  ref: {
@@ -11,11 +13,23 @@ export type BlobRef = {
11
13
  mimeType: string;
12
14
  size: number;
13
15
  };
16
+ /** Options for the seed helper. All fields fall back to env vars or sensible defaults. */
14
17
  export type SeedOpts = {
15
18
  pds?: string;
16
19
  password?: string;
17
20
  lexicons?: string;
18
21
  };
22
+ /**
23
+ * Create a seed helper for populating a local PDS with test data.
24
+ *
25
+ * Returns `createAccount`, `createRecord`, and `uploadBlob` functions bound to
26
+ * the target PDS. Records are validated against the project's lexicons before
27
+ * being written. Generic parameter `R` maps collection NSIDs to their record types
28
+ * for type-safe seeding.
29
+ *
30
+ * @typeParam R - Map of collection NSID → record type (defaults to untyped)
31
+ * @param opts - PDS URL, password, and lexicon directory overrides
32
+ */
19
33
  export declare function seed<R extends Record<string, unknown> = Record<string, unknown>>(opts?: SeedOpts): {
20
34
  createAccount: (handle: string) => Promise<Session>;
21
35
  createRecord: <K extends keyof R & string>(session: Session, collection: K, record: R[K] extends Record<string, unknown> ? R[K] : Record<string, unknown>, opts: {
@@ -23,6 +37,11 @@ export declare function seed<R extends Record<string, unknown> = Record<string,
23
37
  }) => Promise<{
24
38
  uri: string;
25
39
  cid: string;
40
+ commit: {
41
+ cid: string;
42
+ rev: string;
43
+ };
44
+ validationStatus: string;
26
45
  }>;
27
46
  uploadBlob: (session: Session, filePath: string) => Promise<BlobRef>;
28
47
  };
@@ -1 +1 @@
1
- {"version":3,"file":"seed.d.ts","sourceRoot":"","sources":["../src/seed.ts"],"names":[],"mappings":"AAKA,MAAM,MAAM,OAAO,GAAG;IAAE,GAAG,EAAE,MAAM,CAAC;IAAC,SAAS,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,MAAM,CAAA;CAAE,CAAA;AACxE,MAAM,MAAM,OAAO,GAAG;IAAE,KAAK,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE;QAAE,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,MAAM,CAAA;CAAE,CAAA;AAC/F,MAAM,MAAM,QAAQ,GAAG;IAAE,GAAG,CAAC,EAAE,MAAM,CAAC;IAAC,QAAQ,CAAC,EAAE,MAAM,CAAC;IAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;CAAE,CAAA;AAE7E,wBAAgB,IAAI,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,EAAE,QAAQ;4BAM1D,MAAM,KAAG,OAAO,CAAC,OAAO,CAAC;mBA4BlC,CAAC,SAAS,MAAM,CAAC,GAAG,MAAM,WAC3C,OAAO,cACJ,CAAC,UACL,CAAC,CAAC,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,QACvE;QAAE,IAAI,EAAE,MAAM,CAAA;KAAE,KACrB,OAAO,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,GAAG,EAAE,MAAM,CAAA;KAAE,CAAC;0BA4BL,OAAO,YAAY,MAAM,KAAG,OAAO,CAAC,OAAO,CAAC;EA4BhF"}
1
+ {"version":3,"file":"seed.d.ts","sourceRoot":"","sources":["../src/seed.ts"],"names":[],"mappings":"AA8BA,0EAA0E;AAC1E,MAAM,MAAM,OAAO,GAAG;IAAE,GAAG,EAAE,MAAM,CAAC;IAAC,SAAS,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,MAAM,CAAA;CAAE,CAAA;AAExE,gFAAgF;AAChF,MAAM,MAAM,OAAO,GAAG;IAAE,KAAK,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE;QAAE,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,MAAM,CAAA;CAAE,CAAA;AAE/F,0FAA0F;AAC1F,MAAM,MAAM,QAAQ,GAAG;IAAE,GAAG,CAAC,EAAE,MAAM,CAAC;IAAC,QAAQ,CAAC,EAAE,MAAM,CAAC;IAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;CAAE,CAAA;AAE7E;;;;;;;;;;GAUG;AACH,wBAAgB,IAAI,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,EAAE,QAAQ;4BAO1D,MAAM,KAAG,OAAO,CAAC,OAAO,CAAC;mBA6BlC,CAAC,SAAS,MAAM,CAAC,GAAG,MAAM,WAC3C,OAAO,cACJ,CAAC,UACL,CAAC,CAAC,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,QACvE;QAAE,IAAI,EAAE,MAAM,CAAA;KAAE,KACrB,OAAO,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,GAAG,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE;YAAE,GAAG,EAAE,MAAM,CAAC;YAAC,GAAG,EAAE,MAAM,CAAA;SAAE,CAAC;QAAC,gBAAgB,EAAE,MAAM,CAAA;KAAE,CAAC;0BAkCrE,OAAO,YAAY,MAAM,KAAG,OAAO,CAAC,OAAO,CAAC;EA4BhF"}
package/dist/seed.js CHANGED
@@ -1,12 +1,49 @@
1
- import { loadLexicons } from "./schema.js";
1
+ /**
2
+ * Test data seeding helpers for populating a local PDS.
3
+ *
4
+ * Place a seed script at `seeds/seed.ts`. It runs during `hatk dev` to create
5
+ * accounts and records against your local PDS. Records are validated against
6
+ * your project's lexicons before being written.
7
+ *
8
+ * @example
9
+ * ```ts
10
+ * // seeds/seed.ts
11
+ * import { seed } from '../hatk.generated.ts'
12
+ *
13
+ * const { createAccount, createRecord } = seed()
14
+ *
15
+ * const alice = await createAccount('alice.test')
16
+ * const bob = await createAccount('bob.test')
17
+ *
18
+ * await createRecord(
19
+ * alice,
20
+ * 'xyz.statusphere.status',
21
+ * { status: '👍', createdAt: new Date().toISOString() },
22
+ * { rkey: 'status1' },
23
+ * )
24
+ * ```
25
+ */
26
+ import { loadLexicons } from "./database/schema.js";
2
27
  import { validateRecord } from '@bigmoves/lexicon';
3
28
  import { resolve } from 'node:path';
4
29
  import { readFileSync } from 'node:fs';
30
+ /**
31
+ * Create a seed helper for populating a local PDS with test data.
32
+ *
33
+ * Returns `createAccount`, `createRecord`, and `uploadBlob` functions bound to
34
+ * the target PDS. Records are validated against the project's lexicons before
35
+ * being written. Generic parameter `R` maps collection NSIDs to their record types
36
+ * for type-safe seeding.
37
+ *
38
+ * @typeParam R - Map of collection NSID → record type (defaults to untyped)
39
+ * @param opts - PDS URL, password, and lexicon directory overrides
40
+ */
5
41
  export function seed(opts) {
6
42
  const pdsUrl = opts?.pds || process.env.PDS_URL || 'http://localhost:2583';
7
43
  const password = opts?.password || process.env.SEED_PASSWORD || 'password';
8
44
  const lexiconsDir = resolve(opts?.lexicons || 'lexicons');
9
45
  const lexiconArray = [...loadLexicons(lexiconsDir).values()];
46
+ /** Create a PDS account (or reuse an existing one) and return an authenticated session. */
10
47
  async function createAccount(handle) {
11
48
  const res = await fetch(`${pdsUrl}/xrpc/com.atproto.server.createAccount`, {
12
49
  method: 'POST',
@@ -34,6 +71,7 @@ export function seed(opts) {
34
71
  const session = (await sessionRes.json());
35
72
  return { ...session, handle };
36
73
  }
74
+ /** Validate a record against its lexicon and write it to the PDS via `putRecord`. */
37
75
  async function createRecord(session, collection, record, opts) {
38
76
  const error = validateRecord(lexiconArray, collection, record);
39
77
  if (error) {
@@ -53,10 +91,11 @@ export function seed(opts) {
53
91
  if (!res.ok) {
54
92
  throw new Error(`[seed] [${session.handle}] failed to create ${collection}: ${await res.text()}`);
55
93
  }
56
- const { uri, cid } = (await res.json());
57
- console.log(`[seed] [${session.handle}] ${collection} → ${uri}`);
58
- return { uri, cid };
94
+ const result = (await res.json());
95
+ console.log(`[seed] [${session.handle}] ${collection} → ${result.uri}`);
96
+ return result;
59
97
  }
98
+ /** Upload a file to the PDS as a blob. MIME type is inferred from the file extension. */
60
99
  async function uploadBlob(session, filePath) {
61
100
  const data = readFileSync(resolve(filePath));
62
101
  const ext = filePath.split('.').pop()?.toLowerCase() || '';
@@ -0,0 +1,8 @@
1
+ /**
2
+ * Scan the server/ directory and register all discovered handlers.
3
+ * Setup scripts run immediately (in sorted order).
+ *
+ * @param serverDir - Path to the server/ directory to scan
+ * @param opts - Pass `skipSetup: true` to skip running setup scripts (e.g. in tests)
+ * @returns Resolves once all discovered handlers are registered
4
+ */
5
+ export declare function initServer(serverDir: string, opts?: {
6
+ skipSetup?: boolean;
7
+ }): Promise<void>;
8
+ //# sourceMappingURL=server-init.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"server-init.d.ts","sourceRoot":"","sources":["../src/server-init.ts"],"names":[],"mappings":"AAWA;;;GAGG;AACH,wBAAsB,UAAU,CAAC,SAAS,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE;IAAE,SAAS,CAAC,EAAE,OAAO,CAAA;CAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CA2DjG"}
@@ -0,0 +1,61 @@
1
+ import { existsSync } from 'node:fs';
2
+ import { log } from "./logger.js";
3
+ import { scanServerDir } from "./scanner.js";
4
+ import { registerFeed, listFeeds } from "./feeds.js";
5
+ import { registerXrpcHandler, listXrpc } from "./xrpc.js";
6
+ import { registerLabelModule, getLabelDefinitions } from "./labels.js";
7
+ import { registerOgHandler } from "./opengraph.js";
8
+ import { registerHook } from "./hooks.js";
9
+ import { runSetupHandler } from "./setup.js";
10
+ import { registerRenderer } from "./renderer.js";
11
+ /**
12
+ * Scan the server/ directory and register all discovered handlers.
13
+ * Setup scripts run immediately (in sorted order).
+ *
+ * @param {string} serverDir - Path to the server/ directory; missing directory is a no-op
+ * @param {{ skipSetup?: boolean }} [opts] - `skipSetup: true` bypasses setup scripts
+ * @returns {Promise<void>} Resolves after all registrations and setup scripts complete
14
+ */
15
+ export async function initServer(serverDir, opts) {
16
+ if (!existsSync(serverDir)) {
17
+ log(`[server] No server/ directory found, skipping`);
18
+ return;
19
+ }
20
+ const scanned = await scanServerDir(serverDir);
21
+ // 1. Run setup scripts first (sorted by name) — skipped in test context
22
+ if (!opts?.skipSetup) {
+ // NOTE(review): .sort() mutates scanned.setup in place; harmless if scanned
+ // is not reused elsewhere — confirm scanServerDir returns a fresh object.
23
+ for (const entry of scanned.setup.sort((a, b) => a.name.localeCompare(b.name))) {
+ // Setup scripts run sequentially, awaiting each before the next.
24
+ await runSetupHandler(entry.name, entry.mod.handler);
25
+ }
26
+ }
27
+ // 2. Register feeds
28
+ for (const entry of scanned.feeds) {
+ // Nested feed files (e.g. "sub/dir/name") register under their basename.
29
+ const feedName = entry.name.includes('/') ? entry.name.split('/').pop() : entry.name;
30
+ registerFeed(feedName, entry.mod);
31
+ }
32
+ // 3. Register XRPC handlers (queries and procedures share one registry, keyed by NSID)
33
+ for (const entry of scanned.queries) {
34
+ registerXrpcHandler(entry.mod.nsid, entry.mod);
35
+ }
36
+ for (const entry of scanned.procedures) {
37
+ registerXrpcHandler(entry.mod.nsid, entry.mod);
38
+ }
39
+ // 4. Register hooks (each module declares the event it listens for)
40
+ for (const entry of scanned.hooks) {
41
+ registerHook(entry.mod.event, entry.mod.handler);
42
+ }
43
+ // 5. Register labels
44
+ for (const entry of scanned.labels) {
45
+ registerLabelModule(entry.name, entry.mod);
46
+ }
47
+ // 6. Register OG handlers
48
+ for (const entry of scanned.og) {
49
+ registerOgHandler(entry.mod);
50
+ }
51
+ // 7. Register renderer (at most one; later scans would overwrite — see registerRenderer)
52
+ if (scanned.renderer) {
53
+ registerRenderer(scanned.renderer.mod.handler);
54
+ }
+ // Summary log of everything registered above.
55
+ log(`[server] Initialized from server/ directory:`);
56
+ log(` Feeds: ${listFeeds()
57
+ .map((f) => f.name)
58
+ .join(', ') || 'none'}`);
59
+ log(` XRPC: ${listXrpc().join(', ') || 'none'}`);
60
+ log(` Labels: ${getLabelDefinitions().length} definitions`);
61
+ }
package/dist/server.d.ts CHANGED
@@ -1,6 +1,29 @@
1
- import { type Server, type IncomingMessage } from 'node:http';
2
1
  import type { OAuthConfig } from './config.ts';
3
- export declare function startServer(port: number, collections: string[], publicDir: string | null, oauth: OAuthConfig | null, admins?: string[], resolveViewer?: (req: IncomingMessage) => {
2
+ /**
3
+ * Register built-in dev.hatk.* XRPC handlers in the handler registry.
4
+ * This makes them available to callXrpc() for use in SSR and server code.
5
+ */
6
+ export declare function registerCoreHandlers(collections: string[], oauth: OAuthConfig | null): void;
7
+ export interface HandlerConfig {
8
+ /** Collection NSIDs the handler serves — presumably lexicon record collections; verify against caller. */
+ collections: string[];
9
+ /** Directory for static assets, or null to disable static file serving. */
+ publicDir: string | null;
10
+ /** OAuth configuration, or null when OAuth is not configured. */
+ oauth: OAuthConfig | null;
11
+ /** Admin identifiers — NOTE(review): likely DIDs or handles; confirm format. */
+ admins: string[];
12
+ /** Optional SSR renderer: given the request and a manifest, produces page html and optional head markup. */
+ renderer?: (request: Request, manifest: any) => Promise<{
13
+ html: string;
14
+ head?: string;
15
+ }>;
16
+ /** Optional hook resolving the authenticated viewer's DID from a request; null when unauthenticated. */
+ resolveViewer?: (request: Request) => {
17
+ did: string;
18
+ } | null;
19
+ /** Optional callback invoked on resync — semantics not visible here; see server implementation. */
+ onResync?: () => void;
20
+ }
21
+ /**
22
+ * Create a Web Standard request handler for all hatk routes.
23
+ * Returns a pure function: (Request) → Promise<Response>
24
+ */
25
+ export declare function createHandler(config: HandlerConfig): (request: Request) => Promise<Response>;
26
+ export declare function startServer(port: number, collections: string[], publicDir: string | null, oauth: OAuthConfig | null, admins?: string[], resolveViewer?: (request: Request) => {
4
27
  did: string;
5
- } | null): Server;
28
+ } | null, onResync?: () => void): import('node:http').Server;
6
29
  //# sourceMappingURL=server.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"server.d.ts","sourceRoot":"","sources":["../src/server.ts"],"names":[],"mappings":"AAAA,OAAO,EAAgB,KAAK,MAAM,EAAE,KAAK,eAAe,EAAE,MAAM,WAAW,CAAA;AAmD3E,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AA2B9C,wBAAgB,WAAW,CACzB,IAAI,EAAE,MAAM,EACZ,WAAW,EAAE,MAAM,EAAE,EACrB,SAAS,EAAE,MAAM,GAAG,IAAI,EACxB,KAAK,EAAE,WAAW,GAAG,IAAI,EACzB,MAAM,GAAE,MAAM,EAAO,EACrB,aAAa,CAAC,EAAE,CAAC,GAAG,EAAE,eAAe,KAAK;IAAE,GAAG,EAAE,MAAM,CAAA;CAAE,GAAG,IAAI,GAC/D,MAAM,CAo8BR"}
1
+ {"version":3,"file":"server.d.ts","sourceRoot":"","sources":["../src/server.ts"],"names":[],"mappings":"AAqDA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AA0B9C;;;GAGG;AACH,wBAAgB,oBAAoB,CAAC,WAAW,EAAE,MAAM,EAAE,EAAE,KAAK,EAAE,WAAW,GAAG,IAAI,GAAG,IAAI,CAwH3F;AAED,MAAM,WAAW,aAAa;IAC5B,WAAW,EAAE,MAAM,EAAE,CAAA;IACrB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAA;IACxB,KAAK,EAAE,WAAW,GAAG,IAAI,CAAA;IACzB,MAAM,EAAE,MAAM,EAAE,CAAA;IAChB,QAAQ,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,KAAK,OAAO,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,IAAI,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC,CAAA;IACxF,aAAa,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK;QAAE,GAAG,EAAE,MAAM,CAAA;KAAE,GAAG,IAAI,CAAA;IAC5D,QAAQ,CAAC,EAAE,MAAM,IAAI,CAAA;CACtB;AAED;;;GAGG;AACH,wBAAgB,aAAa,CAAC,MAAM,EAAE,aAAa,GAAG,CAAC,OAAO,EAAE,OAAO,KAAK,OAAO,CAAC,QAAQ,CAAC,CA8xB5F;AAGD,wBAAgB,WAAW,CACzB,IAAI,EAAE,MAAM,EACZ,WAAW,EAAE,MAAM,EAAE,EACrB,SAAS,EAAE,MAAM,GAAG,IAAI,EACxB,KAAK,EAAE,WAAW,GAAG,IAAI,EACzB,MAAM,GAAE,MAAM,EAAO,EACrB,aAAa,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK;IAAE,GAAG,EAAE,MAAM,CAAA;CAAE,GAAG,IAAI,EAC5D,QAAQ,CAAC,EAAE,MAAM,IAAI,GACpB,OAAO,WAAW,EAAE,MAAM,CAG5B"}