@powerhousedao/switchboard 6.0.0-dev.23 → 6.0.0-dev.231

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (82)
  1. package/Auth.md +45 -27
  2. package/CHANGELOG.md +1786 -5
  3. package/README.md +13 -12
  4. package/dist/esm-aE6sDAbA.mjs +6481 -0
  5. package/dist/esm-aE6sDAbA.mjs.map +1 -0
  6. package/dist/index.d.mts +1 -0
  7. package/dist/index.mjs +134 -0
  8. package/dist/index.mjs.map +1 -0
  9. package/dist/install-packages.d.mts +1 -0
  10. package/dist/install-packages.mjs +31 -0
  11. package/dist/install-packages.mjs.map +1 -0
  12. package/dist/migrate.d.mts +1 -0
  13. package/dist/migrate.mjs +55 -0
  14. package/dist/migrate.mjs.map +1 -0
  15. package/dist/server-UGYERfMo.mjs +762 -0
  16. package/dist/server-UGYERfMo.mjs.map +1 -0
  17. package/dist/server.d.mts +113 -0
  18. package/dist/server.d.mts.map +1 -0
  19. package/dist/server.mjs +4 -0
  20. package/dist/utils-DFl0ezBT.mjs +44 -0
  21. package/dist/utils-DFl0ezBT.mjs.map +1 -0
  22. package/dist/utils.d.mts +9 -0
  23. package/dist/utils.d.mts.map +1 -0
  24. package/dist/utils.mjs +2 -0
  25. package/package.json +58 -40
  26. package/test/attachments/auth.test.ts +219 -0
  27. package/test/attachments/index.test.ts +119 -0
  28. package/test/attachments/routes-integration.test.ts +103 -0
  29. package/test/attachments/routes.test.ts +864 -0
  30. package/test/metrics.test.ts +202 -0
  31. package/test/pglite-dialect.test.ts +40 -0
  32. package/test/pglite-version.test.ts +37 -0
  33. package/tsconfig.json +12 -3
  34. package/tsdown.config.ts +16 -0
  35. package/vitest.config.ts +11 -0
  36. package/Dockerfile +0 -86
  37. package/dist/src/clients/redis.d.ts +0 -5
  38. package/dist/src/clients/redis.d.ts.map +0 -1
  39. package/dist/src/clients/redis.js +0 -48
  40. package/dist/src/clients/redis.js.map +0 -1
  41. package/dist/src/config.d.ts +0 -12
  42. package/dist/src/config.d.ts.map +0 -1
  43. package/dist/src/config.js +0 -33
  44. package/dist/src/config.js.map +0 -1
  45. package/dist/src/connect-crypto.d.ts +0 -41
  46. package/dist/src/connect-crypto.d.ts.map +0 -1
  47. package/dist/src/connect-crypto.js +0 -127
  48. package/dist/src/connect-crypto.js.map +0 -1
  49. package/dist/src/feature-flags.d.ts +0 -2
  50. package/dist/src/feature-flags.d.ts.map +0 -1
  51. package/dist/src/feature-flags.js +0 -9
  52. package/dist/src/feature-flags.js.map +0 -1
  53. package/dist/src/index.d.ts +0 -3
  54. package/dist/src/index.d.ts.map +0 -1
  55. package/dist/src/index.js +0 -21
  56. package/dist/src/index.js.map +0 -1
  57. package/dist/src/install-packages.d.ts +0 -2
  58. package/dist/src/install-packages.d.ts.map +0 -1
  59. package/dist/src/install-packages.js +0 -36
  60. package/dist/src/install-packages.js.map +0 -1
  61. package/dist/src/migrate.d.ts +0 -3
  62. package/dist/src/migrate.d.ts.map +0 -1
  63. package/dist/src/migrate.js +0 -65
  64. package/dist/src/migrate.js.map +0 -1
  65. package/dist/src/profiler.d.ts +0 -4
  66. package/dist/src/profiler.d.ts.map +0 -1
  67. package/dist/src/profiler.js +0 -17
  68. package/dist/src/profiler.js.map +0 -1
  69. package/dist/src/server.d.ts +0 -6
  70. package/dist/src/server.d.ts.map +0 -1
  71. package/dist/src/server.js +0 -304
  72. package/dist/src/server.js.map +0 -1
  73. package/dist/src/types.d.ts +0 -64
  74. package/dist/src/types.d.ts.map +0 -1
  75. package/dist/src/types.js +0 -2
  76. package/dist/src/types.js.map +0 -1
  77. package/dist/src/utils.d.ts +0 -6
  78. package/dist/src/utils.d.ts.map +0 -1
  79. package/dist/src/utils.js +0 -92
  80. package/dist/src/utils.js.map +0 -1
  81. package/dist/tsconfig.tsbuildinfo +0 -1
  82. package/entrypoint.sh +0 -17
package/dist/server-UGYERfMo.mjs
@@ -0,0 +1,762 @@
+ import { n as isPostgresUrl, t as addDefaultDrive } from "./utils-DFl0ezBT.mjs";
+ import { register } from "node:module";
+ import * as Sentry from "@sentry/node";
+ import { childLogger, documentModelDocumentModelModule, setLogLevel } from "document-model";
+ import dotenv from "dotenv";
+ import { getConfig } from "@powerhousedao/config/node";
+ import { promises } from "node:fs";
+ import path from "node:path";
+ import { metrics } from "@opentelemetry/api";
+ import { ReactorInstrumentation } from "@powerhousedao/opentelemetry-instrumentation-reactor";
+ import { ChannelScheme, EventBus, ReactorBuilder, ReactorClientBuilder, driveCollectionId, parseDriveUrl } from "@powerhousedao/reactor";
+ import { HttpPackageLoader, ImportPackageLoader, PackageManagementService, PackagesSubgraph, getUniqueDocumentModels, initializeAndStartAPI } from "@powerhousedao/reactor-api";
+ import { httpsHooksPath } from "@powerhousedao/reactor-api/https-hooks";
+ import { VitePackageLoader, createViteLogger, startViteServer } from "@powerhousedao/reactor-api/vite";
+ import { driveDocumentModelModule } from "@powerhousedao/shared/document-drive";
+ import { documentModels } from "@powerhousedao/vetra";
+ import { processorFactory } from "@powerhousedao/vetra/processors";
+ import { Kysely, PostgresDialect } from "kysely";
+ import { PGliteDialect } from "kysely-pglite-dialect";
+ import net from "node:net";
+ import path$1 from "path";
+ import { Pool } from "pg";
+ import { AttachmentNotFound, InvalidAttachmentRef, ReservationNotFound } from "@powerhousedao/reactor-attachments";
+ import { Readable } from "node:stream";
+ import { EnvVarProvider } from "@openfeature/env-var-provider";
+ import { OpenFeature } from "@openfeature/server-sdk";
+ import { DEFAULT_RENOWN_URL, NodeKeyStorage, RenownBuilder, RenownCryptoBuilder, createSignatureVerifier } from "@renown/sdk/node";
+ //#region src/pglite-version.ts
+ const SUPPORTED_PG_MAJORS = [16, 17];
+ async function readPgVersionFile(dataDir) {
+ try {
+ const raw = await promises.readFile(path.join(dataDir, "PG_VERSION"), "utf8");
+ const major = parseInt(raw.trim(), 10);
+ return Number.isFinite(major) ? major : null;
+ } catch {
+ return null;
+ }
+ }
+ function isSupportedMajor(major) {
+ return SUPPORTED_PG_MAJORS.includes(major);
+ }
+ /**
+ * Parses the `PH_FORCE_PG_VERSION` env var. Returns the validated major, or
+ * `null` when the var is unset/empty. Throws on any value that is not a
+ * supported major — invalid configuration must fail before the server starts
+ * touching disk.
+ */
+ function parseForcePgVersion(raw) {
+ if (raw === void 0 || raw.trim() === "") return null;
+ const parsed = Number(raw);
+ if (Number.isInteger(parsed) && isSupportedMajor(parsed)) return parsed;
+ throw new Error(`PH_FORCE_PG_VERSION must be one of: ${SUPPORTED_PG_MAJORS.join(", ")} (got: ${raw})`);
+ }
+ async function loadPGliteModule(major) {
+ if (major === 16) return await import("pglite-legacy-02");
+ return import("@electric-sql/pglite");
+ }
+ async function loadPgDump(major) {
+ if (major === 16) return (await import("pglite-tools-legacy-02/pg_dump")).pgDump;
+ return (await import("@electric-sql/pglite-tools/pg_dump")).pgDump;
+ }
+ //#endregion
+ //#region src/pglite-dialect.ts
+ var ClosablePGliteDialect = class extends PGliteDialect {
+ #pglite;
+ constructor(pglite) {
+ super(pglite);
+ this.#pglite = pglite;
+ }
+ createDriver() {
+ const driver = super.createDriver();
+ const pglite = this.#pglite;
+ const innerDestroy = driver.destroy.bind(driver);
+ driver.destroy = async () => {
+ await innerDestroy();
+ if (!pglite.closed) await pglite.close();
+ };
+ return driver;
+ }
+ };
+ //#endregion
+ //#region src/attachments/auth.ts
+ /**
+ * Wrap a Node-style handler so that, when `authService` is provided and auth is
+ * enabled, the request must carry a verifiable Bearer token.
+ */
+ function requireAuth(authService, handler) {
+ if (!authService) return handler;
+ return async (req, res) => {
+ let result;
+ try {
+ result = await authService.verifyBearer(req.headers.authorization);
+ } catch {
+ res.statusCode = 500;
+ res.setHeader("Content-Type", "application/json");
+ res.end(JSON.stringify({ error: "Internal authentication error" }));
+ return;
+ }
+ if (result instanceof Response) {
+ const body = await result.text();
+ res.statusCode = result.status;
+ const contentType = result.headers.get("content-type");
+ if (contentType) res.setHeader("Content-Type", contentType);
+ res.end(body);
+ return;
+ }
+ if (result.auth_enabled && !result.user) {
+ res.statusCode = 401;
+ res.setHeader("Content-Type", "application/json");
+ res.end(JSON.stringify({ error: "Authentication required" }));
+ return;
+ }
+ await handler(req, res);
+ };
+ }
+ //#endregion
+ //#region src/attachments/mount-auth.ts
+ /**
+ * Mount a Node-style attachment route with `requireAuth` applied unconditionally.
+ * When `api.authService` is undefined (auth disabled), `requireAuth` returns the
+ * handler unchanged — that is the only way to opt out. To register a route
+ * without auth wrapping you must call `api.httpAdapter.mountNodeRoute` directly.
+ */
+ function mountAuthenticatedNodeRoute(api, method, path, handler) {
+ api.httpAdapter.mountNodeRoute(method, path, requireAuth(api.authService, handler));
+ }
+ //#endregion
+ //#region src/attachments/routes.ts
+ const logger$1 = childLogger(["switchboard", "attachments"]);
+ const HASH_PATTERN = /^[a-f0-9]{64}$/i;
+ const CONTROL_CHARS = /[\x00-\x1f\x7f]/;
+ const MIME_TYPE_PATTERN = /^[!#$%&'*+\-.^_`|~\w]+\/[!#$%&'*+\-.^_`|~\w]+(?:\s*;\s*[!#$%&'*+\-.^_`|~\w]+=(?:[!#$%&'*+\-.^_`|~\w]+|"(?:[^"\\\r\n]|\\[^\r\n])*"))*$/;
+ const MAX_FILENAME_LEN = 255;
+ const MAX_MIMETYPE_LEN = 255;
+ function sendJson(res, status, body) {
+ res.statusCode = status;
+ res.setHeader("Content-Type", "application/json");
+ res.end(JSON.stringify(body));
+ }
+ function sendError(res, status, message) {
+ sendJson(res, status, { error: message });
+ }
+ function statusForError(err) {
+ if (err instanceof AttachmentNotFound) return 404;
+ if (err instanceof ReservationNotFound) return 404;
+ if (err instanceof InvalidAttachmentRef) return 400;
+ return 500;
+ }
+ function sendErrorFromException(res, err) {
+ const status = statusForError(err);
+ if (status >= 500) {
+ logger$1.error("Attachment route error: @error", err);
+ sendError(res, status, "Internal error");
+ return;
+ }
+ sendError(res, status, err instanceof Error ? err.message : String(err));
+ }
+ async function readJsonBody(req, body) {
+ if (body !== void 0 && body !== null && typeof body === "object") return body;
+ const chunks = [];
+ for await (const chunk of req) chunks.push(chunk);
+ if (chunks.length === 0) return void 0;
+ const text = Buffer.concat(chunks).toString("utf8");
+ if (text.length === 0) return void 0;
+ return JSON.parse(text);
+ }
+ function parseReserveOptions(input) {
+ if (input === null || typeof input !== "object") return null;
+ const obj = input;
+ if (typeof obj.mimeType !== "string" || obj.mimeType.length === 0 || obj.mimeType.length > MAX_MIMETYPE_LEN || !MIME_TYPE_PATTERN.test(obj.mimeType)) return null;
+ if (typeof obj.fileName !== "string" || obj.fileName.length === 0 || obj.fileName.length > MAX_FILENAME_LEN || CONTROL_CHARS.test(obj.fileName)) return null;
+ let extension = null;
+ if (typeof obj.extension === "string") {
+ if (obj.extension.length === 0 || /[\\/]/.test(obj.extension)) return null;
+ extension = obj.extension;
+ } else if (obj.extension !== void 0 && obj.extension !== null) return null;
+ return {
+ mimeType: obj.mimeType,
+ fileName: obj.fileName,
+ extension
+ };
+ }
+ function quoteFilename(name) {
+ return `"${name.replace(/[\\"]/g, "\\$&")}"`;
+ }
+ function buildContentDisposition(fileName) {
+ const ascii = fileName.replace(/[^\x20-\x21\x23-\x5b\x5d-\x7e]/g, "_");
+ const encoded = encodeURIComponent(fileName).replace(/['()*!]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`);
+ return `attachment; filename=${quoteFilename(ascii)}; filename*=UTF-8''${encoded}`;
+ }
+ function makeReserveHandler(attachments) {
+ return async (req, res, body) => {
+ let parsed;
+ try {
+ parsed = await readJsonBody(req, body);
+ } catch {
+ sendError(res, 400, "Invalid JSON body");
+ return;
+ }
+ const opts = parseReserveOptions(parsed);
+ if (!opts) {
+ sendError(res, 400, "Body must be { mimeType: string (type/subtype), fileName: string (no control characters, max 255 chars), extension?: string|null }");
+ return;
+ }
+ try {
+ sendJson(res, 201, { reservationId: (await attachments.service.reserve(opts)).reservationId });
+ } catch (err) {
+ sendErrorFromException(res, err);
+ }
+ };
+ }
+ function makeUploadHandler(attachments) {
+ return async (req, res) => {
+ const reservationId = extractParam(req, "reservationId");
+ if (!reservationId) {
+ sendError(res, 400, "Missing reservationId");
+ return;
+ }
+ let reservation;
+ try {
+ reservation = await attachments.reservations.get(reservationId);
+ } catch (err) {
+ sendErrorFromException(res, err);
+ return;
+ }
+ const upload = attachments.uploadFactory.createUpload(reservation.reservationId, {
+ mimeType: reservation.mimeType,
+ fileName: reservation.fileName,
+ extension: reservation.extension
+ });
+ const webStream = Readable.toWeb(req);
+ try {
+ sendJson(res, 200, await upload.send(webStream));
+ } catch (err) {
+ sendErrorFromException(res, err);
+ }
+ };
+ }
+ function makeDownloadHandler(attachments) {
+ return async (req, res) => {
+ const hash = extractParam(req, "hash");
+ if (!hash || !HASH_PATTERN.test(hash)) {
+ sendError(res, 400, "Invalid attachment hash");
+ return;
+ }
+ const controller = new AbortController();
+ req.once("close", () => controller.abort());
+ const canonicalHash = hash.toLowerCase();
+ let response;
+ try {
+ response = await attachments.store.get(canonicalHash, controller.signal);
+ } catch (err) {
+ sendErrorFromException(res, err);
+ return;
+ }
+ const { header, body } = response;
+ res.statusCode = 200;
+ res.setHeader("Content-Type", header.mimeType);
+ res.setHeader("Content-Length", String(header.sizeBytes));
+ res.setHeader("Content-Disposition", buildContentDisposition(header.fileName));
+ res.setHeader("Attachment-Metadata", buildMetadataHeader(header));
+ Readable.fromWeb(body).pipe(res);
+ };
+ }
+ function buildMetadataHeader(header) {
+ return JSON.stringify({
+ mimeType: header.mimeType,
+ fileName: header.fileName,
+ sizeBytes: header.sizeBytes,
+ extension: header.extension,
+ createdAtUtc: header.createdAtUtc,
+ lastAccessedAtUtc: header.lastAccessedAtUtc
+ });
+ }
+ function makeStatHandler(attachments) {
+ return async (req, res) => {
+ const hash = extractParam(req, "hash");
+ if (!hash || !HASH_PATTERN.test(hash)) {
+ sendError(res, 400, "Invalid attachment hash");
+ return;
+ }
+ const canonicalHash = hash.toLowerCase();
+ let header;
+ try {
+ header = await attachments.store.stat(canonicalHash);
+ } catch (err) {
+ sendErrorFromException(res, err);
+ return;
+ }
+ res.statusCode = 200;
+ res.setHeader("Content-Type", header.mimeType);
+ res.setHeader("Content-Length", String(header.sizeBytes));
+ res.setHeader("Content-Disposition", buildContentDisposition(header.fileName));
+ res.setHeader("Attachment-Metadata", buildMetadataHeader(header));
+ res.end();
+ };
+ }
+ function makeGetReservationHandler(attachments) {
+ return async (req, res) => {
+ const reservationId = extractParam(req, "reservationId");
+ if (!reservationId) {
+ sendError(res, 400, "Missing reservationId");
+ return;
+ }
+ try {
+ sendJson(res, 200, await attachments.reservations.get(reservationId));
+ } catch (err) {
+ sendErrorFromException(res, err);
+ }
+ };
+ }
+ function makeDeleteReservationHandler(attachments) {
+ return async (req, res) => {
+ const reservationId = extractParam(req, "reservationId");
+ if (!reservationId) {
+ sendError(res, 400, "Missing reservationId");
+ return;
+ }
+ try {
+ await attachments.reservations.delete(reservationId);
+ res.statusCode = 204;
+ res.end();
+ } catch (err) {
+ sendErrorFromException(res, err);
+ }
+ };
+ }
+ function extractParam(req, name) {
+ return req.params?.[name];
+ }
+ //#endregion
+ //#region src/attachments/index.ts
+ function registerAttachmentRoutes(api) {
+ const { attachments } = api;
+ mountAuthenticatedNodeRoute(api, "POST", "/attachments/reservations", makeReserveHandler(attachments));
+ mountAuthenticatedNodeRoute(api, "GET", "/attachments/reservations/:reservationId", makeGetReservationHandler(attachments));
+ mountAuthenticatedNodeRoute(api, "DELETE", "/attachments/reservations/:reservationId", makeDeleteReservationHandler(attachments));
+ mountAuthenticatedNodeRoute(api, "PUT", "/attachments/reservations/:reservationId", makeUploadHandler(attachments));
+ mountAuthenticatedNodeRoute(api, "HEAD", "/attachments/:hash", makeStatHandler(attachments));
+ mountAuthenticatedNodeRoute(api, "GET", "/attachments/:hash", makeDownloadHandler(attachments));
+ }
+ //#endregion
+ //#region src/feature-flags.ts
+ async function initFeatureFlags() {
+ const provider = new EnvVarProvider();
+ await OpenFeature.setProviderAndWait(provider);
+ return OpenFeature.getClient();
+ }
+ //#endregion
+ //#region src/pglite-migration.ts
+ function backupPath(dataDir, major) {
+ return `${dataDir}.backup-pg${major}-${(/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-")}`;
+ }
+ async function pathExists(p) {
+ try {
+ await promises.stat(p);
+ return true;
+ } catch {
+ return false;
+ }
+ }
+ function logRestoreFailure(dataDir, sql, err, logger) {
+ const errObj = err;
+ const position = typeof errObj.position === "string" ? parseInt(errObj.position, 10) : typeof errObj.position === "number" ? errObj.position : NaN;
+ logger.error(`[pglite-migration] Restore failed for ${dataDir}: code=${errObj.code ?? ""} severity=${errObj.severity ?? ""} message=${errObj.message ?? ""} sqlLength=${sql.length}`);
+ if (Number.isFinite(position) && position > 0) {
+ const zeroBased = position - 1;
+ const start = Math.max(0, zeroBased - 200);
+ const end = Math.min(sql.length, zeroBased + 200);
+ const before = sql.slice(start, zeroBased);
+ const at = sql.slice(zeroBased, zeroBased + 1);
+ const after = sql.slice(zeroBased + 1, end);
+ logger.error(`[pglite-migration] SQL context around position ${position}:\n${before}»${at}«${after}`);
+ } else logger.error(`[pglite-migration] No position info. First 2000 chars of dump:\n${sql.slice(0, 2e3)}`);
+ }
+ /**
+ * Migrate a filesystem PGLite data directory from a legacy PG major to the
+ * current one. Renames the existing dir to a timestamped backup, dumps via the
+ * matching legacy `pg_dump`, restores into a fresh current-version PGLite at
+ * the original path. On failure, the original dir is restored from the backup.
+ *
+ * No-op when the dir is missing or already at the current major.
+ */
+ async function migratePgliteDir(dataDir, logger) {
+ const major = await readPgVersionFile(dataDir);
+ if (major === null) {
+ logger.info(`[pglite-migration] No PG_VERSION at ${dataDir}; skipping migration`);
+ return;
+ }
+ if (major === 17) return;
+ if (!isSupportedMajor(major)) throw new Error(`Unsupported legacy PGlite data dir: PG_VERSION=${major} for ${dataDir}`);
+ const backupDir = backupPath(dataDir, major);
+ logger.info(`[pglite-migration] Migrating ${dataDir} from PG${major} to PG17; backup: ${backupDir}`);
+ await promises.rename(dataDir, backupDir);
+ let sql;
+ try {
+ const [legacyMod, pgDump] = await Promise.all([loadPGliteModule(major), loadPgDump(major)]);
+ const LegacyPGlite = legacyMod.PGlite;
+ const pg = new LegacyPGlite(backupDir);
+ try {
+ await pg.waitReady;
+ sql = await (await pgDump({ pg })).text();
+ } finally {
+ await pg.close();
+ }
+ } catch (err) {
+ await rollback(dataDir, backupDir, err, logger);
+ throw err;
+ }
+ try {
+ const CurrentPGlite = (await loadPGliteModule(17)).PGlite;
+ const pg = new CurrentPGlite(dataDir, { relaxedDurability: false });
+ try {
+ await pg.waitReady;
+ try {
+ await pg.exec("SET standard_conforming_strings = off;");
+ } catch (gucErr) {
+ logger.warn(`[pglite-migration] Could not force standard_conforming_strings=off: ${String(gucErr)}`);
+ }
+ try {
+ await pg.exec(sql);
+ } catch (execErr) {
+ logRestoreFailure(dataDir, sql, execErr, logger);
+ throw execErr;
+ }
+ } finally {
+ await pg.close();
+ }
+ } catch (err) {
+ await rollback(dataDir, backupDir, err, logger);
+ throw err;
+ }
+ logger.info(`[pglite-migration] Migration of ${dataDir} complete. Backup retained at ${backupDir}; remove it manually once you have verified the upgrade.`);
+ }
+ async function rollback(dataDir, backupDir, originalError, logger) {
+ try {
+ if (await pathExists(dataDir)) await promises.rm(dataDir, {
+ recursive: true,
+ force: true
+ });
+ if (await pathExists(backupDir)) await promises.rename(backupDir, dataDir);
+ } catch (rollbackErr) {
+ logger.error(`[pglite-migration] Migration AND rollback failed for ${dataDir}. Original error: ${String(originalError)}; rollback error: ${String(rollbackErr)}; backup may still exist at ${backupDir}.`);
+ return;
+ }
+ logger.error(`[pglite-migration] Migration failed for ${dataDir}; rolled back from ${backupDir}. Original error: ${String(originalError)}`);
+ }
+ //#endregion
+ //#region src/renown.ts
+ const logger = childLogger(["switchboard", "renown"]);
+ /**
+ * Initialize Renown for the Switchboard instance.
+ * This allows Switchboard to authenticate with remote services
+ * using the same identity established during `ph login`.
+ */
+ async function initRenown(options = {}) {
+ const { keypairPath, requireExisting = false, baseUrl = DEFAULT_RENOWN_URL } = options;
+ const keyStorage = new NodeKeyStorage(keypairPath, { logger });
+ const existingKeyPair = await keyStorage.loadKeyPair();
+ if (!existingKeyPair && requireExisting) throw new Error("No existing keypair found and requireExisting is true. Run \"ph login\" to create one.");
+ if (!existingKeyPair) logger.info("No existing keypair found. A new one will be generated.");
+ const renownCrypto = await new RenownCryptoBuilder().withKeyPairStorage(keyStorage).build();
+ const renown = await new RenownBuilder("switchboard", {}).withCrypto(renownCrypto).withBaseUrl(baseUrl).build();
+ logger.info("Switchboard identity initialized: @did", renownCrypto.did);
+ return renown;
+ }
+ /**
+ * Get the signer config for the given renown instance.
+ *
+ * @param renown - The renown instance
+ * @param requireSignature - If true, unsigned actions are rejected
+ */
+ function getRenownSignerConfig(renown, requireSignature) {
+ return {
+ signer: renown.signer,
+ verifier: createSignatureVerifier(requireSignature)
+ };
+ }
+ //#endregion
+ //#region src/server.mts
+ const defaultLogger = childLogger(["switchboard"]);
+ const LogLevel = process.env.LOG_LEVEL || "info";
+ setLogLevel(LogLevel);
+ dotenv.config();
+ const DOCUMENT_MODEL_SUBGRAPHS_ENABLED = "DOCUMENT_MODEL_SUBGRAPHS_ENABLED";
+ const DOCUMENT_MODEL_SUBGRAPHS_ENABLED_DEFAULT = true;
+ const REQUIRE_SIGNATURES = "REQUIRE_SIGNATURES";
+ const REQUIRE_SIGNATURES_DEFAULT = false;
+ if (process.env.SENTRY_DSN) {
+ defaultLogger.info("Initialized Sentry with env: @env", process.env.SENTRY_ENV);
+ Sentry.init({
+ dsn: process.env.SENTRY_DSN,
+ environment: process.env.SENTRY_ENV,
+ release: process.env.SENTRY_RELEASE || (process.env.npm_package_version ? `v${process.env.npm_package_version}` : void 0)
+ });
+ }
+ const DEFAULT_PORT = process.env.PORT ? Number(process.env.PORT) : 4001;
+ const PORT_FALLBACK_ATTEMPTS = 20;
+ /**
+ * Attempt to bind a throwaway TCP server to the given port. Resolves true if
+ * the port is free, false if the OS reports it in use. Any other error is
+ * surfaced so we don't silently mask real issues (permissions, bad host, …).
+ */
+ function isPortAvailable(port) {
+ return new Promise((resolve, reject) => {
+ const tester = net.createServer();
+ tester.once("error", (err) => {
+ if (err.code === "EADDRINUSE" || err.code === "EACCES") resolve(false);
+ else reject(err);
+ });
+ tester.once("listening", () => {
+ tester.close(() => resolve(true));
+ });
+ tester.listen({
+ port,
+ host: "::"
+ });
+ });
+ }
+ async function resolveServerPort(requested, strictPort, logger) {
+ if (strictPort) return requested;
+ for (let i = 0; i < PORT_FALLBACK_ATTEMPTS; i++) {
+ const candidate = requested + i;
+ if (await isPortAvailable(candidate)) {
+ if (candidate !== requested) logger.info(`Port ${requested} is in use. Falling back to port ${candidate}.`);
+ return candidate;
+ }
+ }
+ return requested;
+ }
+ async function initServer(serverPort, options, renown) {
+ if (options.meterProvider) metrics.setGlobalMeterProvider(options.meterProvider);
+ const { dev, packages = [], remoteDrives = [], logger = defaultLogger } = options;
+ logger.level = LogLevel;
+ const dbPath = options.dbPath ?? process.env.DATABASE_URL;
+ const readModelPath = dbPath || ".ph/read-storage";
+ const reactorDbUrl = process.env.PH_REACTOR_DATABASE_URL;
+ const reactorPgliteDir = !reactorDbUrl || !isPostgresUrl(reactorDbUrl) ? "./.ph/reactor-storage" : null;
+ const readModelPgliteDir = !dbPath || !isPostgresUrl(dbPath) ? readModelPath : null;
+ const pgliteDirs = [reactorPgliteDir, readModelPgliteDir].filter((d) => d !== null);
+ const detectedMajors = /* @__PURE__ */ new Map();
+ if (options.forcePgVersion !== void 0 && pgliteDirs.length > 0) {
+ if (options.migratePglite) logger.warn("PH_FORCE_PG_VERSION is set; ignoring --migrate-pglite/PH_MIGRATE_PGLITE because the data dirs will be wiped.");
+ logger.warn(`PH_FORCE_PG_VERSION=${options.forcePgVersion} set; wiping PGLite data dirs and re-initializing at PG${options.forcePgVersion}.`);
+ for (const dir of pgliteDirs) {
+ await promises.rm(dir, {
+ recursive: true,
+ force: true
+ });
+ logger.info(`Wiped PGLite data dir ${dir}`);
+ }
+ } else if (options.forcePgVersion === void 0) {
+ for (const dir of pgliteDirs) {
+ const major = await readPgVersionFile(dir);
+ if (major !== null) detectedMajors.set(dir, major);
+ }
+ if (options.migratePglite) for (const [dir, major] of detectedMajors) {
+ if (major === 17) continue;
+ await migratePgliteDir(dir, logger);
+ const after = await readPgVersionFile(dir);
+ if (after !== null) detectedMajors.set(dir, after);
+ }
+ else for (const [dir, major] of detectedMajors) {
+ if (major === 17) continue;
+ logger.warn(`PGLite data dir at ${dir} was created with PG${major} but Switchboard ships PG17. Running on legacy PGLite. Re-start with --migrate-pglite (or PH_MIGRATE_PGLITE=true) to upgrade.`);
+ }
+ }
+ function resolvePgliteMajorForDir(dir) {
+ if (options.forcePgVersion !== void 0) return options.forcePgVersion;
+ const detected = detectedMajors.get(dir);
+ if (detected === void 0) return 17;
+ if (!isSupportedMajor(detected)) throw new Error(`Unsupported PGLite data dir at ${dir}: PG_VERSION=${detected}`);
+ return detected;
+ }
+ const reactorPgliteMajor = reactorPgliteDir ? resolvePgliteMajorForDir(reactorPgliteDir) : null;
+ const readModelPgliteMajor = readModelPgliteDir ? resolvePgliteMajorForDir(readModelPgliteDir) : null;
+ const apiRef = { current: void 0 };
+ const config = getConfig(options.configFile ?? path$1.join(process.cwd(), "powerhouse.config.json"));
+ const registryUrl = process.env.PH_REGISTRY_URL ?? config.packageRegistryUrl;
+ const registryPackages = process.env.PH_REGISTRY_PACKAGES;
+ const dynamicModelLoading = options.dynamicModelLoading ?? process.env.DYNAMIC_MODEL_LOADING === "true";
+ let httpLoader;
+ if (registryUrl) {
+ register(httpsHooksPath, import.meta.url);
+ httpLoader = new HttpPackageLoader({ registryUrl });
+ registryPackages?.split(",").forEach((p) => {
+ const name = p.trim();
+ if (!packages.includes(name)) packages.push(name);
+ });
+ }
+ const reactorLogger = logger.child(["reactor"]);
+ const initializeClient = async (documentModels$1) => {
+ const eventBus = new EventBus();
+ const builder = new ReactorBuilder().withEventBus(eventBus).withDocumentModels(getUniqueDocumentModels([
+ documentModelDocumentModelModule,
+ driveDocumentModelModule,
+ ...documentModels,
+ ...documentModels$1
+ ])).withChannelScheme(ChannelScheme.SWITCHBOARD).withSignalHandlers().withLogger(reactorLogger);
+ const maxSkipThreshold = parseInt(process.env.MAX_SKIP_THRESHOLD ?? "", 10);
+ if (!isNaN(maxSkipThreshold) && maxSkipThreshold > 0) {
+ builder.withExecutorConfig({ maxSkipThreshold });
+ logger.info(`Reactor maxSkipThreshold set to ${maxSkipThreshold}`);
+ }
+ if (reactorDbUrl && isPostgresUrl(reactorDbUrl)) {
+ const kysely = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({ connectionString: reactorDbUrl.includes("?") ? reactorDbUrl : `${reactorDbUrl}?sslmode=disable` }) }) });
+ builder.withKysely(kysely);
+ logger.info("Using PostgreSQL for reactor storage");
+ } else {
+ if (!reactorPgliteDir || reactorPgliteMajor === null) throw new Error("Reactor PGLite directory not resolved");
+ const { PGlite } = await loadPGliteModule(reactorPgliteMajor);
+ const kysely = new Kysely({ dialect: new ClosablePGliteDialect(new PGlite(reactorPgliteDir)) });
+ builder.withKysely(kysely);
+ logger.info(`Using PGlite (PG${reactorPgliteMajor}) for reactor storage at ${reactorPgliteDir}`);
+ }
+ builder.withShutdownHook(async () => {
+ if (apiRef.current) await apiRef.current.dispose();
+ });
+ if (httpLoader && dynamicModelLoading) builder.withDocumentModelLoader(httpLoader.documentModelLoader);
+ const clientBuilder = new ReactorClientBuilder().withReactorBuilder(builder);
+ if (renown) {
+ const signerConfig = getRenownSignerConfig(renown, options.identity?.requireSignatures);
+ clientBuilder.withSigner(signerConfig);
+ }
+ const module = await clientBuilder.buildModule();
+ if (module.reactorModule) {
+ new ReactorInstrumentation(module.reactorModule).start();
+ reactorLogger.info("Reactor metrics instrumentation started");
+ }
+ return module;
+ };
+ let defaultDriveUrl = void 0;
+ const basePath = process.cwd();
+ const viteLogger = createViteLogger(logger);
+ const vite = dev ? await startViteServer(process.cwd(), viteLogger) : void 0;
+ if (!options.disableLocalPackages) packages.push(basePath);
+ const packageLoaders = [];
+ if (vite) packageLoaders.push(VitePackageLoader.build(vite));
+ else packageLoaders.push(new ImportPackageLoader());
+ if (httpLoader) {
+ packageLoaders.push(httpLoader);
+ registryPackages?.split(",").forEach((p) => {
+ const name = p.trim();
+ if (!packages.includes(name)) packages.push(name);
+ });
+ }
+ const apiLogger = logger.child(["reactor-api"]);
+ let pgliteFactory;
+ if (readModelPgliteDir && readModelPgliteMajor !== null) {
+ const { PGlite: ReadModelPGlite } = await loadPGliteModule(readModelPgliteMajor);
+ pgliteFactory = (connectionString) => new ReadModelPGlite(connectionString ?? readModelPgliteDir);
+ }
+ const api = await initializeAndStartAPI(initializeClient, {
+ port: serverPort,
+ dbPath: readModelPath,
+ pgliteFactory,
+ https: options.https,
+ packageLoaders: packageLoaders.length > 0 ? packageLoaders : void 0,
+ packages,
+ processorConfig: options.processorConfig,
+ processors: { "@powerhousedao/vetra": [processorFactory] },
+ configFile: options.configFile ?? path$1.join(process.cwd(), "powerhouse.config.json"),
+ mcp: options.mcp ?? true,
+ logger: apiLogger,
+ enableDocumentModelSubgraphs: options.enableDocumentModelSubgraphs
+ }, "switchboard");
+ apiRef.current = api;
+ registerAttachmentRoutes(api);
+ if (process.env.SENTRY_DSN) api.httpAdapter.setupSentryErrorHandler(Sentry);
+ const { client, graphqlManager, documentModelRegistry } = api;
+ if (httpLoader) {
+ const packageManagementService = new PackageManagementService({
+ defaultRegistryUrl: registryUrl,
+ httpLoader,
+ documentModelRegistry
+ });
+ packageManagementService.setOnModelsChanged(() => {
+ graphqlManager.regenerateDocumentModelSubgraphs().catch(logger.error);
+ });
+ const packagesSubgraph = new PackagesSubgraph({
+ relationalDb: void 0,
+ analyticsStore: void 0,
+ reactorClient: client,
+ graphqlManager,
+ syncManager: api.syncManager,
+ path: graphqlManager.getBasePath(),
+ packageManagementService
+ });
+ graphqlManager.registerSubgraphInstance(packagesSubgraph, "graphql", false).then(() => graphqlManager.updateRouter()).catch((error) => {
+ logger.error("Failed to register packages subgraph: @error", error);
+ });
+ }
+ if (options.drive) {
+ if (!renown) throw new Error("Cannot create default drive without Renown identity");
+ defaultDriveUrl = await addDefaultDrive(client, options.drive, serverPort);
+ }
+ if (vite) api.httpAdapter.mountRawMiddleware(vite.middlewares);
+ if (remoteDrives.length > 0) for (const remoteDriveUrl of remoteDrives) {
+ let driveId;
+ try {
+ const { syncManager } = api;
+ const parsed = parseDriveUrl(remoteDriveUrl);
+ driveId = parsed.driveId;
+ const remoteName = `remote-drive-${driveId}-${crypto.randomUUID()}`;
+ await syncManager.add(remoteName, driveCollectionId("main", driveId), {
+ type: "gql",
+ parameters: { url: parsed.graphqlEndpoint }
+ });
+ logger.debug("Remote drive @remoteDriveUrl synced", remoteDriveUrl);
+ } catch (error) {
+ if (error instanceof Error && error.message.includes("already exists")) {
+ logger.debug("Remote drive already added: @remoteDriveUrl", remoteDriveUrl);
+ driveId = remoteDriveUrl.split("/").pop();
+ } else logger.error("Failed to connect to remote drive @remoteDriveUrl: @error", remoteDriveUrl, error);
+ } finally {
+ if (!defaultDriveUrl && driveId) defaultDriveUrl = `${options.https ? "https" : "http"}://localhost:${serverPort}/d/${driveId}`;
+ }
+ }
+ return {
+ defaultDriveUrl,
+ api,
+ reactor: client,
+ renown,
+ port: serverPort
+ };
+ }
+ const startSwitchboard = async (options = {}) => {
+ const requestedPort = options.port ?? DEFAULT_PORT;
+ const logger = options.logger ?? defaultLogger;
+ const serverPort = await resolveServerPort(requestedPort, options.strictPort ?? false, logger);
+ const featureFlags = await initFeatureFlags();
+ const enableDocumentModelSubgraphs = await featureFlags.getBooleanValue(DOCUMENT_MODEL_SUBGRAPHS_ENABLED, options.enableDocumentModelSubgraphs ?? DOCUMENT_MODEL_SUBGRAPHS_ENABLED_DEFAULT);
+ options.enableDocumentModelSubgraphs = enableDocumentModelSubgraphs;
+ const requireSignatures = options.identity?.requireSignatures ?? await featureFlags.getBooleanValue(REQUIRE_SIGNATURES, REQUIRE_SIGNATURES_DEFAULT);
+ options.identity = {
+ ...options.identity,
+ requireSignatures
+ };
+ logger.info("Feature flags: @flags", JSON.stringify({
+ DOCUMENT_MODEL_SUBGRAPHS_ENABLED: enableDocumentModelSubgraphs,
+ REQUIRE_SIGNATURES: requireSignatures
+ }, null, 2));
+ let renown = null;
+ try {
+ renown = await initRenown(options.identity);
+ } catch (e) {
+ logger.warn("Failed to initialize ConnectCrypto: @error", e);
+ if (options.identity?.requireExisting) throw new Error("Identity required but failed to initialize. Run \"ph login\" first.");
+ }
+ try {
+ return await initServer(serverPort, options, renown);
+ } catch (e) {
+ Sentry.captureException(e);
+ logger.error("App crashed: @error", e);
+ throw e;
+ }
+ };
+ if (import.meta.main) await startSwitchboard();
+ //#endregion
+ export { startSwitchboard as n, parseForcePgVersion as r, isPortAvailable as t };
+
+ //# sourceMappingURL=server-UGYERfMo.mjs.map
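
For orientation, below is a minimal usage sketch of the new server entrypoint and attachment routes visible in this diff. It is not taken from the package docs: the import path and the re-exported name startSwitchboard are assumptions (the chunk above exports only minified aliases; the real public surface is defined by package/dist/index.mjs and package.json), and it assumes auth is disabled, otherwise every request needs an Authorization: Bearer header.

// Hedged sketch (TypeScript, ESM with top-level await). Import path and export
// name are assumed; option names and routes come from initServer/startSwitchboard
// and registerAttachmentRoutes in the diff above.
import { startSwitchboard } from "@powerhousedao/switchboard/server";

const { port, defaultDriveUrl } = await startSwitchboard({
  port: 4001,           // resolveServerPort tries 4001..4020 unless strictPort is true
  strictPort: false,
  migratePglite: true,  // upgrade a legacy PG16 PGLite data dir to PG17 (src/pglite-migration.ts)
});
console.log(`Switchboard listening on port ${port}`, defaultDriveUrl);

// Attachment flow added in this version: reserve, then upload the bytes.
const base = `http://localhost:${port}`;
const reserved = await fetch(`${base}/attachments/reservations`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ mimeType: "text/plain", fileName: "notes.txt" }),
}).then((r) => r.json());        // 201 -> { reservationId }

const uploadResult = await fetch(`${base}/attachments/reservations/${reserved.reservationId}`, {
  method: "PUT",
  body: "hello switchboard",
}).then((r) => r.json());        // 200 -> whatever upload.send() returns

// GET /attachments/:hash then streams the stored file back with Content-Disposition
// and Attachment-Metadata headers; the 64-char hex hash comes from the upload
// result (its exact field name is not shown in this diff).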