@powerhousedao/switchboard 6.0.0-dev.207 → 6.0.0-dev.209

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,501 @@
1
+ import { PGlite } from "@electric-sql/pglite";
2
+ import {
3
+ AttachmentBuilder,
4
+ type AttachmentBuildResult,
5
+ } from "@powerhousedao/reactor-attachments";
6
+ import { Kysely } from "kysely";
7
+ import { PGliteDialect } from "kysely-pglite-dialect";
8
+ import { mkdtemp, rm } from "node:fs/promises";
9
+ import type { IncomingMessage, ServerResponse } from "node:http";
10
+ import { tmpdir } from "node:os";
11
+ import { join } from "node:path";
12
+ import { Readable, Writable } from "node:stream";
13
+ import { afterEach, beforeEach, describe, expect, it } from "vitest";
14
+ import {
15
+ buildContentDisposition,
16
+ makeDownloadHandler,
17
+ makeReserveHandler,
18
+ makeUploadHandler,
19
+ parseReserveOptions,
20
+ quoteFilename,
21
+ } from "../../src/attachments/routes.js";
22
+
23
// Test double for ServerResponse that records what a route handler wrote:
// `_headers` holds headers keyed by lower-cased name, `_body` is the bytes
// written so far, and `_done` settles when the response stream finishes.
type CapturedRes = ServerResponse & {
  _headers: Record<string, string>;
  _body: Buffer;
  _done: Promise<void>;
};
28
+
29
+ function makeReq(opts: {
30
+ method: string;
31
+ url?: string;
32
+ body?: Buffer | string;
33
+ params?: Record<string, string>;
34
+ }): IncomingMessage {
35
+ const buf =
36
+ typeof opts.body === "string"
37
+ ? Buffer.from(opts.body, "utf8")
38
+ : (opts.body ?? Buffer.alloc(0));
39
+ const req = Readable.from(buf.length === 0 ? [] : [buf]) as Readable & {
40
+ method: string;
41
+ url?: string;
42
+ headers: Record<string, string>;
43
+ params?: Record<string, string>;
44
+ };
45
+ req.method = opts.method;
46
+ req.url = opts.url ?? "/";
47
+ req.headers = {};
48
+ if (opts.params) req.params = opts.params;
49
+ return req as unknown as IncomingMessage;
50
+ }
51
+
52
+ function makeRes(): CapturedRes {
53
+ const chunks: Buffer[] = [];
54
+ const headers: Record<string, string> = {};
55
+ const writable = new Writable({
56
+ write(chunk: string | Buffer, _encoding, callback) {
57
+ chunks.push(
58
+ typeof chunk === "string" ? Buffer.from(chunk, "utf8") : chunk,
59
+ );
60
+ callback();
61
+ },
62
+ });
63
+ const done = new Promise<void>((resolve) => {
64
+ writable.once("finish", resolve);
65
+ });
66
+ Object.assign(writable, {
67
+ statusCode: 200,
68
+ _headers: headers,
69
+ setHeader(name: string, value: string | number | readonly string[]) {
70
+ headers[name.toLowerCase()] = String(value);
71
+ },
72
+ getHeader(name: string) {
73
+ return headers[name.toLowerCase()];
74
+ },
75
+ _done: done,
76
+ });
77
+ Object.defineProperty(writable, "_body", {
78
+ get(): Buffer {
79
+ return Buffer.concat(chunks);
80
+ },
81
+ });
82
+ return writable as unknown as CapturedRes;
83
+ }
84
+
85
+ async function waitFor(res: CapturedRes): Promise<void> {
86
+ await res._done;
87
+ }
88
+
89
// End-to-end tests for the attachment HTTP routes. Each test runs against a
// real AttachmentBuilder stack — an in-memory PGlite database plus a temp-dir
// blob store — driven through the req/res doubles defined above.
describe("attachment routes", () => {
  let attachments: AttachmentBuildResult;
  let storagePath: string;
  let kysely: Kysely<unknown>;
  let cleanup: () => Promise<void>;

  beforeEach(async () => {
    // Fresh database and throwaway blob directory per test.
    const pglite = new PGlite();
    kysely = new Kysely<unknown>({ dialect: new PGliteDialect(pglite) });
    storagePath = await mkdtemp(join(tmpdir(), "switchboard-attach-"));
    attachments = await new AttachmentBuilder(kysely, storagePath).build();
    // Torn down in afterEach: close the DB, then remove the blob directory.
    cleanup = async () => {
      await kysely.destroy();
      await rm(storagePath, { recursive: true, force: true });
    };
  });

  afterEach(async () => {
    await cleanup();
  });

  it("POST reserve returns 201 with reservationId for valid body", async () => {
    const handler = makeReserveHandler(attachments);
    const req = makeReq({
      method: "POST",
      body: JSON.stringify({ mimeType: "text/plain", fileName: "hello.txt" }),
    });
    const res = makeRes();
    await handler(req, res);
    await waitFor(res);
    expect(res.statusCode).toBe(201);
    const body = JSON.parse(res._body.toString("utf8")) as {
      reservationId: string;
    };
    expect(body.reservationId).toMatch(/.+/);
  });

  it("POST reserve returns 400 for missing fields", async () => {
    const handler = makeReserveHandler(attachments);
    const req = makeReq({
      method: "POST",
      // fileName omitted — handler must reject the body.
      body: JSON.stringify({ mimeType: "text/plain" }),
    });
    const res = makeRes();
    await handler(req, res);
    await waitFor(res);
    expect(res.statusCode).toBe(400);
  });

  it("PUT upload returns 404 for unknown reservation", async () => {
    const handler = makeUploadHandler(attachments);
    const req = makeReq({
      method: "PUT",
      params: { reservationId: "00000000-0000-0000-0000-000000000000" },
      body: "hello",
    });
    const res = makeRes();
    await handler(req, res);
    await waitFor(res);
    expect(res.statusCode).toBe(404);
  });

  it("full reserve -> upload -> download cycle round-trips bytes", async () => {
    // Reserve
    const reserveHandler = makeReserveHandler(attachments);
    const reserveReq = makeReq({
      method: "POST",
      body: JSON.stringify({
        mimeType: "text/plain",
        fileName: "hello.txt",
        extension: "txt",
      }),
    });
    const reserveRes = makeRes();
    await reserveHandler(reserveReq, reserveRes);
    await waitFor(reserveRes);
    expect(reserveRes.statusCode).toBe(201);
    const { reservationId } = JSON.parse(reserveRes._body.toString("utf8")) as {
      reservationId: string;
    };

    // Upload
    const uploadHandler = makeUploadHandler(attachments);
    const payload = "hello world";
    const uploadReq = makeReq({
      method: "PUT",
      params: { reservationId },
      body: payload,
    });
    const uploadRes = makeRes();
    await uploadHandler(uploadReq, uploadRes);
    await waitFor(uploadRes);
    expect(uploadRes.statusCode).toBe(200);
    const upload = JSON.parse(uploadRes._body.toString("utf8")) as {
      hash: string;
      ref: string;
      header: { mimeType: string; fileName: string; sizeBytes: number };
    };
    // Content-addressed: hash is lowercase hex (SHA-256-sized) and the
    // returned ref embeds it.
    expect(upload.hash).toMatch(/^[a-f0-9]{64}$/);
    expect(upload.ref).toBe(`attachment://v1:${upload.hash}`);
    expect(upload.header.sizeBytes).toBe(payload.length);

    // Reservation should be cleaned up
    await expect(attachments.reservations.get(reservationId)).rejects.toThrow();

    // Download
    const downloadHandler = makeDownloadHandler(attachments);
    const downloadReq = makeReq({
      method: "GET",
      params: { hash: upload.hash },
    });
    const downloadRes = makeRes();
    await downloadHandler(downloadReq, downloadRes);
    await waitFor(downloadRes);
    expect(downloadRes.statusCode).toBe(200);
    expect(downloadRes._body.toString("utf8")).toBe(payload);
    expect(downloadRes.getHeader("content-type")).toBe("text/plain");
    expect(downloadRes.getHeader("content-length")).toBe(
      String(payload.length),
    );
    expect(downloadRes.getHeader("content-disposition")).toContain("hello.txt");
    const meta = JSON.parse(
      downloadRes.getHeader("x-attachment-metadata") as string,
    ) as { fileName: string; mimeType: string; sizeBytes: number };
    expect(meta.fileName).toBe("hello.txt");
    expect(meta.mimeType).toBe("text/plain");
    expect(meta.sizeBytes).toBe(payload.length);
  });

  it("GET download returns 404 for unknown hash", async () => {
    const handler = makeDownloadHandler(attachments);
    const req = makeReq({
      method: "GET",
      // Well-formed hash that was never uploaded.
      params: { hash: "a".repeat(64) },
    });
    const res = makeRes();
    await handler(req, res);
    await waitFor(res);
    expect(res.statusCode).toBe(404);
  });

  it("GET download returns 400 for malformed hash", async () => {
    const handler = makeDownloadHandler(attachments);
    const req = makeReq({
      method: "GET",
      params: { hash: "not-a-hash" },
    });
    const res = makeRes();
    await handler(req, res);
    await waitFor(res);
    expect(res.statusCode).toBe(400);
  });

  it("GET download returns 400 for uppercase hash", async () => {
    // Hash validation is strict lowercase hex: uppercase is rejected rather
    // than normalized.
    const handler = makeDownloadHandler(attachments);
    const req = makeReq({
      method: "GET",
      params: { hash: "A".repeat(64) },
    });
    const res = makeRes();
    await handler(req, res);
    await waitFor(res);
    expect(res.statusCode).toBe(400);
    expect(JSON.parse(res._body.toString("utf8"))).toEqual({
      error: "Invalid attachment hash",
    });
  });

  it("PUT upload returns opaque 500 when reservation lookup throws an unmapped error", async () => {
    // Internal error details (the "secret") must never leak into the response.
    const secret = "INTERNAL_DB_PATH=/var/secret/db.sock";
    const originalGet = attachments.reservations.get.bind(
      attachments.reservations,
    );
    attachments.reservations.get = () => {
      throw new Error(secret);
    };
    try {
      const handler = makeUploadHandler(attachments);
      const req = makeReq({
        method: "PUT",
        params: { reservationId: "00000000-0000-0000-0000-000000000000" },
        body: "hello",
      });
      const res = makeRes();
      await handler(req, res);
      await waitFor(res);
      expect(res.statusCode).toBe(500);
      const bodyText = res._body.toString("utf8");
      expect(JSON.parse(bodyText)).toEqual({ error: "Internal error" });
      expect(bodyText).not.toContain(secret);
    } finally {
      // Restore the patched method so later tests see the real store.
      attachments.reservations.get = originalGet;
    }
  });

  it("GET download returns opaque 500 when store throws an unmapped error", async () => {
    const secret = "INTERNAL_FS_PATH=/var/secret/blobs";
    const originalGet = attachments.store.get.bind(attachments.store);
    attachments.store.get = () => {
      throw new Error(secret);
    };
    try {
      const handler = makeDownloadHandler(attachments);
      const req = makeReq({
        method: "GET",
        params: { hash: "a".repeat(64) },
      });
      const res = makeRes();
      await handler(req, res);
      await waitFor(res);
      expect(res.statusCode).toBe(500);
      const bodyText = res._body.toString("utf8");
      expect(JSON.parse(bodyText)).toEqual({ error: "Internal error" });
      expect(bodyText).not.toContain(secret);
    } finally {
      attachments.store.get = originalGet;
    }
  });

  it("identical uploads dedupe to the same hash", async () => {
    const reserveHandler = makeReserveHandler(attachments);
    const uploadHandler = makeUploadHandler(attachments);

    // Reserve + upload the same bytes, returning the resulting content hash.
    const doRoundTrip = async (): Promise<string> => {
      const r1 = makeReq({
        method: "POST",
        body: JSON.stringify({ mimeType: "text/plain", fileName: "x.txt" }),
      });
      const r1res = makeRes();
      await reserveHandler(r1, r1res);
      await waitFor(r1res);
      const { reservationId } = JSON.parse(r1res._body.toString()) as {
        reservationId: string;
      };

      const u1 = makeReq({
        method: "PUT",
        params: { reservationId },
        body: "same content",
      });
      const u1res = makeRes();
      await uploadHandler(u1, u1res);
      await waitFor(u1res);
      const { hash } = JSON.parse(u1res._body.toString()) as { hash: string };
      return hash;
    };

    const h1 = await doRoundTrip();
    const h2 = await doRoundTrip();
    expect(h1).toBe(h2);
  });

  describe("validation and header encoding", () => {
    it("parseReserveOptions rejects fileName with CR/LF", () => {
      expect(
        parseReserveOptions({
          mimeType: "text/plain",
          fileName: "evil\r\nX-Inj: foo",
        }),
      ).toBeNull();
    });

    it("parseReserveOptions rejects fileName with NUL", () => {
      expect(
        parseReserveOptions({
          mimeType: "text/plain",
          fileName: "a\x00b",
        }),
      ).toBeNull();
    });

    it("parseReserveOptions rejects oversized fileName", () => {
      expect(
        parseReserveOptions({
          mimeType: "text/plain",
          fileName: "x".repeat(256),
        }),
      ).toBeNull();
    });

    it("parseReserveOptions rejects empty fileName", () => {
      expect(
        parseReserveOptions({
          mimeType: "text/plain",
          fileName: "",
        }),
      ).toBeNull();
    });

    it("parseReserveOptions rejects malformed mimeType", () => {
      for (const mimeType of ["", "plain", "text/plain\r\nX: y", "text/"]) {
        expect(
          parseReserveOptions({ mimeType, fileName: "ok.txt" }),
        ).toBeNull();
      }
    });

    it("parseReserveOptions accepts non-ASCII fileName", () => {
      const opts = parseReserveOptions({
        mimeType: "application/pdf",
        fileName: "résumé.pdf",
      });
      expect(opts).toEqual({
        mimeType: "application/pdf",
        fileName: "résumé.pdf",
        extension: null,
      });
    });

    it("parseReserveOptions accepts mimeType with parameters", () => {
      const opts = parseReserveOptions({
        mimeType: "text/plain; charset=utf-8",
        fileName: "ok.txt",
      });
      expect(opts?.mimeType).toBe("text/plain; charset=utf-8");
    });

    it("quoteFilename escapes backslash and double-quote", () => {
      expect(quoteFilename(`a"b\\c`)).toBe(`"a\\"b\\\\c"`);
    });

    it("buildContentDisposition emits ASCII fallback and RFC 5987 form for non-ASCII", () => {
      const value = buildContentDisposition("résumé.pdf");
      expect(value).toMatch(
        /^attachment; filename="[^"]*\.pdf"; filename\*=UTF-8''/,
      );
      expect(value).toContain("filename*=UTF-8''r%C3%A9sum%C3%A9.pdf");
    });

    it("buildContentDisposition produces a header Node accepts even for CR/LF/NUL input", () => {
      const res = makeRes();
      // NOTE(review): makeRes's setHeader is a stub that never throws; this
      // asserts buildContentDisposition itself doesn't blow up — confirm
      // whether a real ServerResponse check was intended here.
      for (const fileName of [
        "evil\r\nX-Inj: foo",
        "a\x00b.txt",
        "name\twith\ttabs",
      ]) {
        expect(() =>
          res.setHeader(
            "Content-Disposition",
            buildContentDisposition(fileName),
          ),
        ).not.toThrow();
      }
    });

    it("buildContentDisposition encodes RFC-5987-reserved chars in the encoded form", () => {
      const value = buildContentDisposition("a'b(c)*!.txt");
      expect(value).toContain("filename*=UTF-8''a%27b%28c%29%2A%21.txt");
    });

    it("POST reserve with CRLF in fileName returns 400 and persists no row", async () => {
      const handler = makeReserveHandler(attachments);
      const req = makeReq({
        method: "POST",
        body: JSON.stringify({
          mimeType: "text/plain",
          fileName: "evil\r\nX-Inj: foo",
        }),
      });
      const res = makeRes();
      await handler(req, res);
      await waitFor(res);
      expect(res.statusCode).toBe(400);
    });

    it("download with non-ASCII fileName produces RFC 6266 Content-Disposition", async () => {
      const reserveHandler = makeReserveHandler(attachments);
      const uploadHandler = makeUploadHandler(attachments);
      const downloadHandler = makeDownloadHandler(attachments);

      const reserveReq = makeReq({
        method: "POST",
        body: JSON.stringify({
          mimeType: "application/pdf",
          fileName: "résumé.pdf",
        }),
      });
      const reserveRes = makeRes();
      await reserveHandler(reserveReq, reserveRes);
      await waitFor(reserveRes);
      expect(reserveRes.statusCode).toBe(201);
      const { reservationId } = JSON.parse(
        reserveRes._body.toString("utf8"),
      ) as { reservationId: string };

      const uploadReq = makeReq({
        method: "PUT",
        params: { reservationId },
        body: "pdf-bytes",
      });
      const uploadRes = makeRes();
      await uploadHandler(uploadReq, uploadRes);
      await waitFor(uploadRes);
      expect(uploadRes.statusCode).toBe(200);
      const { hash } = JSON.parse(uploadRes._body.toString("utf8")) as {
        hash: string;
      };

      const downloadReq = makeReq({ method: "GET", params: { hash } });
      const downloadRes = makeRes();
      await downloadHandler(downloadReq, downloadRes);
      await waitFor(downloadRes);
      expect(downloadRes.statusCode).toBe(200);
      const cd = downloadRes.getHeader("content-disposition") as string;
      expect(cd).toContain("filename*=UTF-8''r%C3%A9sum%C3%A9.pdf");
      expect(cd).toMatch(/filename="[^"]*\.pdf"/);
      const meta = JSON.parse(
        downloadRes.getHeader("x-attachment-metadata") as string,
      ) as { fileName: string };
      expect(meta.fileName).toBe("résumé.pdf");
    });
  });
});
package/tsconfig.json CHANGED
@@ -2,9 +2,13 @@
2
2
  "extends": "../../tsconfig.options.json",
3
3
  "compilerOptions": {
4
4
  "outDir": "./dist",
5
- "lib": ["ESNext"]
5
+ "lib": [
6
+ "ESNext"
7
+ ]
6
8
  },
7
- "include": ["**/*"],
9
+ "include": [
10
+ "**/*"
11
+ ],
8
12
  "references": [
9
13
  {
10
14
  "path": "../../packages/config"
@@ -21,6 +25,9 @@
21
25
  {
22
26
  "path": "../../packages/reactor-api"
23
27
  },
28
+ {
29
+ "path": "../../packages/reactor-attachments"
30
+ },
24
31
  {
25
32
  "path": "../../packages/renown"
26
33
  },
@@ -1 +0,0 @@
1
- {"version":3,"file":"server-BMtyzhoR.mjs","names":["vetraDocumentModels","documentModels","vetraProcessorFactory"],"sources":["../src/feature-flags.ts","../src/renown.ts","../src/server.mts"],"sourcesContent":["import { EnvVarProvider } from \"@openfeature/env-var-provider\";\nimport { OpenFeature } from \"@openfeature/server-sdk\";\n\nexport async function initFeatureFlags() {\n // for now, we're only using env vars for feature flags\n const provider = new EnvVarProvider();\n\n await OpenFeature.setProviderAndWait(provider);\n\n return OpenFeature.getClient();\n}\n","import type { SignerConfig } from \"@powerhousedao/reactor\";\nimport {\n createSignatureVerifier,\n DEFAULT_RENOWN_URL,\n NodeKeyStorage,\n RenownBuilder,\n RenownCryptoBuilder,\n type IRenown,\n} from \"@renown/sdk/node\";\nimport { childLogger } from \"document-model\";\n\nconst logger = childLogger([\"switchboard\", \"renown\"]);\n\nexport interface RenownOptions {\n /** Path to the keypair file. Defaults to .ph/.keypair.json in cwd */\n keypairPath?: string;\n /** If true, won't generate a new keypair if none exists */\n requireExisting?: boolean;\n /** Base url of the Renown instance to use */\n baseUrl?: string;\n}\n\n/**\n * Initialize Renown for the Switchboard instance.\n * This allows Switchboard to authenticate with remote services\n * using the same identity established during `ph login`.\n */\nexport async function initRenown(\n options: RenownOptions = {},\n): Promise<IRenown | null> {\n const {\n keypairPath,\n requireExisting = false,\n baseUrl = DEFAULT_RENOWN_URL,\n } = options;\n\n const keyStorage = new NodeKeyStorage(keypairPath, {\n logger,\n });\n\n // Check if we have an existing keypair\n const existingKeyPair = await keyStorage.loadKeyPair();\n\n if (!existingKeyPair && requireExisting) {\n throw new Error(\n \"No existing keypair found and requireExisting is true. 
\" +\n 'Run \"ph login\" to create one.',\n );\n }\n\n if (!existingKeyPair) {\n logger.info(\"No existing keypair found. A new one will be generated.\");\n }\n\n const renownCrypto = await new RenownCryptoBuilder()\n .withKeyPairStorage(keyStorage)\n .build();\n\n const renown = await new RenownBuilder(\"switchboard\", {})\n .withCrypto(renownCrypto)\n .withBaseUrl(baseUrl)\n .build();\n\n logger.info(\"Switchboard identity initialized: @did\", renownCrypto.did);\n\n return renown;\n}\n\n/**\n * Get the signer config for the given renown instance.\n *\n * @param renown - The renown instance\n * @param requireSignature - If true, unsigned actions are rejected\n */\nexport function getRenownSignerConfig(\n renown: IRenown,\n requireSignature?: boolean,\n): SignerConfig {\n return {\n signer: renown.signer,\n verifier: createSignatureVerifier(requireSignature),\n };\n}\n","#!/usr/bin/env node\nimport { PGlite } from \"@electric-sql/pglite\";\nimport { metrics } from \"@opentelemetry/api\";\nimport { getConfig } from \"@powerhousedao/config/node\";\nimport { ReactorInstrumentation } from \"@powerhousedao/opentelemetry-instrumentation-reactor\";\nimport {\n ChannelScheme,\n EventBus,\n ReactorBuilder,\n ReactorClientBuilder,\n driveCollectionId,\n parseDriveUrl,\n type Database,\n} from \"@powerhousedao/reactor\";\nimport {\n HttpPackageLoader,\n ImportPackageLoader,\n PackageManagementService,\n PackagesSubgraph,\n getUniqueDocumentModels,\n initializeAndStartAPI,\n type IPackageLoader,\n} from \"@powerhousedao/reactor-api\";\nimport { httpsHooksPath } from \"@powerhousedao/reactor-api/https-hooks\";\nimport {\n VitePackageLoader,\n createViteLogger,\n startViteServer,\n} from \"@powerhousedao/reactor-api/vite\";\nimport { driveDocumentModelModule } from \"@powerhousedao/shared/document-drive\";\nimport type { DocumentModelModule } from \"@powerhousedao/shared/document-model\";\nimport { documentModels as vetraDocumentModels } from \"@powerhousedao/vetra\";\nimport { 
processorFactory as vetraProcessorFactory } from \"@powerhousedao/vetra/processors\";\nimport type { IRenown } from \"@renown/sdk/node\";\nimport * as Sentry from \"@sentry/node\";\nimport {\n childLogger,\n documentModelDocumentModelModule,\n setLogLevel,\n type ILogger,\n} from \"document-model\";\nimport dotenv from \"dotenv\";\nimport { Kysely, PostgresDialect } from \"kysely\";\nimport { PGliteDialect } from \"kysely-pglite-dialect\";\nimport net from \"node:net\";\nimport { register } from \"node:module\";\nimport path from \"path\";\nimport { Pool } from \"pg\";\nimport { initFeatureFlags } from \"./feature-flags.js\";\nimport { getRenownSignerConfig, initRenown } from \"./renown.js\";\nimport type { StartServerOptions, SwitchboardReactor } from \"./types.js\";\nimport { addDefaultDrive, isPostgresUrl } from \"./utils.mjs\";\n\nconst defaultLogger = childLogger([\"switchboard\"]);\n\nconst LogLevel = (process.env.LOG_LEVEL as ILogger[\"level\"] | \"\") || \"info\";\nsetLogLevel(LogLevel);\n\ndotenv.config();\n\n// Feature flag constants\nconst DOCUMENT_MODEL_SUBGRAPHS_ENABLED = \"DOCUMENT_MODEL_SUBGRAPHS_ENABLED\";\nconst DOCUMENT_MODEL_SUBGRAPHS_ENABLED_DEFAULT = true;\nconst REQUIRE_SIGNATURES = \"REQUIRE_SIGNATURES\";\nconst REQUIRE_SIGNATURES_DEFAULT = false;\n\nif (process.env.SENTRY_DSN) {\n defaultLogger.info(\n \"Initialized Sentry with env: @env\",\n process.env.SENTRY_ENV,\n );\n Sentry.init({\n dsn: process.env.SENTRY_DSN,\n environment: process.env.SENTRY_ENV,\n // Match the version tag uploaded by release-branch.yml so source maps\n // resolve. Populated by the CI (WORKSPACE_VERSION) or npm at runtime.\n release:\n process.env.SENTRY_RELEASE ||\n (process.env.npm_package_version\n ? `v${process.env.npm_package_version}`\n : undefined),\n });\n}\n\nconst DEFAULT_PORT = process.env.PORT ? 
Number(process.env.PORT) : 4001;\n\n// How many ports forward from the requested one we will try before giving up.\nconst PORT_FALLBACK_ATTEMPTS = 20;\n\n/**\n * Attempt to bind a throwaway TCP server to the given port. Resolves true if\n * the port is free, false if the OS reports it in use. Any other error is\n * surfaced so we don't silently mask real issues (permissions, bad host, …).\n */\nexport function isPortAvailable(port: number): Promise<boolean> {\n return new Promise((resolve, reject) => {\n const tester = net.createServer();\n tester.once(\"error\", (err: NodeJS.ErrnoException) => {\n if (err.code === \"EADDRINUSE\" || err.code === \"EACCES\") {\n resolve(false);\n } else {\n reject(err);\n }\n });\n tester.once(\"listening\", () => {\n tester.close(() => resolve(true));\n });\n // Bind on the unspecified IPv6 address so we detect collisions with both\n // IPv6 and IPv4 listeners (Node maps `::` to dual-stack on most systems).\n tester.listen({ port, host: \"::\" });\n });\n}\n\nasync function resolveServerPort(\n requested: number,\n strictPort: boolean,\n logger: ILogger,\n): Promise<number> {\n if (strictPort) return requested;\n for (let i = 0; i < PORT_FALLBACK_ATTEMPTS; i++) {\n const candidate = requested + i;\n if (await isPortAvailable(candidate)) {\n if (candidate !== requested) {\n logger.info(\n `Port ${requested} is in use. Falling back to port ${candidate}.`,\n );\n }\n return candidate;\n }\n }\n // Couldn't find a free port in the window; let the caller surface the\n // original EADDRINUSE when the real bind attempts runs.\n return requested;\n}\n\nasync function initServer(\n serverPort: number,\n options: StartServerOptions,\n renown: IRenown | null,\n) {\n // Register the global MeterProvider before ReactorInstrumentation is\n // constructed. 
setGlobalMeterProvider is a one-way door — once set it cannot\n // be unset — so this must happen before initializeClient calls\n // instrumentation.start() → createMetrics() → metrics.getMeter().\n if (options.meterProvider) {\n metrics.setGlobalMeterProvider(options.meterProvider);\n }\n\n const {\n dev,\n packages = [],\n remoteDrives = [],\n logger = defaultLogger,\n } = options;\n logger.level = LogLevel;\n const dbPath = options.dbPath ?? process.env.DATABASE_URL;\n\n // use postgres url for read model storage if available, otherwise use local PGlite path\n const readModelPath = dbPath || \".ph/read-storage\";\n\n // HTTP registry package loading\n const configPath =\n options.configFile ?? path.join(process.cwd(), \"powerhouse.config.json\");\n const config = getConfig(configPath);\n const registryUrl = process.env.PH_REGISTRY_URL ?? config.packageRegistryUrl;\n const registryPackages = process.env.PH_REGISTRY_PACKAGES;\n const dynamicModelLoading =\n options.dynamicModelLoading ?? 
process.env.DYNAMIC_MODEL_LOADING === \"true\";\n let httpLoader: HttpPackageLoader | undefined;\n\n if (registryUrl) {\n // Register HTTP/HTTPS module loader hooks for dynamic package imports\n register(httpsHooksPath, import.meta.url);\n httpLoader = new HttpPackageLoader({ registryUrl });\n registryPackages?.split(\",\").forEach((p) => {\n const name = p.trim();\n if (!packages.includes(name)) {\n packages.push(name);\n }\n });\n }\n\n const reactorLogger = logger.child([\"reactor\"]);\n const initializeClient = async (documentModels: DocumentModelModule[]) => {\n const eventBus = new EventBus();\n const builder = new ReactorBuilder()\n .withEventBus(eventBus)\n .withDocumentModels(\n getUniqueDocumentModels([\n documentModelDocumentModelModule,\n driveDocumentModelModule,\n ...vetraDocumentModels,\n ...documentModels,\n ]),\n )\n .withChannelScheme(ChannelScheme.SWITCHBOARD)\n .withSignalHandlers()\n .withLogger(reactorLogger);\n\n const maxSkipThreshold = parseInt(process.env.MAX_SKIP_THRESHOLD ?? \"\", 10);\n if (!isNaN(maxSkipThreshold) && maxSkipThreshold > 0) {\n builder.withExecutorConfig({ maxSkipThreshold });\n logger.info(`Reactor maxSkipThreshold set to ${maxSkipThreshold}`);\n }\n\n const reactorDbUrl = process.env.PH_REACTOR_DATABASE_URL;\n if (reactorDbUrl && isPostgresUrl(reactorDbUrl)) {\n const connectionString = reactorDbUrl.includes(\"?\")\n ? 
reactorDbUrl\n : `${reactorDbUrl}?sslmode=disable`;\n const pool = new Pool({ connectionString });\n const kysely = new Kysely<Database>({\n dialect: new PostgresDialect({ pool }),\n });\n builder.withKysely(kysely);\n logger.info(\"Using PostgreSQL for reactor storage\");\n } else {\n const pglitePath = \"./.ph/reactor-storage\";\n const pglite = new PGlite(pglitePath);\n const kysely = new Kysely<Database>({\n dialect: new PGliteDialect(pglite),\n });\n builder.withKysely(kysely);\n logger.info(\"Using PGlite for reactor storage\");\n }\n\n if (httpLoader && dynamicModelLoading) {\n builder.withDocumentModelLoader(httpLoader.documentModelLoader);\n }\n\n const clientBuilder = new ReactorClientBuilder().withReactorBuilder(\n builder,\n );\n\n if (renown) {\n const signerConfig = getRenownSignerConfig(\n renown,\n options.identity?.requireSignatures,\n );\n clientBuilder.withSigner(signerConfig);\n }\n\n const module = await clientBuilder.buildModule();\n\n if (module.reactorModule) {\n const instrumentation = new ReactorInstrumentation(module.reactorModule);\n instrumentation.start();\n reactorLogger.info(\"Reactor metrics instrumentation started\");\n }\n\n return module;\n };\n\n let defaultDriveUrl: undefined | string = undefined;\n\n // TODO get path from powerhouse config\n // start vite server if dev mode is enabled\n const basePath = process.cwd();\n const viteLogger = createViteLogger(logger);\n const vite = dev\n ? 
await startViteServer(process.cwd(), viteLogger)\n : undefined;\n\n // get paths to local document models\n if (!options.disableLocalPackages) {\n packages.push(basePath);\n }\n\n // create loaders\n const packageLoaders: IPackageLoader[] = [];\n if (vite) {\n packageLoaders.push(VitePackageLoader.build(vite));\n } else {\n packageLoaders.push(new ImportPackageLoader());\n }\n if (httpLoader) {\n packageLoaders.push(httpLoader);\n registryPackages?.split(\",\").forEach((p) => {\n const name = p.trim();\n if (!packages.includes(name)) {\n packages.push(name);\n }\n });\n }\n\n const apiLogger = logger.child([\"reactor-api\"]);\n const api = await initializeAndStartAPI(\n initializeClient,\n {\n port: serverPort,\n dbPath: readModelPath,\n https: options.https,\n packageLoaders: packageLoaders.length > 0 ? packageLoaders : undefined,\n packages: packages,\n processorConfig: options.processorConfig,\n processors: {\n \"@powerhousedao/vetra\": [vetraProcessorFactory],\n },\n configFile:\n options.configFile ??\n path.join(process.cwd(), \"powerhouse.config.json\"),\n mcp: options.mcp ?? 
true,\n logger: apiLogger,\n enableDocumentModelSubgraphs: options.enableDocumentModelSubgraphs,\n },\n \"switchboard\",\n );\n\n if (process.env.SENTRY_DSN) {\n // Register Sentry error handler after all routes are established.\n // The adapter calls the framework-specific Sentry setup internally.\n api.httpAdapter.setupSentryErrorHandler(Sentry);\n }\n\n const { client, graphqlManager, documentModelRegistry } = api;\n\n // Wire up dynamic package management if HTTP loader is configured\n if (httpLoader) {\n const packageManagementService = new PackageManagementService({\n defaultRegistryUrl: registryUrl,\n httpLoader,\n documentModelRegistry,\n });\n\n packageManagementService.setOnModelsChanged(() => {\n graphqlManager.regenerateDocumentModelSubgraphs().catch(logger.error);\n });\n\n const packagesSubgraph = new PackagesSubgraph({\n relationalDb: undefined as never,\n analyticsStore: undefined as never,\n reactorClient: client,\n graphqlManager,\n syncManager: api.syncManager,\n path: graphqlManager.getBasePath(),\n packageManagementService,\n });\n\n void graphqlManager\n .registerSubgraphInstance(packagesSubgraph, \"graphql\", false)\n .then(() => graphqlManager.updateRouter())\n .catch((error: unknown) => {\n logger.error(\"Failed to register packages subgraph: @error\", error);\n });\n }\n\n // Create default drive if provided\n if (options.drive) {\n if (!renown) {\n throw new Error(\"Cannot create default drive without Renown identity\");\n }\n\n defaultDriveUrl = await addDefaultDrive(client, options.drive, serverPort);\n }\n\n // add vite middleware after express app is initialized if applicable\n if (vite) {\n api.httpAdapter.mountRawMiddleware(vite.middlewares);\n }\n\n // Connect to remote drives AFTER packages are loaded\n if (remoteDrives.length > 0) {\n for (const remoteDriveUrl of remoteDrives) {\n let driveId: string | undefined;\n\n try {\n const { syncManager } = api;\n const parsed = parseDriveUrl(remoteDriveUrl);\n driveId = parsed.driveId;\n 
const remoteName = `remote-drive-${driveId}-${crypto.randomUUID()}`;\n await syncManager.add(remoteName, driveCollectionId(\"main\", driveId), {\n type: \"gql\",\n parameters: { url: parsed.graphqlEndpoint },\n });\n logger.debug(\"Remote drive @remoteDriveUrl synced\", remoteDriveUrl);\n } catch (error) {\n if (\n error instanceof Error &&\n error.message.includes(\"already exists\")\n ) {\n logger.debug(\n \"Remote drive already added: @remoteDriveUrl\",\n remoteDriveUrl,\n );\n driveId = remoteDriveUrl.split(\"/\").pop();\n } else {\n logger.error(\n \"Failed to connect to remote drive @remoteDriveUrl: @error\",\n remoteDriveUrl,\n error,\n );\n }\n } finally {\n // Construct local URL once in finally block\n if (!defaultDriveUrl && driveId) {\n const protocol = options.https ? \"https\" : \"http\";\n defaultDriveUrl = `${protocol}://localhost:${serverPort}/d/${driveId}`;\n }\n }\n }\n }\n\n return {\n defaultDriveUrl,\n api,\n reactor: client,\n renown,\n port: serverPort,\n };\n}\n\nexport const startSwitchboard = async (\n options: StartServerOptions = {},\n): Promise<SwitchboardReactor> => {\n const requestedPort = options.port ?? DEFAULT_PORT;\n const logger = options.logger ?? defaultLogger;\n const serverPort = await resolveServerPort(\n requestedPort,\n options.strictPort ?? 
false,\n logger,\n );\n\n // Initialize feature flags\n const featureFlags = await initFeatureFlags();\n\n const enableDocumentModelSubgraphs = await featureFlags.getBooleanValue(\n DOCUMENT_MODEL_SUBGRAPHS_ENABLED,\n options.enableDocumentModelSubgraphs ??\n DOCUMENT_MODEL_SUBGRAPHS_ENABLED_DEFAULT,\n );\n\n options.enableDocumentModelSubgraphs = enableDocumentModelSubgraphs;\n\n const requireSignatures =\n options.identity?.requireSignatures ??\n (await featureFlags.getBooleanValue(\n REQUIRE_SIGNATURES,\n REQUIRE_SIGNATURES_DEFAULT,\n ));\n options.identity = { ...options.identity, requireSignatures };\n\n logger.info(\n \"Feature flags: @flags\",\n JSON.stringify(\n {\n DOCUMENT_MODEL_SUBGRAPHS_ENABLED: enableDocumentModelSubgraphs,\n REQUIRE_SIGNATURES: requireSignatures,\n },\n null,\n 2,\n ),\n );\n\n // Initialize Renown if identity options are provided or keypair exists\n let renown: IRenown | null = null;\n try {\n renown = await initRenown(options.identity);\n } catch (e) {\n logger.warn(\"Failed to initialize ConnectCrypto: @error\", e);\n if (options.identity?.requireExisting) {\n throw new Error(\n 'Identity required but failed to initialize. 
Run \"ph login\" first.',\n );\n }\n }\n\n try {\n return await initServer(serverPort, options, renown);\n } catch (e) {\n Sentry.captureException(e);\n logger.error(\"App crashed: @error\", e);\n throw e;\n }\n};\n\nexport * from \"./types.js\";\n\nif (import.meta.main) {\n await startSwitchboard();\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAGA,eAAsB,mBAAmB;CAEvC,MAAM,WAAW,IAAI,gBAAgB;AAErC,OAAM,YAAY,mBAAmB,SAAS;AAE9C,QAAO,YAAY,WAAW;;;;ACEhC,MAAM,SAAS,YAAY,CAAC,eAAe,SAAS,CAAC;;;;;;AAgBrD,eAAsB,WACpB,UAAyB,EAAE,EACF;CACzB,MAAM,EACJ,aACA,kBAAkB,OAClB,UAAU,uBACR;CAEJ,MAAM,aAAa,IAAI,eAAe,aAAa,EACjD,QACD,CAAC;CAGF,MAAM,kBAAkB,MAAM,WAAW,aAAa;AAEtD,KAAI,CAAC,mBAAmB,gBACtB,OAAM,IAAI,MACR,yFAED;AAGH,KAAI,CAAC,gBACH,QAAO,KAAK,0DAA0D;CAGxE,MAAM,eAAe,MAAM,IAAI,qBAAqB,CACjD,mBAAmB,WAAW,CAC9B,OAAO;CAEV,MAAM,SAAS,MAAM,IAAI,cAAc,eAAe,EAAE,CAAC,CACtD,WAAW,aAAa,CACxB,YAAY,QAAQ,CACpB,OAAO;AAEV,QAAO,KAAK,0CAA0C,aAAa,IAAI;AAEvE,QAAO;;;;;;;;AAST,SAAgB,sBACd,QACA,kBACc;AACd,QAAO;EACL,QAAQ,OAAO;EACf,UAAU,wBAAwB,iBAAiB;EACpD;;;;AC5BH,MAAM,gBAAgB,YAAY,CAAC,cAAc,CAAC;AAElD,MAAM,WAAY,QAAQ,IAAI,aAAuC;AACrE,YAAY,SAAS;AAErB,OAAO,QAAQ;AAGf,MAAM,mCAAmC;AACzC,MAAM,2CAA2C;AACjD,MAAM,qBAAqB;AAC3B,MAAM,6BAA6B;AAEnC,IAAI,QAAQ,IAAI,YAAY;AAC1B,eAAc,KACZ,qCACA,QAAQ,IAAI,WACb;AACD,QAAO,KAAK;EACV,KAAK,QAAQ,IAAI;EACjB,aAAa,QAAQ,IAAI;EAGzB,SACE,QAAQ,IAAI,mBACX,QAAQ,IAAI,sBACT,IAAI,QAAQ,IAAI,wBAChB,KAAA;EACP,CAAC;;AAGJ,MAAM,eAAe,QAAQ,IAAI,OAAO,OAAO,QAAQ,IAAI,KAAK,GAAG;AAGnE,MAAM,yBAAyB;;;;;;AAO/B,SAAgB,gBAAgB,MAAgC;AAC9D,QAAO,IAAI,SAAS,SAAS,WAAW;EACtC,MAAM,SAAS,IAAI,cAAc;AACjC,SAAO,KAAK,UAAU,QAA+B;AACnD,OAAI,IAAI,SAAS,gBAAgB,IAAI,SAAS,SAC5C,SAAQ,MAAM;OAEd,QAAO,IAAI;IAEb;AACF,SAAO,KAAK,mBAAmB;AAC7B,UAAO,YAAY,QAAQ,KAAK,CAAC;IACjC;AAGF,SAAO,OAAO;GAAE;GAAM,MAAM;GAAM,CAAC;GACnC;;AAGJ,eAAe,kBACb,WACA,YACA,QACiB;AACjB,KAAI,WAAY,QAAO;AACvB,MAAK,IAAI,IAAI,GAAG,IAAI,wBAAwB,KAAK;EAC/C,MAAM,YAAY,YAAY;AAC9B,MAAI,MAAM,gBAAgB,UAAU,EAAE;AACpC,OAAI,cAAc,UAChB,QAAO,KACL,QAAQ,UAAU,mCAAmC,UAAU,GAChE;AAEH,UAAO;;;AAKX,QAAO;;AAGT
,eAAe,WACb,YACA,SACA,QACA;AAKA,KAAI,QAAQ,cACV,SAAQ,uBAAuB,QAAQ,cAAc;CAGvD,MAAM,EACJ,KACA,WAAW,EAAE,EACb,eAAe,EAAE,EACjB,SAAS,kBACP;AACJ,QAAO,QAAQ;CAIf,MAAM,iBAHS,QAAQ,UAAU,QAAQ,IAAI,iBAGb;CAKhC,MAAM,SAAS,UADb,QAAQ,cAAc,KAAK,KAAK,QAAQ,KAAK,EAAE,yBAAyB,CACtC;CACpC,MAAM,cAAc,QAAQ,IAAI,mBAAmB,OAAO;CAC1D,MAAM,mBAAmB,QAAQ,IAAI;CACrC,MAAM,sBACJ,QAAQ,uBAAuB,QAAQ,IAAI,0BAA0B;CACvE,IAAI;AAEJ,KAAI,aAAa;AAEf,WAAS,gBAAgB,OAAO,KAAK,IAAI;AACzC,eAAa,IAAI,kBAAkB,EAAE,aAAa,CAAC;AACnD,oBAAkB,MAAM,IAAI,CAAC,SAAS,MAAM;GAC1C,MAAM,OAAO,EAAE,MAAM;AACrB,OAAI,CAAC,SAAS,SAAS,KAAK,CAC1B,UAAS,KAAK,KAAK;IAErB;;CAGJ,MAAM,gBAAgB,OAAO,MAAM,CAAC,UAAU,CAAC;CAC/C,MAAM,mBAAmB,OAAO,qBAA0C;EACxE,MAAM,WAAW,IAAI,UAAU;EAC/B,MAAM,UAAU,IAAI,gBAAgB,CACjC,aAAa,SAAS,CACtB,mBACC,wBAAwB;GACtB;GACA;GACA,GAAGA;GACH,GAAGC;GACJ,CAAC,CACH,CACA,kBAAkB,cAAc,YAAY,CAC5C,oBAAoB,CACpB,WAAW,cAAc;EAE5B,MAAM,mBAAmB,SAAS,QAAQ,IAAI,sBAAsB,IAAI,GAAG;AAC3E,MAAI,CAAC,MAAM,iBAAiB,IAAI,mBAAmB,GAAG;AACpD,WAAQ,mBAAmB,EAAE,kBAAkB,CAAC;AAChD,UAAO,KAAK,mCAAmC,mBAAmB;;EAGpE,MAAM,eAAe,QAAQ,IAAI;AACjC,MAAI,gBAAgB,cAAc,aAAa,EAAE;GAK/C,MAAM,SAAS,IAAI,OAAiB,EAClC,SAAS,IAAI,gBAAgB,EAAE,MAFpB,IAAI,KAAK,EAAE,kBAHC,aAAa,SAAS,IAAI,GAC/C,eACA,GAAG,aAAa,mBACsB,CAAC,EAEJ,CAAC,EACvC,CAAC;AACF,WAAQ,WAAW,OAAO;AAC1B,UAAO,KAAK,uCAAuC;SAC9C;GAGL,MAAM,SAAS,IAAI,OAAiB,EAClC,SAAS,IAAI,cAFA,IAAI,OADA,wBACkB,CAED,EACnC,CAAC;AACF,WAAQ,WAAW,OAAO;AAC1B,UAAO,KAAK,mCAAmC;;AAGjD,MAAI,cAAc,oBAChB,SAAQ,wBAAwB,WAAW,oBAAoB;EAGjE,MAAM,gBAAgB,IAAI,sBAAsB,CAAC,mBAC/C,QACD;AAED,MAAI,QAAQ;GACV,MAAM,eAAe,sBACnB,QACA,QAAQ,UAAU,kBACnB;AACD,iBAAc,WAAW,aAAa;;EAGxC,MAAM,SAAS,MAAM,cAAc,aAAa;AAEhD,MAAI,OAAO,eAAe;AACA,OAAI,uBAAuB,OAAO,cAAc,CACxD,OAAO;AACvB,iBAAc,KAAK,0CAA0C;;AAG/D,SAAO;;CAGT,IAAI,kBAAsC,KAAA;CAI1C,MAAM,WAAW,QAAQ,KAAK;CAC9B,MAAM,aAAa,iBAAiB,OAAO;CAC3C,MAAM,OAAO,MACT,MAAM,gBAAgB,QAAQ,KAAK,EAAE,WAAW,GAChD,KAAA;AAGJ,KAAI,CAAC,QAAQ,qBACX,UAAS,KAAK,SAAS;CAIzB,MAAM,iBAAmC,EAAE;AAC3C,KAAI,KACF,gBAAe,KAAK,kBAAkB,MAAM,KAAK,CAAC;KAElD,gBAAe,KAAK,IAAI,qBAAqB,CAAC;AAEhD,KAAI,YAAY;AAC
d,iBAAe,KAAK,WAAW;AAC/B,oBAAkB,MAAM,IAAI,CAAC,SAAS,MAAM;GAC1C,MAAM,OAAO,EAAE,MAAM;AACrB,OAAI,CAAC,SAAS,SAAS,KAAK,CAC1B,UAAS,KAAK,KAAK;IAErB;;CAGJ,MAAM,YAAY,OAAO,MAAM,CAAC,cAAc,CAAC;CAC/C,MAAM,MAAM,MAAM,sBAChB,kBACA;EACE,MAAM;EACN,QAAQ;EACR,OAAO,QAAQ;EACf,gBAAgB,eAAe,SAAS,IAAI,iBAAiB,KAAA;EACnD;EACV,iBAAiB,QAAQ;EACzB,YAAY,EACV,wBAAwB,CAACC,iBAAsB,EAChD;EACD,YACE,QAAQ,cACR,KAAK,KAAK,QAAQ,KAAK,EAAE,yBAAyB;EACpD,KAAK,QAAQ,OAAO;EACpB,QAAQ;EACR,8BAA8B,QAAQ;EACvC,EACD,cACD;AAED,KAAI,QAAQ,IAAI,WAGd,KAAI,YAAY,wBAAwB,OAAO;CAGjD,MAAM,EAAE,QAAQ,gBAAgB,0BAA0B;AAG1D,KAAI,YAAY;EACd,MAAM,2BAA2B,IAAI,yBAAyB;GAC5D,oBAAoB;GACpB;GACA;GACD,CAAC;AAEF,2BAAyB,yBAAyB;AAChD,kBAAe,kCAAkC,CAAC,MAAM,OAAO,MAAM;IACrE;EAEF,MAAM,mBAAmB,IAAI,iBAAiB;GAC5C,cAAc,KAAA;GACd,gBAAgB,KAAA;GAChB,eAAe;GACf;GACA,aAAa,IAAI;GACjB,MAAM,eAAe,aAAa;GAClC;GACD,CAAC;AAEG,iBACF,yBAAyB,kBAAkB,WAAW,MAAM,CAC5D,WAAW,eAAe,cAAc,CAAC,CACzC,OAAO,UAAmB;AACzB,UAAO,MAAM,gDAAgD,MAAM;IACnE;;AAIN,KAAI,QAAQ,OAAO;AACjB,MAAI,CAAC,OACH,OAAM,IAAI,MAAM,sDAAsD;AAGxE,oBAAkB,MAAM,gBAAgB,QAAQ,QAAQ,OAAO,WAAW;;AAI5E,KAAI,KACF,KAAI,YAAY,mBAAmB,KAAK,YAAY;AAItD,KAAI,aAAa,SAAS,EACxB,MAAK,MAAM,kBAAkB,cAAc;EACzC,IAAI;AAEJ,MAAI;GACF,MAAM,EAAE,gBAAgB;GACxB,MAAM,SAAS,cAAc,eAAe;AAC5C,aAAU,OAAO;GACjB,MAAM,aAAa,gBAAgB,QAAQ,GAAG,OAAO,YAAY;AACjE,SAAM,YAAY,IAAI,YAAY,kBAAkB,QAAQ,QAAQ,EAAE;IACpE,MAAM;IACN,YAAY,EAAE,KAAK,OAAO,iBAAiB;IAC5C,CAAC;AACF,UAAO,MAAM,uCAAuC,eAAe;WAC5D,OAAO;AACd,OACE,iBAAiB,SACjB,MAAM,QAAQ,SAAS,iBAAiB,EACxC;AACA,WAAO,MACL,+CACA,eACD;AACD,cAAU,eAAe,MAAM,IAAI,CAAC,KAAK;SAEzC,QAAO,MACL,6DACA,gBACA,MACD;YAEK;AAER,OAAI,CAAC,mBAAmB,QAEtB,mBAAkB,GADD,QAAQ,QAAQ,UAAU,OACb,eAAe,WAAW,KAAK;;;AAMrE,QAAO;EACL;EACA;EACA,SAAS;EACT;EACA,MAAM;EACP;;AAGH,MAAa,mBAAmB,OAC9B,UAA8B,EAAE,KACA;CAChC,MAAM,gBAAgB,QAAQ,QAAQ;CACtC,MAAM,SAAS,QAAQ,UAAU;CACjC,MAAM,aAAa,MAAM,kBACvB,eACA,QAAQ,cAAc,OACtB,OACD;CAGD,MAAM,eAAe,MAAM,kBAAkB;CAE7C,MAAM,+BAA+B,MAAM,aAAa,gBACtD,kCACA,QAAQ,gCACN,yCACH;AAED,SAAQ,+BAA+B;CAEvC,MAAM,oBACJ,QAAQ,UAAU,qBACjB,MAAM,aAAa,gBAClB,oBACA,2BACD;A
ACH,SAAQ,WAAW;EAAE,GAAG,QAAQ;EAAU;EAAmB;AAE7D,QAAO,KACL,yBACA,KAAK,UACH;EACE,kCAAkC;EAClC,oBAAoB;EACrB,EACD,MACA,EACD,CACF;CAGD,IAAI,SAAyB;AAC7B,KAAI;AACF,WAAS,MAAM,WAAW,QAAQ,SAAS;UACpC,GAAG;AACV,SAAO,KAAK,8CAA8C,EAAE;AAC5D,MAAI,QAAQ,UAAU,gBACpB,OAAM,IAAI,MACR,sEACD;;AAIL,KAAI;AACF,SAAO,MAAM,WAAW,YAAY,SAAS,OAAO;UAC7C,GAAG;AACV,SAAO,iBAAiB,EAAE;AAC1B,SAAO,MAAM,uBAAuB,EAAE;AACtC,QAAM;;;AAMV,IAAI,OAAO,KAAK,KACd,OAAM,kBAAkB"}