@powerhousedao/switchboard 6.0.0-dev.19 → 6.0.0-dev.190
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Auth.md +45 -27
- package/CHANGELOG.md +1487 -5
- package/README.md +13 -12
- package/dist/index.d.mts +1 -0
- package/dist/index.mjs +129 -0
- package/dist/index.mjs.map +1 -0
- package/dist/install-packages.d.mts +1 -0
- package/dist/install-packages.mjs +31 -0
- package/dist/install-packages.mjs.map +1 -0
- package/dist/migrate.d.mts +1 -0
- package/dist/migrate.mjs +55 -0
- package/dist/migrate.mjs.map +1 -0
- package/dist/server-DxVTcVoC.mjs +255 -0
- package/dist/server-DxVTcVoC.mjs.map +1 -0
- package/dist/server.d.mts +76 -0
- package/dist/server.d.mts.map +1 -0
- package/dist/server.mjs +4 -0
- package/dist/utils-DFl0ezBT.mjs +44 -0
- package/dist/utils-DFl0ezBT.mjs.map +1 -0
- package/dist/utils.d.mts +9 -0
- package/dist/utils.d.mts.map +1 -0
- package/dist/utils.mjs +2 -0
- package/package.json +53 -39
- package/test/metrics.test.ts +202 -0
- package/tsconfig.json +11 -9
- package/tsdown.config.ts +16 -0
- package/vitest.config.ts +11 -0
- package/Dockerfile +0 -86
- package/dist/src/clients/redis.d.ts +0 -5
- package/dist/src/clients/redis.d.ts.map +0 -1
- package/dist/src/clients/redis.js +0 -48
- package/dist/src/clients/redis.js.map +0 -1
- package/dist/src/config.d.ts +0 -12
- package/dist/src/config.d.ts.map +0 -1
- package/dist/src/config.js +0 -33
- package/dist/src/config.js.map +0 -1
- package/dist/src/connect-crypto.d.ts +0 -41
- package/dist/src/connect-crypto.d.ts.map +0 -1
- package/dist/src/connect-crypto.js +0 -127
- package/dist/src/connect-crypto.js.map +0 -1
- package/dist/src/feature-flags.d.ts +0 -2
- package/dist/src/feature-flags.d.ts.map +0 -1
- package/dist/src/feature-flags.js +0 -9
- package/dist/src/feature-flags.js.map +0 -1
- package/dist/src/index.d.ts +0 -3
- package/dist/src/index.d.ts.map +0 -1
- package/dist/src/index.js +0 -21
- package/dist/src/index.js.map +0 -1
- package/dist/src/install-packages.d.ts +0 -2
- package/dist/src/install-packages.d.ts.map +0 -1
- package/dist/src/install-packages.js +0 -36
- package/dist/src/install-packages.js.map +0 -1
- package/dist/src/migrate.d.ts +0 -3
- package/dist/src/migrate.d.ts.map +0 -1
- package/dist/src/migrate.js +0 -65
- package/dist/src/migrate.js.map +0 -1
- package/dist/src/profiler.d.ts +0 -4
- package/dist/src/profiler.d.ts.map +0 -1
- package/dist/src/profiler.js +0 -17
- package/dist/src/profiler.js.map +0 -1
- package/dist/src/server.d.ts +0 -6
- package/dist/src/server.d.ts.map +0 -1
- package/dist/src/server.js +0 -304
- package/dist/src/server.js.map +0 -1
- package/dist/src/types.d.ts +0 -64
- package/dist/src/types.d.ts.map +0 -1
- package/dist/src/types.js +0 -2
- package/dist/src/types.js.map +0 -1
- package/dist/src/utils.d.ts +0 -6
- package/dist/src/utils.d.ts.map +0 -1
- package/dist/src/utils.js +0 -92
- package/dist/src/utils.js.map +0 -1
- package/dist/tsconfig.tsbuildinfo +0 -1
- package/entrypoint.sh +0 -17
package/README.md
CHANGED
|
@@ -50,6 +50,7 @@ docker compose -f packages/reactor/docker-compose.yml up -d
|
|
|
50
50
|
```
|
|
51
51
|
|
|
52
52
|
This starts:
|
|
53
|
+
|
|
53
54
|
- PostgreSQL on port `5433` (mapped from container port 5432)
|
|
54
55
|
- Adminer (database UI) on port `8080`
|
|
55
56
|
|
|
@@ -94,22 +95,21 @@ pnpm add -g @powerhousedao/switchboard
|
|
|
94
95
|
|
|
95
96
|
## 🏃♂️ Quick Start
|
|
96
97
|
|
|
97
|
-
|
|
98
98
|
## ⚙️ Configuration
|
|
99
99
|
|
|
100
100
|
### Environment Variables
|
|
101
101
|
|
|
102
|
-
| Variable
|
|
103
|
-
|
|
|
104
|
-
| `PORT`
|
|
105
|
-
| `DATABASE_URL`
|
|
106
|
-
| `PH_REACTOR_DATABASE_URL`
|
|
107
|
-
| `REDIS_URL`
|
|
108
|
-
| `REDIS_TLS_URL`
|
|
109
|
-
| `SENTRY_DSN`
|
|
110
|
-
| `SENTRY_ENV`
|
|
111
|
-
| `PYROSCOPE_SERVER_ADDRESS`
|
|
112
|
-
| `FEATURE_REACTORV2_ENABLED`
|
|
102
|
+
| Variable | Description | Default |
|
|
103
|
+
| --------------------------- | ---------------------------------- | --------------------- |
|
|
104
|
+
| `PORT` | Server port | `4001` |
|
|
105
|
+
| `DATABASE_URL` | Database connection string | `./.ph/drive-storage` |
|
|
106
|
+
| `PH_REACTOR_DATABASE_URL` | PostgreSQL URL (takes precedence) | - |
|
|
107
|
+
| `REDIS_URL` | Redis connection URL | - |
|
|
108
|
+
| `REDIS_TLS_URL` | Redis TLS connection URL | - |
|
|
109
|
+
| `SENTRY_DSN` | Sentry DSN for error tracking | - |
|
|
110
|
+
| `SENTRY_ENV` | Sentry environment | - |
|
|
111
|
+
| `PYROSCOPE_SERVER_ADDRESS` | Pyroscope server address | - |
|
|
112
|
+
| `FEATURE_REACTORV2_ENABLED` | Enable Reactor v2 subgraph feature | `false` |
|
|
113
113
|
|
|
114
114
|
### Authentication Configuration
|
|
115
115
|
|
|
@@ -245,6 +245,7 @@ ph switchboard --db-path postgresql://user:pass@localhost:5432/db --migrate-stat
|
|
|
245
245
|
#### Environment Variables for Migrations
|
|
246
246
|
|
|
247
247
|
The migration commands check for a PostgreSQL URL in this order:
|
|
248
|
+
|
|
248
249
|
1. `PH_REACTOR_DATABASE_URL`
|
|
249
250
|
2. `DATABASE_URL`
|
|
250
251
|
3. Config file (`powerhouse.config.json` -> `switchboard.database.url`)
|
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { };
|
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { t as startSwitchboard } from "./server-DxVTcVoC.mjs";
|
|
3
|
+
import "./utils-DFl0ezBT.mjs";
|
|
4
|
+
import * as Sentry from "@sentry/node";
|
|
5
|
+
import { childLogger } from "document-model";
|
|
6
|
+
import dotenv from "dotenv";
|
|
7
|
+
import { getConfig } from "@powerhousedao/config/node";
|
|
8
|
+
import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-http";
|
|
9
|
+
import { Resource } from "@opentelemetry/resources";
|
|
10
|
+
import { MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
|
|
11
|
+
//#region src/config.ts
|
|
12
|
+
dotenv.config();
|
|
13
|
+
const { switchboard } = getConfig();
|
|
14
|
+
const config = {
|
|
15
|
+
database: { url: process.env.PH_SWITCHBOARD_DATABASE_URL ?? switchboard?.database?.url ?? "dev.db" },
|
|
16
|
+
port: process.env.PH_SWITCHBOARD_PORT && !isNaN(Number(process.env.PH_SWITCHBOARD_PORT)) ? Number(process.env.PH_SWITCHBOARD_PORT) : switchboard?.port ?? 4001,
|
|
17
|
+
mcp: true,
|
|
18
|
+
drive: {
|
|
19
|
+
id: "powerhouse",
|
|
20
|
+
slug: "powerhouse",
|
|
21
|
+
global: {
|
|
22
|
+
name: "Powerhouse",
|
|
23
|
+
icon: "https://ipfs.io/ipfs/QmcaTDBYn8X2psGaXe7iQ6qd8q6oqHLgxvMX9yXf7f9uP7"
|
|
24
|
+
},
|
|
25
|
+
local: {
|
|
26
|
+
availableOffline: true,
|
|
27
|
+
listeners: [],
|
|
28
|
+
sharingType: "public",
|
|
29
|
+
triggers: []
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
};
|
|
33
|
+
//#endregion
|
|
34
|
+
//#region src/metrics.ts
|
|
35
|
+
const logger$1 = childLogger(["switchboard", "metrics"]);
|
|
36
|
+
function createMeterProviderFromEnv(env) {
|
|
37
|
+
const endpoint = env.OTEL_EXPORTER_OTLP_ENDPOINT;
|
|
38
|
+
if (!endpoint) return void 0;
|
|
39
|
+
const parsed = parseInt(env.OTEL_METRIC_EXPORT_INTERVAL ?? "", 10);
|
|
40
|
+
const exportIntervalMillis = Number.isFinite(parsed) && parsed > 0 ? parsed : 5e3;
|
|
41
|
+
const base = endpoint.replace(/\/$/, "");
|
|
42
|
+
const exporterUrl = base.endsWith("/v1/metrics") ? base : `${base}/v1/metrics`;
|
|
43
|
+
logger$1.info(`Initializing OpenTelemetry metrics exporter at: ${endpoint}`);
|
|
44
|
+
const meterProvider = new MeterProvider({
|
|
45
|
+
resource: new Resource({ "service.name": env.OTEL_SERVICE_NAME ?? "switchboard" }),
|
|
46
|
+
readers: [new PeriodicExportingMetricReader({
|
|
47
|
+
exporter: new OTLPMetricExporter({ url: exporterUrl }),
|
|
48
|
+
exportIntervalMillis,
|
|
49
|
+
exportTimeoutMillis: Math.max(exportIntervalMillis - 250, 1)
|
|
50
|
+
})]
|
|
51
|
+
});
|
|
52
|
+
logger$1.info(`Metrics export enabled (interval: ${exportIntervalMillis}ms)`);
|
|
53
|
+
return meterProvider;
|
|
54
|
+
}
|
|
55
|
+
//#endregion
|
|
56
|
+
//#region src/profiler.ts
|
|
57
|
+
async function initProfilerFromEnv(env) {
|
|
58
|
+
const { PYROSCOPE_SERVER_ADDRESS: serverAddress, PYROSCOPE_APPLICATION_NAME: appName, PYROSCOPE_USER: basicAuthUser, PYROSCOPE_PASSWORD: basicAuthPassword, PYROSCOPE_WALL_ENABLED: wallEnabled, PYROSCOPE_HEAP_ENABLED: heapEnabled } = env;
|
|
59
|
+
return initProfiler({
|
|
60
|
+
serverAddress,
|
|
61
|
+
appName,
|
|
62
|
+
basicAuthUser,
|
|
63
|
+
basicAuthPassword,
|
|
64
|
+
wall: {
|
|
65
|
+
samplingDurationMs: 1e4,
|
|
66
|
+
samplingIntervalMicros: 1e4,
|
|
67
|
+
collectCpuTime: true
|
|
68
|
+
},
|
|
69
|
+
heap: {
|
|
70
|
+
samplingIntervalBytes: 512 * 1024,
|
|
71
|
+
stackDepth: 64
|
|
72
|
+
}
|
|
73
|
+
}, {
|
|
74
|
+
wallEnabled: wallEnabled !== "false",
|
|
75
|
+
heapEnabled: heapEnabled === "true"
|
|
76
|
+
});
|
|
77
|
+
}
|
|
78
|
+
async function initProfiler(options, flags = {
|
|
79
|
+
wallEnabled: true,
|
|
80
|
+
heapEnabled: false
|
|
81
|
+
}) {
|
|
82
|
+
console.log("Initializing Pyroscope profiler at:", options?.serverAddress);
|
|
83
|
+
console.log(" Wall profiling:", flags.wallEnabled ? "enabled" : "disabled");
|
|
84
|
+
console.log(" Heap profiling:", flags.heapEnabled ? "enabled" : "disabled");
|
|
85
|
+
const { default: Pyroscope } = await import("@pyroscope/nodejs");
|
|
86
|
+
Pyroscope.init(options);
|
|
87
|
+
if (flags.wallEnabled) Pyroscope.startWallProfiling();
|
|
88
|
+
Pyroscope.startCpuProfiling();
|
|
89
|
+
if (flags.heapEnabled) Pyroscope.startHeapProfiling();
|
|
90
|
+
}
|
|
91
|
+
//#endregion
|
|
92
|
+
//#region src/index.mts
|
|
93
|
+
const logger = childLogger(["switchboard"]);
|
|
94
|
+
function ensureNodeVersion(minVersion = "24") {
|
|
95
|
+
const version = process.versions.node;
|
|
96
|
+
if (!version) return;
|
|
97
|
+
if (version < minVersion) {
|
|
98
|
+
console.error(`Node version ${minVersion} or higher is required. Current version: ${version}`);
|
|
99
|
+
process.exit(1);
|
|
100
|
+
}
|
|
101
|
+
}
|
|
102
|
+
ensureNodeVersion("24");
|
|
103
|
+
process.setMaxListeners(0);
|
|
104
|
+
const meterProvider = createMeterProviderFromEnv({
|
|
105
|
+
OTEL_EXPORTER_OTLP_ENDPOINT: process.env.OTEL_EXPORTER_OTLP_ENDPOINT,
|
|
106
|
+
OTEL_METRIC_EXPORT_INTERVAL: process.env.OTEL_METRIC_EXPORT_INTERVAL,
|
|
107
|
+
OTEL_SERVICE_NAME: process.env.OTEL_SERVICE_NAME
|
|
108
|
+
});
|
|
109
|
+
async function shutdown() {
|
|
110
|
+
console.log("\nShutting down...");
|
|
111
|
+
await Promise.race([meterProvider?.shutdown().catch(() => void 0), new Promise((resolve) => setTimeout(resolve, 5e3))]);
|
|
112
|
+
process.exit(0);
|
|
113
|
+
}
|
|
114
|
+
process.on("SIGINT", shutdown);
|
|
115
|
+
process.on("SIGTERM", shutdown);
|
|
116
|
+
if (process.env.PYROSCOPE_SERVER_ADDRESS) try {
|
|
117
|
+
await initProfilerFromEnv(process.env);
|
|
118
|
+
} catch (e) {
|
|
119
|
+
Sentry.captureException(e);
|
|
120
|
+
logger.error("Error starting profiler: @error", e);
|
|
121
|
+
}
|
|
122
|
+
startSwitchboard({
|
|
123
|
+
...config,
|
|
124
|
+
meterProvider
|
|
125
|
+
}).catch(console.error);
|
|
126
|
+
//#endregion
|
|
127
|
+
export {};
|
|
128
|
+
|
|
129
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.mjs","names":["logger"],"sources":["../src/config.ts","../src/metrics.ts","../src/profiler.ts","../src/index.mts"],"sourcesContent":["import dotenv from \"dotenv\";\ndotenv.config();\n\nimport { getConfig } from \"@powerhousedao/config/node\";\nimport type { DriveInput } from \"@powerhousedao/shared/document-drive\";\nconst phConfig = getConfig();\nconst { switchboard } = phConfig;\ninterface Config {\n database: {\n url: string;\n };\n port: number;\n mcp: boolean;\n drive: DriveInput;\n}\nexport const config: Config = {\n database: {\n // url: process.env.PH_SWITCHBOARD_DATABASE_URL ?? switchboard?.database?.url ?? \"dev.db\",\n url:\n process.env.PH_SWITCHBOARD_DATABASE_URL ??\n switchboard?.database?.url ??\n \"dev.db\",\n },\n port:\n process.env.PH_SWITCHBOARD_PORT &&\n !isNaN(Number(process.env.PH_SWITCHBOARD_PORT))\n ? Number(process.env.PH_SWITCHBOARD_PORT)\n : (switchboard?.port ?? 4001),\n mcp: true,\n drive: {\n id: \"powerhouse\",\n slug: \"powerhouse\",\n global: {\n name: \"Powerhouse\",\n icon: \"https://ipfs.io/ipfs/QmcaTDBYn8X2psGaXe7iQ6qd8q6oqHLgxvMX9yXf7f9uP7\",\n },\n local: {\n availableOffline: true,\n listeners: [],\n sharingType: \"public\",\n triggers: [],\n },\n },\n};\n","import { OTLPMetricExporter } from \"@opentelemetry/exporter-metrics-otlp-http\";\nimport { Resource } from \"@opentelemetry/resources\";\nimport {\n MeterProvider,\n PeriodicExportingMetricReader,\n} from \"@opentelemetry/sdk-metrics\";\nimport { childLogger } from \"document-model\";\n\nconst logger = childLogger([\"switchboard\", \"metrics\"]);\n\nexport function createMeterProviderFromEnv(env: {\n OTEL_EXPORTER_OTLP_ENDPOINT?: string;\n OTEL_METRIC_EXPORT_INTERVAL?: string;\n OTEL_SERVICE_NAME?: string;\n}): MeterProvider | undefined {\n const endpoint = env.OTEL_EXPORTER_OTLP_ENDPOINT;\n if (!endpoint) return undefined;\n\n const parsed = parseInt(env.OTEL_METRIC_EXPORT_INTERVAL ?? 
\"\", 10);\n const exportIntervalMillis =\n Number.isFinite(parsed) && parsed > 0 ? parsed : 5_000;\n\n const base = endpoint.replace(/\\/$/, \"\");\n const exporterUrl = base.endsWith(\"/v1/metrics\")\n ? base\n : `${base}/v1/metrics`;\n\n logger.info(`Initializing OpenTelemetry metrics exporter at: ${endpoint}`);\n const meterProvider = new MeterProvider({\n resource: new Resource({\n \"service.name\": env.OTEL_SERVICE_NAME ?? \"switchboard\",\n }),\n readers: [\n new PeriodicExportingMetricReader({\n exporter: new OTLPMetricExporter({\n url: exporterUrl,\n }),\n exportIntervalMillis,\n exportTimeoutMillis: Math.max(exportIntervalMillis - 250, 1),\n }),\n ],\n });\n logger.info(`Metrics export enabled (interval: ${exportIntervalMillis}ms)`);\n return meterProvider;\n}\n","import type { PyroscopeConfig } from \"@pyroscope/nodejs\";\n\nexport async function initProfilerFromEnv(env: typeof process.env) {\n const {\n PYROSCOPE_SERVER_ADDRESS: serverAddress,\n PYROSCOPE_APPLICATION_NAME: appName,\n PYROSCOPE_USER: basicAuthUser,\n PYROSCOPE_PASSWORD: basicAuthPassword,\n PYROSCOPE_WALL_ENABLED: wallEnabled,\n PYROSCOPE_HEAP_ENABLED: heapEnabled,\n } = env;\n\n const options: PyroscopeConfig = {\n serverAddress,\n appName,\n basicAuthUser,\n basicAuthPassword,\n // Wall profiling captures wall-clock time (includes async I/O waits)\n // This shows GraphQL resolvers even when waiting for database\n wall: {\n samplingDurationMs: 10000, // 10 second sampling windows\n samplingIntervalMicros: 10000, // 10ms sampling interval (100 samples/sec)\n collectCpuTime: true, // Also collect CPU time alongside wall time\n },\n // Heap profiling for memory allocation tracking\n heap: {\n samplingIntervalBytes: 512 * 1024, // Sample every 512KB allocated\n stackDepth: 64, // Capture deeper stacks for better context\n },\n };\n return initProfiler(options, {\n wallEnabled: wallEnabled !== \"false\",\n heapEnabled: heapEnabled === \"true\",\n });\n}\n\ninterface ProfilerFlags {\n 
wallEnabled?: boolean;\n heapEnabled?: boolean;\n}\n\nexport async function initProfiler(\n options?: PyroscopeConfig,\n flags: ProfilerFlags = { wallEnabled: true, heapEnabled: false },\n) {\n console.log(\"Initializing Pyroscope profiler at:\", options?.serverAddress);\n console.log(\" Wall profiling:\", flags.wallEnabled ? \"enabled\" : \"disabled\");\n console.log(\" Heap profiling:\", flags.heapEnabled ? \"enabled\" : \"disabled\");\n\n const { default: Pyroscope } = await import(\"@pyroscope/nodejs\");\n Pyroscope.init(options);\n\n // Start wall profiling (captures async I/O time - shows resolvers)\n if (flags.wallEnabled) {\n Pyroscope.startWallProfiling();\n }\n\n // Start CPU profiling (captures CPU-bound work)\n Pyroscope.startCpuProfiling();\n\n // Optionally start heap profiling (memory allocations)\n if (flags.heapEnabled) {\n Pyroscope.startHeapProfiling();\n }\n}\n","#!/usr/bin/env node\nimport * as Sentry from \"@sentry/node\";\nimport { childLogger } from \"document-model\";\nimport { config } from \"./config.js\";\nimport { createMeterProviderFromEnv } from \"./metrics.js\";\nimport { initProfilerFromEnv } from \"./profiler.js\";\nimport { startSwitchboard } from \"./server.mjs\";\n\nconst logger = childLogger([\"switchboard\"]);\n\nfunction ensureNodeVersion(minVersion = \"24\") {\n const version = process.versions.node;\n if (!version) {\n return;\n }\n\n if (version < minVersion) {\n console.error(\n `Node version ${minVersion} or higher is required. 
Current version: ${version}`,\n );\n process.exit(1);\n }\n}\n// Ensure minimum Node.js version\nensureNodeVersion(\"24\");\n\n// Each subgraph registers its own SIGINT/SIGTERM listeners, and the count\n// scales with dynamically-loaded document models beyond the default cap of 10.\nprocess.setMaxListeners(0);\n\nconst meterProvider = createMeterProviderFromEnv({\n OTEL_EXPORTER_OTLP_ENDPOINT: process.env.OTEL_EXPORTER_OTLP_ENDPOINT,\n OTEL_METRIC_EXPORT_INTERVAL: process.env.OTEL_METRIC_EXPORT_INTERVAL,\n OTEL_SERVICE_NAME: process.env.OTEL_SERVICE_NAME,\n});\n\nasync function shutdown() {\n console.log(\"\\nShutting down...\");\n // Flush final metrics before exit. Races against a 5s deadline so an\n // unresponsive OTLP endpoint cannot exhaust terminationGracePeriodSeconds.\n await Promise.race([\n meterProvider?.shutdown().catch(() => undefined),\n new Promise<void>((resolve) => setTimeout(resolve, 5_000)),\n ]);\n process.exit(0);\n}\n\n// SIGINT: Ctrl-C in development; SIGTERM: graceful shutdown in Docker/Kubernetes\nprocess.on(\"SIGINT\", shutdown);\nprocess.on(\"SIGTERM\", shutdown);\n\nif (process.env.PYROSCOPE_SERVER_ADDRESS) {\n try {\n await initProfilerFromEnv(process.env);\n } catch (e) {\n Sentry.captureException(e);\n logger.error(\"Error starting profiler: @error\", e);\n }\n}\n\nstartSwitchboard({ ...config, meterProvider 
}).catch(console.error);\n"],"mappings":";;;;;;;;;;;AACA,OAAO,QAAQ;AAKf,MAAM,EAAE,gBADS,WAAW;AAU5B,MAAa,SAAiB;CAC5B,UAAU,EAER,KACE,QAAQ,IAAI,+BACZ,aAAa,UAAU,OACvB,UACH;CACD,MACE,QAAQ,IAAI,uBACZ,CAAC,MAAM,OAAO,QAAQ,IAAI,oBAAoB,CAAC,GAC3C,OAAO,QAAQ,IAAI,oBAAoB,GACtC,aAAa,QAAQ;CAC5B,KAAK;CACL,OAAO;EACL,IAAI;EACJ,MAAM;EACN,QAAQ;GACN,MAAM;GACN,MAAM;GACP;EACD,OAAO;GACL,kBAAkB;GAClB,WAAW,EAAE;GACb,aAAa;GACb,UAAU,EAAE;GACb;EACF;CACF;;;ACnCD,MAAMA,WAAS,YAAY,CAAC,eAAe,UAAU,CAAC;AAEtD,SAAgB,2BAA2B,KAIb;CAC5B,MAAM,WAAW,IAAI;AACrB,KAAI,CAAC,SAAU,QAAO,KAAA;CAEtB,MAAM,SAAS,SAAS,IAAI,+BAA+B,IAAI,GAAG;CAClE,MAAM,uBACJ,OAAO,SAAS,OAAO,IAAI,SAAS,IAAI,SAAS;CAEnD,MAAM,OAAO,SAAS,QAAQ,OAAO,GAAG;CACxC,MAAM,cAAc,KAAK,SAAS,cAAc,GAC5C,OACA,GAAG,KAAK;AAEZ,UAAO,KAAK,mDAAmD,WAAW;CAC1E,MAAM,gBAAgB,IAAI,cAAc;EACtC,UAAU,IAAI,SAAS,EACrB,gBAAgB,IAAI,qBAAqB,eAC1C,CAAC;EACF,SAAS,CACP,IAAI,8BAA8B;GAChC,UAAU,IAAI,mBAAmB,EAC/B,KAAK,aACN,CAAC;GACF;GACA,qBAAqB,KAAK,IAAI,uBAAuB,KAAK,EAAE;GAC7D,CAAC,CACH;EACF,CAAC;AACF,UAAO,KAAK,qCAAqC,qBAAqB,KAAK;AAC3E,QAAO;;;;ACzCT,eAAsB,oBAAoB,KAAyB;CACjE,MAAM,EACJ,0BAA0B,eAC1B,4BAA4B,SAC5B,gBAAgB,eAChB,oBAAoB,mBACpB,wBAAwB,aACxB,wBAAwB,gBACtB;AAoBJ,QAAO,aAlB0B;EAC/B;EACA;EACA;EACA;EAGA,MAAM;GACJ,oBAAoB;GACpB,wBAAwB;GACxB,gBAAgB;GACjB;EAED,MAAM;GACJ,uBAAuB,MAAM;GAC7B,YAAY;GACb;EACF,EAC4B;EAC3B,aAAa,gBAAgB;EAC7B,aAAa,gBAAgB;EAC9B,CAAC;;AAQJ,eAAsB,aACpB,SACA,QAAuB;CAAE,aAAa;CAAM,aAAa;CAAO,EAChE;AACA,SAAQ,IAAI,uCAAuC,SAAS,cAAc;AAC1E,SAAQ,IAAI,qBAAqB,MAAM,cAAc,YAAY,WAAW;AAC5E,SAAQ,IAAI,qBAAqB,MAAM,cAAc,YAAY,WAAW;CAE5E,MAAM,EAAE,SAAS,cAAc,MAAM,OAAO;AAC5C,WAAU,KAAK,QAAQ;AAGvB,KAAI,MAAM,YACR,WAAU,oBAAoB;AAIhC,WAAU,mBAAmB;AAG7B,KAAI,MAAM,YACR,WAAU,oBAAoB;;;;ACtDlC,MAAM,SAAS,YAAY,CAAC,cAAc,CAAC;AAE3C,SAAS,kBAAkB,aAAa,MAAM;CAC5C,MAAM,UAAU,QAAQ,SAAS;AACjC,KAAI,CAAC,QACH;AAGF,KAAI,UAAU,YAAY;AACxB,UAAQ,MACN,gBAAgB,WAAW,2CAA2C,UACvE;AACD,UAAQ,KAAK,EAAE;;;AAInB,kBAAkB,KAAK;AAIvB,QAAQ,gBAAgB,EAAE;AAE1B,MAAM,gBAAgB,2BAA2B;CAC/C,6BAA6B,QAAQ,IAAI;CACzC,6BAA6B,QAAQ,IAAI;CACzC,mBAAmB,QAAQ,IAAI;
CAChC,CAAC;AAEF,eAAe,WAAW;AACxB,SAAQ,IAAI,qBAAqB;AAGjC,OAAM,QAAQ,KAAK,CACjB,eAAe,UAAU,CAAC,YAAY,KAAA,EAAU,EAChD,IAAI,SAAe,YAAY,WAAW,SAAS,IAAM,CAAC,CAC3D,CAAC;AACF,SAAQ,KAAK,EAAE;;AAIjB,QAAQ,GAAG,UAAU,SAAS;AAC9B,QAAQ,GAAG,WAAW,SAAS;AAE/B,IAAI,QAAQ,IAAI,yBACd,KAAI;AACF,OAAM,oBAAoB,QAAQ,IAAI;SAC/B,GAAG;AACV,QAAO,iBAAiB,EAAE;AAC1B,QAAO,MAAM,mCAAmC,EAAE;;AAItD,iBAAiB;CAAE,GAAG;CAAQ;CAAe,CAAC,CAAC,MAAM,QAAQ,MAAM"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { };
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import path from "path";
|
|
2
|
+
import { execSync } from "child_process";
|
|
3
|
+
import fs from "fs";
|
|
4
|
+
//#region src/install-packages.mts
|
|
5
|
+
const pkgs = process.env.PH_PACKAGES?.split(",") || [];
|
|
6
|
+
if (pkgs.length === 0 || pkgs.length === 1 && pkgs[0] === "") process.exit(0);
|
|
7
|
+
try {
|
|
8
|
+
const packageJsonPath = path.join(process.cwd(), "package.json");
|
|
9
|
+
const packageJsonContent = fs.readFileSync(packageJsonPath, "utf-8");
|
|
10
|
+
const packageJson = JSON.parse(packageJsonContent);
|
|
11
|
+
const installedDependencies = {
|
|
12
|
+
...packageJson.dependencies || {},
|
|
13
|
+
...packageJson.devDependencies || {}
|
|
14
|
+
};
|
|
15
|
+
for (const pkg of pkgs) {
|
|
16
|
+
if (pkg === "") continue;
|
|
17
|
+
if (installedDependencies[pkg]) {
|
|
18
|
+
console.log(`> Package ${pkg} is already installed, skipping`);
|
|
19
|
+
continue;
|
|
20
|
+
}
|
|
21
|
+
console.log(`> Installing ${pkg}`);
|
|
22
|
+
execSync(`pnpm add ${pkg}@latest`, { stdio: "inherit" });
|
|
23
|
+
}
|
|
24
|
+
} catch (error) {
|
|
25
|
+
console.error("Error in package installation:", error);
|
|
26
|
+
process.exit(1);
|
|
27
|
+
}
|
|
28
|
+
//#endregion
|
|
29
|
+
export {};
|
|
30
|
+
|
|
31
|
+
//# sourceMappingURL=install-packages.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"install-packages.mjs","names":[],"sources":["../src/install-packages.mts"],"sourcesContent":["import { execSync } from \"child_process\";\nimport fs from \"fs\";\nimport path from \"path\";\n\n// Define interface for package.json\ninterface PackageJson {\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n}\n\n// Get the list of packages to install from the environment variable\nconst pkgs = process.env.PH_PACKAGES?.split(\",\") || [];\n\n// Skip if no packages to install\nif (pkgs.length === 0 || (pkgs.length === 1 && pkgs[0] === \"\")) {\n process.exit(0);\n}\n\ntry {\n // Read the package.json file to check existing dependencies\n const packageJsonPath = path.join(process.cwd(), \"package.json\");\n const packageJsonContent = fs.readFileSync(packageJsonPath, \"utf-8\");\n const packageJson = JSON.parse(packageJsonContent) as PackageJson;\n\n // Get all installed dependencies\n const installedDependencies: Record<string, string> = {\n ...(packageJson.dependencies || {}),\n ...(packageJson.devDependencies || {}),\n };\n\n for (const pkg of pkgs) {\n if (pkg === \"\") continue;\n\n // Check if the package is already installed\n if (installedDependencies[pkg]) {\n console.log(`> Package ${pkg} is already installed, skipping`);\n continue;\n }\n\n console.log(`> Installing ${pkg}`);\n execSync(`pnpm add ${pkg}@latest`, { stdio: \"inherit\" });\n }\n} catch (error) {\n console.error(\"Error in package installation:\", error);\n 
process.exit(1);\n}\n"],"mappings":";;;;AAWA,MAAM,OAAO,QAAQ,IAAI,aAAa,MAAM,IAAI,IAAI,EAAE;AAGtD,IAAI,KAAK,WAAW,KAAM,KAAK,WAAW,KAAK,KAAK,OAAO,GACzD,SAAQ,KAAK,EAAE;AAGjB,IAAI;CAEF,MAAM,kBAAkB,KAAK,KAAK,QAAQ,KAAK,EAAE,eAAe;CAChE,MAAM,qBAAqB,GAAG,aAAa,iBAAiB,QAAQ;CACpE,MAAM,cAAc,KAAK,MAAM,mBAAmB;CAGlD,MAAM,wBAAgD;EACpD,GAAI,YAAY,gBAAgB,EAAE;EAClC,GAAI,YAAY,mBAAmB,EAAE;EACtC;AAED,MAAK,MAAM,OAAO,MAAM;AACtB,MAAI,QAAQ,GAAI;AAGhB,MAAI,sBAAsB,MAAM;AAC9B,WAAQ,IAAI,aAAa,IAAI,iCAAiC;AAC9D;;AAGF,UAAQ,IAAI,gBAAgB,MAAM;AAClC,WAAS,YAAY,IAAI,UAAU,EAAE,OAAO,WAAW,CAAC;;SAEnD,OAAO;AACd,SAAQ,MAAM,kCAAkC,MAAM;AACtD,SAAQ,KAAK,EAAE"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { };
|
package/dist/migrate.mjs
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { getConfig } from "@powerhousedao/config/node";
|
|
3
|
+
import { REACTOR_SCHEMA, getMigrationStatus, runMigrations } from "@powerhousedao/reactor";
|
|
4
|
+
import { Kysely, PostgresDialect } from "kysely";
|
|
5
|
+
import { Pool } from "pg";
|
|
6
|
+
//#region src/migrate.mts
|
|
7
|
+
function isPostgresUrl(url) {
|
|
8
|
+
return url.startsWith("postgresql://") || url.startsWith("postgres://");
|
|
9
|
+
}
|
|
10
|
+
async function main() {
|
|
11
|
+
const command = process.argv[2];
|
|
12
|
+
const config = getConfig();
|
|
13
|
+
const dbPath = process.env.PH_REACTOR_DATABASE_URL ?? process.env.DATABASE_URL ?? config.switchboard?.database?.url;
|
|
14
|
+
if (!dbPath || !isPostgresUrl(dbPath)) {
|
|
15
|
+
console.log("No PostgreSQL URL configured. Skipping migrations.");
|
|
16
|
+
console.log("(PGlite migrations are handled automatically on startup)");
|
|
17
|
+
return;
|
|
18
|
+
}
|
|
19
|
+
console.log(`Database: ${dbPath}`);
|
|
20
|
+
const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({ connectionString: dbPath }) }) });
|
|
21
|
+
try {
|
|
22
|
+
if (command === "status") {
|
|
23
|
+
console.log("\nChecking migration status...");
|
|
24
|
+
const migrations = await getMigrationStatus(db, REACTOR_SCHEMA);
|
|
25
|
+
console.log("\nMigration Status:");
|
|
26
|
+
console.log("=================");
|
|
27
|
+
for (const migration of migrations) {
|
|
28
|
+
const status = migration.executedAt ? `[OK] Executed at ${migration.executedAt.toISOString()}` : "[--] Pending";
|
|
29
|
+
console.log(`${status} - ${migration.name}`);
|
|
30
|
+
}
|
|
31
|
+
} else {
|
|
32
|
+
console.log("\nRunning migrations...");
|
|
33
|
+
const result = await runMigrations(db, REACTOR_SCHEMA);
|
|
34
|
+
if (!result.success) {
|
|
35
|
+
console.error("Migration failed:", result.error?.message);
|
|
36
|
+
process.exit(1);
|
|
37
|
+
}
|
|
38
|
+
if (result.migrationsExecuted.length === 0) console.log("No migrations to run - database is up to date");
|
|
39
|
+
else {
|
|
40
|
+
console.log(`Successfully executed ${result.migrationsExecuted.length} migration(s):`);
|
|
41
|
+
for (const name of result.migrationsExecuted) console.log(` - ${name}`);
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
} catch (error) {
|
|
45
|
+
console.error("Error:", error instanceof Error ? error.message : String(error));
|
|
46
|
+
process.exit(1);
|
|
47
|
+
} finally {
|
|
48
|
+
await db.destroy();
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
main();
|
|
52
|
+
//#endregion
|
|
53
|
+
export {};
|
|
54
|
+
|
|
55
|
+
//# sourceMappingURL=migrate.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"migrate.mjs","names":[],"sources":["../src/migrate.mts"],"sourcesContent":["#!/usr/bin/env node\nimport { Kysely, PostgresDialect } from \"kysely\";\nimport { Pool } from \"pg\";\nimport {\n runMigrations,\n getMigrationStatus,\n REACTOR_SCHEMA,\n} from \"@powerhousedao/reactor\";\nimport { getConfig } from \"@powerhousedao/config/node\";\n\nfunction isPostgresUrl(url: string): boolean {\n return url.startsWith(\"postgresql://\") || url.startsWith(\"postgres://\");\n}\n\nasync function main() {\n const command = process.argv[2];\n const config = getConfig();\n\n const dbPath =\n process.env.PH_REACTOR_DATABASE_URL ??\n process.env.DATABASE_URL ??\n config.switchboard?.database?.url;\n\n if (!dbPath || !isPostgresUrl(dbPath)) {\n console.log(\"No PostgreSQL URL configured. Skipping migrations.\");\n console.log(\"(PGlite migrations are handled automatically on startup)\");\n return;\n }\n\n console.log(`Database: ${dbPath}`);\n\n const pool = new Pool({ connectionString: dbPath });\n\n const db = new Kysely<any>({\n dialect: new PostgresDialect({ pool }),\n });\n\n try {\n if (command === \"status\") {\n console.log(\"\\nChecking migration status...\");\n const migrations = await getMigrationStatus(db, REACTOR_SCHEMA);\n\n console.log(\"\\nMigration Status:\");\n console.log(\"=================\");\n\n for (const migration of migrations) {\n const status = migration.executedAt\n ? 
`[OK] Executed at ${migration.executedAt.toISOString()}`\n : \"[--] Pending\";\n console.log(`${status} - ${migration.name}`);\n }\n } else {\n console.log(\"\\nRunning migrations...\");\n const result = await runMigrations(db, REACTOR_SCHEMA);\n\n if (!result.success) {\n console.error(\"Migration failed:\", result.error?.message);\n process.exit(1);\n }\n\n if (result.migrationsExecuted.length === 0) {\n console.log(\"No migrations to run - database is up to date\");\n } else {\n console.log(\n `Successfully executed ${result.migrationsExecuted.length} migration(s):`,\n );\n for (const name of result.migrationsExecuted) {\n console.log(` - ${name}`);\n }\n }\n }\n } catch (error) {\n console.error(\n \"Error:\",\n error instanceof Error ? error.message : String(error),\n );\n process.exit(1);\n } finally {\n await db.destroy();\n }\n}\n\nvoid main();\n"],"mappings":";;;;;;AAUA,SAAS,cAAc,KAAsB;AAC3C,QAAO,IAAI,WAAW,gBAAgB,IAAI,IAAI,WAAW,cAAc;;AAGzE,eAAe,OAAO;CACpB,MAAM,UAAU,QAAQ,KAAK;CAC7B,MAAM,SAAS,WAAW;CAE1B,MAAM,SACJ,QAAQ,IAAI,2BACZ,QAAQ,IAAI,gBACZ,OAAO,aAAa,UAAU;AAEhC,KAAI,CAAC,UAAU,CAAC,cAAc,OAAO,EAAE;AACrC,UAAQ,IAAI,qDAAqD;AACjE,UAAQ,IAAI,2DAA2D;AACvE;;AAGF,SAAQ,IAAI,aAAa,SAAS;CAIlC,MAAM,KAAK,IAAI,OAAY,EACzB,SAAS,IAAI,gBAAgB,EAAE,MAHpB,IAAI,KAAK,EAAE,kBAAkB,QAAQ,CAAC,EAGZ,CAAC,EACvC,CAAC;AAEF,KAAI;AACF,MAAI,YAAY,UAAU;AACxB,WAAQ,IAAI,iCAAiC;GAC7C,MAAM,aAAa,MAAM,mBAAmB,IAAI,eAAe;AAE/D,WAAQ,IAAI,sBAAsB;AAClC,WAAQ,IAAI,oBAAoB;AAEhC,QAAK,MAAM,aAAa,YAAY;IAClC,MAAM,SAAS,UAAU,aACrB,oBAAoB,UAAU,WAAW,aAAa,KACtD;AACJ,YAAQ,IAAI,GAAG,OAAO,KAAK,UAAU,OAAO;;SAEzC;AACL,WAAQ,IAAI,0BAA0B;GACtC,MAAM,SAAS,MAAM,cAAc,IAAI,eAAe;AAEtD,OAAI,CAAC,OAAO,SAAS;AACnB,YAAQ,MAAM,qBAAqB,OAAO,OAAO,QAAQ;AACzD,YAAQ,KAAK,EAAE;;AAGjB,OAAI,OAAO,mBAAmB,WAAW,EACvC,SAAQ,IAAI,gDAAgD;QACvD;AACL,YAAQ,IACN,yBAAyB,OAAO,mBAAmB,OAAO,gBAC3D;AACD,SAAK,MAAM,QAAQ,OAAO,mBACxB,SAAQ,IAAI,OAAO,OAAO;;;UAIzB,OAAO;AACd,UAAQ,MACN,UACA,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,CACvD;AACD,UAAQ,KAAK,EAAE;WACP;AACR,QAAM,GAAG,SAAS;;;AAI
jB,MAAM"}
|
|
@@ -0,0 +1,255 @@
|
|
|
1
|
+
import { n as isPostgresUrl, t as addDefaultDrive } from "./utils-DFl0ezBT.mjs";
|
|
2
|
+
import { register } from "node:module";
|
|
3
|
+
import * as Sentry from "@sentry/node";
|
|
4
|
+
import { childLogger, documentModelDocumentModelModule, setLogLevel } from "document-model";
|
|
5
|
+
import dotenv from "dotenv";
|
|
6
|
+
import { getConfig } from "@powerhousedao/config/node";
|
|
7
|
+
import { PGlite } from "@electric-sql/pglite";
|
|
8
|
+
import { metrics } from "@opentelemetry/api";
|
|
9
|
+
import { ReactorInstrumentation } from "@powerhousedao/opentelemetry-instrumentation-reactor";
|
|
10
|
+
import { ChannelScheme, EventBus, ReactorBuilder, ReactorClientBuilder, driveCollectionId, parseDriveUrl } from "@powerhousedao/reactor";
|
|
11
|
+
import { HttpPackageLoader, ImportPackageLoader, PackageManagementService, PackagesSubgraph, getUniqueDocumentModels, initializeAndStartAPI } from "@powerhousedao/reactor-api";
|
|
12
|
+
import { httpsHooksPath } from "@powerhousedao/reactor-api/https-hooks";
|
|
13
|
+
import { VitePackageLoader, createViteLogger, startViteServer } from "@powerhousedao/reactor-api/vite";
|
|
14
|
+
import { driveDocumentModelModule } from "@powerhousedao/shared/document-drive";
|
|
15
|
+
import { documentModels } from "@powerhousedao/vetra";
|
|
16
|
+
import { processorFactory } from "@powerhousedao/vetra/processors";
|
|
17
|
+
import { Kysely, PostgresDialect } from "kysely";
|
|
18
|
+
import { PGliteDialect } from "kysely-pglite-dialect";
|
|
19
|
+
import path from "path";
|
|
20
|
+
import { Pool } from "pg";
|
|
21
|
+
import { EnvVarProvider } from "@openfeature/env-var-provider";
|
|
22
|
+
import { OpenFeature } from "@openfeature/server-sdk";
|
|
23
|
+
import { DEFAULT_RENOWN_URL, NodeKeyStorage, RenownBuilder, RenownCryptoBuilder, createSignatureVerifier } from "@renown/sdk/node";
|
|
24
|
+
//#region src/feature-flags.ts
|
|
25
|
+
/**
 * Set up feature flags for the process.
 * For now only environment variables back the flags: an EnvVarProvider is
 * registered with OpenFeature and the call waits until it is ready, so the
 * returned client can be queried immediately.
 */
async function initFeatureFlags() {
	await OpenFeature.setProviderAndWait(new EnvVarProvider());
	return OpenFeature.getClient();
}
|
|
30
|
+
//#endregion
|
|
31
|
+
//#region src/renown.ts
|
|
32
|
+
// Logger scoped to the Renown identity bootstrap helpers below.
const logger = childLogger(["switchboard", "renown"]);
|
|
33
|
+
/**
|
|
34
|
+
* Initialize Renown for the Switchboard instance.
|
|
35
|
+
* This allows Switchboard to authenticate with remote services
|
|
36
|
+
* using the same identity established during `ph login`.
|
|
37
|
+
*/
|
|
38
|
+
/**
 * Initialize Renown for the Switchboard instance.
 * This allows Switchboard to authenticate with remote services
 * using the same identity established during `ph login`.
 *
 * @param options.keypairPath - path to the keypair file (storage default applies when omitted)
 * @param options.requireExisting - if true, throws instead of generating a fresh keypair
 * @param options.baseUrl - base URL of the Renown instance to use
 * @returns the built Renown instance
 * @throws Error when no keypair exists and requireExisting is set
 */
async function initRenown(options = {}) {
	const { keypairPath, requireExisting = false, baseUrl = DEFAULT_RENOWN_URL } = options;
	const storage = new NodeKeyStorage(keypairPath, { logger });
	// Probe for a previously persisted keypair (e.g. created by `ph login`).
	const storedPair = await storage.loadKeyPair();
	if (!storedPair) {
		if (requireExisting) throw new Error('No existing keypair found and requireExisting is true. Run "ph login" to create one.');
		logger.info("No existing keypair found. A new one will be generated.");
	}
	const identityCrypto = await new RenownCryptoBuilder().withKeyPairStorage(storage).build();
	const builder = new RenownBuilder("switchboard", {}).withCrypto(identityCrypto).withBaseUrl(baseUrl);
	const renown = await builder.build();
	logger.info("Switchboard identity initialized: @did", identityCrypto.did);
	return renown;
}
|
|
49
|
+
/**
|
|
50
|
+
* Get the signer config for the given renown instance.
|
|
51
|
+
*
|
|
52
|
+
* @param renown - The renown instance
|
|
53
|
+
* @param requireSignature - If true, unsigned actions are rejected
|
|
54
|
+
*/
|
|
55
|
+
/**
 * Get the signer config for the given renown instance.
 *
 * @param renown - The renown instance supplying the signer
 * @param requireSignature - If true, unsigned actions are rejected by the verifier
 * @returns an object with `signer` and `verifier` for the reactor client builder
 */
function getRenownSignerConfig(renown, requireSignature) {
	const verifier = createSignatureVerifier(requireSignature);
	return { signer: renown.signer, verifier };
}
|
|
61
|
+
//#endregion
|
|
62
|
+
//#region src/server.mts
|
|
63
|
+
// Fallback logger used whenever callers do not supply their own.
const defaultLogger = childLogger(["switchboard"]);
// NOTE(review): `||` means an empty LOG_LEVEL env var also falls back to "info".
const LogLevel = process.env.LOG_LEVEL || "info";
setLogLevel(LogLevel);
// Load environment variables from a local .env file, if present.
dotenv.config();
// Feature-flag keys and their compiled-in defaults (resolved in startSwitchboard).
const DOCUMENT_MODEL_SUBGRAPHS_ENABLED = "DOCUMENT_MODEL_SUBGRAPHS_ENABLED";
const DOCUMENT_MODEL_SUBGRAPHS_ENABLED_DEFAULT = true;
const REQUIRE_SIGNATURES = "REQUIRE_SIGNATURES";
const REQUIRE_SIGNATURES_DEFAULT = false;
// Initialize Sentry only when a DSN is configured.
if (process.env.SENTRY_DSN) {
	defaultLogger.info("Initialized Sentry with env: @env", process.env.SENTRY_ENV);
	Sentry.init({
		dsn: process.env.SENTRY_DSN,
		environment: process.env.SENTRY_ENV,
		// Match the version tag uploaded by CI so source maps resolve.
		// Populated by the CI (SENTRY_RELEASE) or npm at runtime.
		release: process.env.SENTRY_RELEASE || (process.env.npm_package_version ? `v${process.env.npm_package_version}` : void 0)
	});
}
// HTTP listen port; overridable via the PORT env var.
const DEFAULT_PORT = process.env.PORT ? Number(process.env.PORT) : 4001;
|
|
80
|
+
/**
 * Build and start the Switchboard server: reactor client, HTTP API, package
 * loaders, optional default drive and remote-drive sync.
 *
 * @param serverPort - port the API listens on
 * @param options - StartServerOptions (dev mode, packages, drives, identity, …)
 * @param renown - Renown identity or null when identity init failed/was skipped
 * @returns { defaultDriveUrl, api, reactor, renown }
 */
async function initServer(serverPort, options, renown) {
	// Register the global MeterProvider before ReactorInstrumentation is
	// constructed. setGlobalMeterProvider is a one-way door — once set it
	// cannot be unset — so this must happen before initializeClient runs.
	if (options.meterProvider) metrics.setGlobalMeterProvider(options.meterProvider);
	const { dev, packages = [], remoteDrives = [], logger = defaultLogger } = options;
	logger.level = LogLevel;
	// Use a postgres URL for read-model storage if available, otherwise a local PGlite path.
	const readModelPath = (options.dbPath ?? process.env.DATABASE_URL) || ".ph/read-storage";
	const config = getConfig(options.configFile ?? path.join(process.cwd(), "powerhouse.config.json"));
	// HTTP registry package loading configuration (env overrides config file).
	const registryUrl = process.env.PH_REGISTRY_URL ?? config.packageRegistryUrl;
	const registryPackages = process.env.PH_REGISTRY_PACKAGES;
	const dynamicModelLoading = options.dynamicModelLoading ?? process.env.DYNAMIC_MODEL_LOADING === "true";
	let httpLoader;
	if (registryUrl) {
		// Register HTTP/HTTPS module loader hooks for dynamic package imports.
		register(httpsHooksPath, import.meta.url);
		httpLoader = new HttpPackageLoader({ registryUrl });
		// Merge registry-listed packages into the package list, skipping duplicates.
		registryPackages?.split(",").forEach((p) => {
			const name = p.trim();
			if (!packages.includes(name)) packages.push(name);
		});
	}
	const reactorLogger = logger.child(["reactor"]);
	// Factory handed to initializeAndStartAPI; builds the reactor client module.
	const initializeClient = async (documentModels$1) => {
		const eventBus = new EventBus();
		// Built-in models (document-model, drive, vetra) plus whatever the API discovered.
		const builder = new ReactorBuilder().withEventBus(eventBus).withDocumentModels(getUniqueDocumentModels([
			documentModelDocumentModelModule,
			driveDocumentModelModule,
			...documentModels,
			...documentModels$1
		])).withChannelScheme(ChannelScheme.SWITCHBOARD).withSignalHandlers().withLogger(reactorLogger);
		// Optional executor tuning; only applied for a positive integer env value.
		const maxSkipThreshold = parseInt(process.env.MAX_SKIP_THRESHOLD ?? "", 10);
		if (!isNaN(maxSkipThreshold) && maxSkipThreshold > 0) {
			builder.withExecutorConfig({ maxSkipThreshold });
			logger.info(`Reactor maxSkipThreshold set to ${maxSkipThreshold}`);
		}
		const reactorDbUrl = process.env.PH_REACTOR_DATABASE_URL;
		if (reactorDbUrl && isPostgresUrl(reactorDbUrl)) {
			// sslmode=disable is appended only when the URL carries no query string.
			const kysely = new Kysely({ dialect: new PostgresDialect({ pool: new Pool({ connectionString: reactorDbUrl.includes("?") ? reactorDbUrl : `${reactorDbUrl}?sslmode=disable` }) }) });
			builder.withKysely(kysely);
			logger.info("Using PostgreSQL for reactor storage");
		} else {
			const kysely = new Kysely({ dialect: new PGliteDialect(new PGlite("./.ph/reactor-storage")) });
			builder.withKysely(kysely);
			logger.info("Using PGlite for reactor storage");
		}
		if (httpLoader && dynamicModelLoading) builder.withDocumentModelLoader(httpLoader.documentModelLoader);
		const clientBuilder = new ReactorClientBuilder().withReactorBuilder(builder);
		if (renown) {
			const signerConfig = getRenownSignerConfig(renown, options.identity?.requireSignatures);
			clientBuilder.withSigner(signerConfig);
		}
		const module = await clientBuilder.buildModule();
		if (module.reactorModule) {
			new ReactorInstrumentation(module.reactorModule).start();
			reactorLogger.info("Reactor metrics instrumentation started");
		}
		return module;
	};
	let defaultDriveUrl = void 0;
	// TODO get path from powerhouse config.
	// Start vite server if dev mode is enabled.
	const basePath = process.cwd();
	const viteLogger = createViteLogger(logger);
	const vite = dev ? await startViteServer(process.cwd(), viteLogger) : void 0;
	// Include the local working directory as a package source unless disabled.
	if (!options.disableLocalPackages) packages.push(basePath);
	// Create loaders: vite loader in dev, plain import loader otherwise.
	const packageLoaders = [];
	if (vite) packageLoaders.push(VitePackageLoader.build(vite));
	else packageLoaders.push(new ImportPackageLoader());
	if (httpLoader) {
		packageLoaders.push(httpLoader);
		registryPackages?.split(",").forEach((p) => {
			const name = p.trim();
			if (!packages.includes(name)) packages.push(name);
		});
	}
	const apiLogger = logger.child(["reactor-api"]);
	const api = await initializeAndStartAPI(initializeClient, {
		port: serverPort,
		dbPath: readModelPath,
		https: options.https,
		packageLoaders: packageLoaders.length > 0 ? packageLoaders : void 0,
		packages,
		processorConfig: options.processorConfig,
		processors: { "@powerhousedao/vetra": [processorFactory] },
		configFile: options.configFile ?? path.join(process.cwd(), "powerhouse.config.json"),
		mcp: options.mcp ?? true,
		logger: apiLogger,
		enableDocumentModelSubgraphs: options.enableDocumentModelSubgraphs
	}, "switchboard");
	// Register the Sentry error handler after all routes are established.
	if (process.env.SENTRY_DSN) api.httpAdapter.setupSentryErrorHandler(Sentry);
	const { client, graphqlManager, documentModelRegistry } = api;
	// Wire up dynamic package management if the HTTP loader is configured.
	if (httpLoader) {
		const packageManagementService = new PackageManagementService({
			defaultRegistryUrl: registryUrl,
			httpLoader,
			documentModelRegistry
		});
		// Rebuild document-model subgraphs whenever the installed models change.
		packageManagementService.setOnModelsChanged(() => {
			graphqlManager.regenerateDocumentModelSubgraphs().catch(logger.error);
		});
		const packagesSubgraph = new PackagesSubgraph({
			relationalDb: void 0,
			analyticsStore: void 0,
			reactorClient: client,
			graphqlManager,
			syncManager: api.syncManager,
			path: graphqlManager.getBasePath(),
			packageManagementService
		});
		// Fire-and-forget registration; failures are logged, not fatal.
		graphqlManager.registerSubgraphInstance(packagesSubgraph, "graphql", false).then(() => graphqlManager.updateRouter()).catch((error) => {
			logger.error("Failed to register packages subgraph: @error", error);
		});
	}
	// Create the default drive if one was requested (requires an identity).
	if (options.drive) {
		if (!renown) throw new Error("Cannot create default drive without Renown identity");
		defaultDriveUrl = await addDefaultDrive(client, options.drive, serverPort);
	}
	// Add vite middleware after the HTTP app is initialized, if applicable.
	if (vite) api.httpAdapter.mountRawMiddleware(vite.middlewares);
	// Connect to remote drives AFTER packages are loaded.
	if (remoteDrives.length > 0) for (const remoteDriveUrl of remoteDrives) {
		let driveId;
		try {
			const { syncManager } = api;
			const parsed = parseDriveUrl(remoteDriveUrl);
			driveId = parsed.driveId;
			// Random suffix keeps sync registrations unique per process run.
			const remoteName = `remote-drive-${driveId}-${crypto.randomUUID()}`;
			await syncManager.add(remoteName, driveCollectionId("main", driveId), {
				type: "gql",
				parameters: { url: parsed.graphqlEndpoint }
			});
			logger.debug("Remote drive @remoteDriveUrl synced", remoteDriveUrl);
		} catch (error) {
			// "already exists" is benign: reuse the drive id from the URL tail.
			if (error instanceof Error && error.message.includes("already exists")) {
				logger.debug("Remote drive already added: @remoteDriveUrl", remoteDriveUrl);
				driveId = remoteDriveUrl.split("/").pop();
			} else logger.error("Failed to connect to remote drive @remoteDriveUrl: @error", remoteDriveUrl, error);
		} finally {
			// Construct the local URL once, in the finally block, whether or not sync succeeded.
			if (!defaultDriveUrl && driveId) defaultDriveUrl = `${options.https ? "https" : "http"}://localhost:${serverPort}/d/${driveId}`;
		}
	}
	return {
		defaultDriveUrl,
		api,
		reactor: client,
		renown
	};
}
|
|
221
|
+
/**
 * Entry point: resolve feature flags, initialize the Renown identity, then
 * start the server via initServer.
 *
 * @param options - StartServerOptions; NOTE: this object is mutated in place
 *   (enableDocumentModelSubgraphs and identity.requireSignatures are written
 *   back) before being handed to initServer.
 * @returns the SwitchboardReactor from initServer
 * @throws Error when identity is required but cannot be initialized, or
 *   rethrows any initServer failure after reporting it to Sentry.
 */
const startSwitchboard = async (options = {}) => {
	const serverPort = options.port ?? DEFAULT_PORT;
	// Feature flags: explicit options win over flag values, which win over defaults.
	const featureFlags = await initFeatureFlags();
	const enableDocumentModelSubgraphs = await featureFlags.getBooleanValue(DOCUMENT_MODEL_SUBGRAPHS_ENABLED, options.enableDocumentModelSubgraphs ?? DOCUMENT_MODEL_SUBGRAPHS_ENABLED_DEFAULT);
	options.enableDocumentModelSubgraphs = enableDocumentModelSubgraphs;
	const requireSignatures = options.identity?.requireSignatures ?? await featureFlags.getBooleanValue(REQUIRE_SIGNATURES, REQUIRE_SIGNATURES_DEFAULT);
	options.identity = {
		...options.identity,
		requireSignatures
	};
	const logger = options.logger ?? defaultLogger;
	logger.info("Feature flags: @flags", JSON.stringify({
		DOCUMENT_MODEL_SUBGRAPHS_ENABLED: enableDocumentModelSubgraphs,
		REQUIRE_SIGNATURES: requireSignatures
	}, null, 2));
	// Initialize the Renown identity; failure is non-fatal unless the caller
	// explicitly requires an existing identity.
	let renown = null;
	try {
		renown = await initRenown(options.identity);
	} catch (e) {
		// Fix: this path initializes Renown, not the legacy ConnectCrypto.
		logger.warn("Failed to initialize Renown: @error", e);
		// Preserve the original failure for callers via the error cause.
		if (options.identity?.requireExisting) throw new Error("Identity required but failed to initialize. Run \"ph login\" first.", { cause: e });
	}
	try {
		return await initServer(serverPort, options, renown);
	} catch (e) {
		// Report to Sentry (no-op when Sentry.init was never called) and rethrow.
		Sentry.captureException(e);
		logger.error("App crashed: @error", e);
		throw e;
	}
};
|
|
251
|
+
// Self-start only when this module is the process entry point, not when imported.
if (import.meta.main) await startSwitchboard();
|
|
252
|
+
//#endregion
|
|
253
|
+
export { startSwitchboard as t };
|
|
254
|
+
|
|
255
|
+
//# sourceMappingURL=server-DxVTcVoC.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"server-DxVTcVoC.mjs","names":["vetraDocumentModels","documentModels","vetraProcessorFactory"],"sources":["../src/feature-flags.ts","../src/renown.ts","../src/server.mts"],"sourcesContent":["import { EnvVarProvider } from \"@openfeature/env-var-provider\";\nimport { OpenFeature } from \"@openfeature/server-sdk\";\n\nexport async function initFeatureFlags() {\n // for now, we're only using env vars for feature flags\n const provider = new EnvVarProvider();\n\n await OpenFeature.setProviderAndWait(provider);\n\n return OpenFeature.getClient();\n}\n","import type { SignerConfig } from \"@powerhousedao/reactor\";\nimport {\n createSignatureVerifier,\n DEFAULT_RENOWN_URL,\n NodeKeyStorage,\n RenownBuilder,\n RenownCryptoBuilder,\n type IRenown,\n} from \"@renown/sdk/node\";\nimport { childLogger } from \"document-model\";\n\nconst logger = childLogger([\"switchboard\", \"renown\"]);\n\nexport interface RenownOptions {\n /** Path to the keypair file. Defaults to .ph/.keypair.json in cwd */\n keypairPath?: string;\n /** If true, won't generate a new keypair if none exists */\n requireExisting?: boolean;\n /** Base url of the Renown instance to use */\n baseUrl?: string;\n}\n\n/**\n * Initialize Renown for the Switchboard instance.\n * This allows Switchboard to authenticate with remote services\n * using the same identity established during `ph login`.\n */\nexport async function initRenown(\n options: RenownOptions = {},\n): Promise<IRenown | null> {\n const {\n keypairPath,\n requireExisting = false,\n baseUrl = DEFAULT_RENOWN_URL,\n } = options;\n\n const keyStorage = new NodeKeyStorage(keypairPath, {\n logger,\n });\n\n // Check if we have an existing keypair\n const existingKeyPair = await keyStorage.loadKeyPair();\n\n if (!existingKeyPair && requireExisting) {\n throw new Error(\n \"No existing keypair found and requireExisting is true. 
\" +\n 'Run \"ph login\" to create one.',\n );\n }\n\n if (!existingKeyPair) {\n logger.info(\"No existing keypair found. A new one will be generated.\");\n }\n\n const renownCrypto = await new RenownCryptoBuilder()\n .withKeyPairStorage(keyStorage)\n .build();\n\n const renown = await new RenownBuilder(\"switchboard\", {})\n .withCrypto(renownCrypto)\n .withBaseUrl(baseUrl)\n .build();\n\n logger.info(\"Switchboard identity initialized: @did\", renownCrypto.did);\n\n return renown;\n}\n\n/**\n * Get the signer config for the given renown instance.\n *\n * @param renown - The renown instance\n * @param requireSignature - If true, unsigned actions are rejected\n */\nexport function getRenownSignerConfig(\n renown: IRenown,\n requireSignature?: boolean,\n): SignerConfig {\n return {\n signer: renown.signer,\n verifier: createSignatureVerifier(requireSignature),\n };\n}\n","#!/usr/bin/env node\nimport { PGlite } from \"@electric-sql/pglite\";\nimport { metrics } from \"@opentelemetry/api\";\nimport { getConfig } from \"@powerhousedao/config/node\";\nimport { ReactorInstrumentation } from \"@powerhousedao/opentelemetry-instrumentation-reactor\";\nimport {\n ChannelScheme,\n EventBus,\n ReactorBuilder,\n ReactorClientBuilder,\n driveCollectionId,\n parseDriveUrl,\n type Database,\n} from \"@powerhousedao/reactor\";\nimport {\n HttpPackageLoader,\n ImportPackageLoader,\n PackageManagementService,\n PackagesSubgraph,\n getUniqueDocumentModels,\n initializeAndStartAPI,\n type IPackageLoader,\n} from \"@powerhousedao/reactor-api\";\nimport { httpsHooksPath } from \"@powerhousedao/reactor-api/https-hooks\";\nimport {\n VitePackageLoader,\n createViteLogger,\n startViteServer,\n} from \"@powerhousedao/reactor-api/vite\";\nimport { driveDocumentModelModule } from \"@powerhousedao/shared/document-drive\";\nimport type { DocumentModelModule } from \"@powerhousedao/shared/document-model\";\nimport { documentModels as vetraDocumentModels } from \"@powerhousedao/vetra\";\nimport { 
processorFactory as vetraProcessorFactory } from \"@powerhousedao/vetra/processors\";\nimport type { IRenown } from \"@renown/sdk/node\";\nimport * as Sentry from \"@sentry/node\";\nimport {\n childLogger,\n documentModelDocumentModelModule,\n setLogLevel,\n type ILogger,\n} from \"document-model\";\nimport dotenv from \"dotenv\";\nimport { Kysely, PostgresDialect } from \"kysely\";\nimport { PGliteDialect } from \"kysely-pglite-dialect\";\nimport { register } from \"node:module\";\nimport path from \"path\";\nimport { Pool } from \"pg\";\nimport { initFeatureFlags } from \"./feature-flags.js\";\nimport { getRenownSignerConfig, initRenown } from \"./renown.js\";\nimport type { StartServerOptions, SwitchboardReactor } from \"./types.js\";\nimport { addDefaultDrive, isPostgresUrl } from \"./utils.mjs\";\n\nconst defaultLogger = childLogger([\"switchboard\"]);\n\nconst LogLevel = (process.env.LOG_LEVEL as ILogger[\"level\"] | \"\") || \"info\";\nsetLogLevel(LogLevel);\n\ndotenv.config();\n\n// Feature flag constants\nconst DOCUMENT_MODEL_SUBGRAPHS_ENABLED = \"DOCUMENT_MODEL_SUBGRAPHS_ENABLED\";\nconst DOCUMENT_MODEL_SUBGRAPHS_ENABLED_DEFAULT = true;\nconst REQUIRE_SIGNATURES = \"REQUIRE_SIGNATURES\";\nconst REQUIRE_SIGNATURES_DEFAULT = false;\n\nif (process.env.SENTRY_DSN) {\n defaultLogger.info(\n \"Initialized Sentry with env: @env\",\n process.env.SENTRY_ENV,\n );\n Sentry.init({\n dsn: process.env.SENTRY_DSN,\n environment: process.env.SENTRY_ENV,\n // Match the version tag uploaded by release-branch.yml so source maps\n // resolve. Populated by the CI (WORKSPACE_VERSION) or npm at runtime.\n release:\n process.env.SENTRY_RELEASE ||\n (process.env.npm_package_version\n ? `v${process.env.npm_package_version}`\n : undefined),\n });\n}\n\nconst DEFAULT_PORT = process.env.PORT ? 
Number(process.env.PORT) : 4001;\n\nasync function initServer(\n serverPort: number,\n options: StartServerOptions,\n renown: IRenown | null,\n) {\n // Register the global MeterProvider before ReactorInstrumentation is\n // constructed. setGlobalMeterProvider is a one-way door — once set it cannot\n // be unset — so this must happen before initializeClient calls\n // instrumentation.start() → createMetrics() → metrics.getMeter().\n if (options.meterProvider) {\n metrics.setGlobalMeterProvider(options.meterProvider);\n }\n\n const {\n dev,\n packages = [],\n remoteDrives = [],\n logger = defaultLogger,\n } = options;\n logger.level = LogLevel;\n const dbPath = options.dbPath ?? process.env.DATABASE_URL;\n\n // use postgres url for read model storage if available, otherwise use local PGlite path\n const readModelPath = dbPath || \".ph/read-storage\";\n\n // HTTP registry package loading\n const configPath =\n options.configFile ?? path.join(process.cwd(), \"powerhouse.config.json\");\n const config = getConfig(configPath);\n const registryUrl = process.env.PH_REGISTRY_URL ?? config.packageRegistryUrl;\n const registryPackages = process.env.PH_REGISTRY_PACKAGES;\n const dynamicModelLoading =\n options.dynamicModelLoading ?? 
process.env.DYNAMIC_MODEL_LOADING === \"true\";\n let httpLoader: HttpPackageLoader | undefined;\n\n if (registryUrl) {\n // Register HTTP/HTTPS module loader hooks for dynamic package imports\n register(httpsHooksPath, import.meta.url);\n httpLoader = new HttpPackageLoader({ registryUrl });\n registryPackages?.split(\",\").forEach((p) => {\n const name = p.trim();\n if (!packages.includes(name)) {\n packages.push(name);\n }\n });\n }\n\n const reactorLogger = logger.child([\"reactor\"]);\n const initializeClient = async (documentModels: DocumentModelModule[]) => {\n const eventBus = new EventBus();\n const builder = new ReactorBuilder()\n .withEventBus(eventBus)\n .withDocumentModels(\n getUniqueDocumentModels([\n documentModelDocumentModelModule,\n driveDocumentModelModule,\n ...vetraDocumentModels,\n ...documentModels,\n ]),\n )\n .withChannelScheme(ChannelScheme.SWITCHBOARD)\n .withSignalHandlers()\n .withLogger(reactorLogger);\n\n const maxSkipThreshold = parseInt(process.env.MAX_SKIP_THRESHOLD ?? \"\", 10);\n if (!isNaN(maxSkipThreshold) && maxSkipThreshold > 0) {\n builder.withExecutorConfig({ maxSkipThreshold });\n logger.info(`Reactor maxSkipThreshold set to ${maxSkipThreshold}`);\n }\n\n const reactorDbUrl = process.env.PH_REACTOR_DATABASE_URL;\n if (reactorDbUrl && isPostgresUrl(reactorDbUrl)) {\n const connectionString = reactorDbUrl.includes(\"?\")\n ? 
reactorDbUrl\n : `${reactorDbUrl}?sslmode=disable`;\n const pool = new Pool({ connectionString });\n const kysely = new Kysely<Database>({\n dialect: new PostgresDialect({ pool }),\n });\n builder.withKysely(kysely);\n logger.info(\"Using PostgreSQL for reactor storage\");\n } else {\n const pglitePath = \"./.ph/reactor-storage\";\n const pglite = new PGlite(pglitePath);\n const kysely = new Kysely<Database>({\n dialect: new PGliteDialect(pglite),\n });\n builder.withKysely(kysely);\n logger.info(\"Using PGlite for reactor storage\");\n }\n\n if (httpLoader && dynamicModelLoading) {\n builder.withDocumentModelLoader(httpLoader.documentModelLoader);\n }\n\n const clientBuilder = new ReactorClientBuilder().withReactorBuilder(\n builder,\n );\n\n if (renown) {\n const signerConfig = getRenownSignerConfig(\n renown,\n options.identity?.requireSignatures,\n );\n clientBuilder.withSigner(signerConfig);\n }\n\n const module = await clientBuilder.buildModule();\n\n if (module.reactorModule) {\n const instrumentation = new ReactorInstrumentation(module.reactorModule);\n instrumentation.start();\n reactorLogger.info(\"Reactor metrics instrumentation started\");\n }\n\n return module;\n };\n\n let defaultDriveUrl: undefined | string = undefined;\n\n // TODO get path from powerhouse config\n // start vite server if dev mode is enabled\n const basePath = process.cwd();\n const viteLogger = createViteLogger(logger);\n const vite = dev\n ? 
await startViteServer(process.cwd(), viteLogger)\n : undefined;\n\n // get paths to local document models\n if (!options.disableLocalPackages) {\n packages.push(basePath);\n }\n\n // create loaders\n const packageLoaders: IPackageLoader[] = [];\n if (vite) {\n packageLoaders.push(VitePackageLoader.build(vite));\n } else {\n packageLoaders.push(new ImportPackageLoader());\n }\n if (httpLoader) {\n packageLoaders.push(httpLoader);\n registryPackages?.split(\",\").forEach((p) => {\n const name = p.trim();\n if (!packages.includes(name)) {\n packages.push(name);\n }\n });\n }\n\n const apiLogger = logger.child([\"reactor-api\"]);\n const api = await initializeAndStartAPI(\n initializeClient,\n {\n port: serverPort,\n dbPath: readModelPath,\n https: options.https,\n packageLoaders: packageLoaders.length > 0 ? packageLoaders : undefined,\n packages: packages,\n processorConfig: options.processorConfig,\n processors: {\n \"@powerhousedao/vetra\": [vetraProcessorFactory],\n },\n configFile:\n options.configFile ??\n path.join(process.cwd(), \"powerhouse.config.json\"),\n mcp: options.mcp ?? 
true,\n logger: apiLogger,\n enableDocumentModelSubgraphs: options.enableDocumentModelSubgraphs,\n },\n \"switchboard\",\n );\n\n if (process.env.SENTRY_DSN) {\n // Register Sentry error handler after all routes are established.\n // The adapter calls the framework-specific Sentry setup internally.\n api.httpAdapter.setupSentryErrorHandler(Sentry);\n }\n\n const { client, graphqlManager, documentModelRegistry } = api;\n\n // Wire up dynamic package management if HTTP loader is configured\n if (httpLoader) {\n const packageManagementService = new PackageManagementService({\n defaultRegistryUrl: registryUrl,\n httpLoader,\n documentModelRegistry,\n });\n\n packageManagementService.setOnModelsChanged(() => {\n graphqlManager.regenerateDocumentModelSubgraphs().catch(logger.error);\n });\n\n const packagesSubgraph = new PackagesSubgraph({\n relationalDb: undefined as never,\n analyticsStore: undefined as never,\n reactorClient: client,\n graphqlManager,\n syncManager: api.syncManager,\n path: graphqlManager.getBasePath(),\n packageManagementService,\n });\n\n void graphqlManager\n .registerSubgraphInstance(packagesSubgraph, \"graphql\", false)\n .then(() => graphqlManager.updateRouter())\n .catch((error: unknown) => {\n logger.error(\"Failed to register packages subgraph: @error\", error);\n });\n }\n\n // Create default drive if provided\n if (options.drive) {\n if (!renown) {\n throw new Error(\"Cannot create default drive without Renown identity\");\n }\n\n defaultDriveUrl = await addDefaultDrive(client, options.drive, serverPort);\n }\n\n // add vite middleware after express app is initialized if applicable\n if (vite) {\n api.httpAdapter.mountRawMiddleware(vite.middlewares);\n }\n\n // Connect to remote drives AFTER packages are loaded\n if (remoteDrives.length > 0) {\n for (const remoteDriveUrl of remoteDrives) {\n let driveId: string | undefined;\n\n try {\n const { syncManager } = api;\n const parsed = parseDriveUrl(remoteDriveUrl);\n driveId = parsed.driveId;\n 
const remoteName = `remote-drive-${driveId}-${crypto.randomUUID()}`;\n await syncManager.add(remoteName, driveCollectionId(\"main\", driveId), {\n type: \"gql\",\n parameters: { url: parsed.graphqlEndpoint },\n });\n logger.debug(\"Remote drive @remoteDriveUrl synced\", remoteDriveUrl);\n } catch (error) {\n if (\n error instanceof Error &&\n error.message.includes(\"already exists\")\n ) {\n logger.debug(\n \"Remote drive already added: @remoteDriveUrl\",\n remoteDriveUrl,\n );\n driveId = remoteDriveUrl.split(\"/\").pop();\n } else {\n logger.error(\n \"Failed to connect to remote drive @remoteDriveUrl: @error\",\n remoteDriveUrl,\n error,\n );\n }\n } finally {\n // Construct local URL once in finally block\n if (!defaultDriveUrl && driveId) {\n const protocol = options.https ? \"https\" : \"http\";\n defaultDriveUrl = `${protocol}://localhost:${serverPort}/d/${driveId}`;\n }\n }\n }\n }\n\n return {\n defaultDriveUrl,\n api,\n reactor: client,\n renown,\n };\n}\n\nexport const startSwitchboard = async (\n options: StartServerOptions = {},\n): Promise<SwitchboardReactor> => {\n const serverPort = options.port ?? DEFAULT_PORT;\n\n // Initialize feature flags\n const featureFlags = await initFeatureFlags();\n\n const enableDocumentModelSubgraphs = await featureFlags.getBooleanValue(\n DOCUMENT_MODEL_SUBGRAPHS_ENABLED,\n options.enableDocumentModelSubgraphs ??\n DOCUMENT_MODEL_SUBGRAPHS_ENABLED_DEFAULT,\n );\n\n options.enableDocumentModelSubgraphs = enableDocumentModelSubgraphs;\n\n const requireSignatures =\n options.identity?.requireSignatures ??\n (await featureFlags.getBooleanValue(\n REQUIRE_SIGNATURES,\n REQUIRE_SIGNATURES_DEFAULT,\n ));\n options.identity = { ...options.identity, requireSignatures };\n\n const logger = options.logger ?? 
defaultLogger;\n\n logger.info(\n \"Feature flags: @flags\",\n JSON.stringify(\n {\n DOCUMENT_MODEL_SUBGRAPHS_ENABLED: enableDocumentModelSubgraphs,\n REQUIRE_SIGNATURES: requireSignatures,\n },\n null,\n 2,\n ),\n );\n\n // Initialize Renown if identity options are provided or keypair exists\n let renown: IRenown | null = null;\n try {\n renown = await initRenown(options.identity);\n } catch (e) {\n logger.warn(\"Failed to initialize ConnectCrypto: @error\", e);\n if (options.identity?.requireExisting) {\n throw new Error(\n 'Identity required but failed to initialize. Run \"ph login\" first.',\n );\n }\n }\n\n try {\n return await initServer(serverPort, options, renown);\n } catch (e) {\n Sentry.captureException(e);\n logger.error(\"App crashed: @error\", e);\n throw e;\n }\n};\n\nexport * from \"./types.js\";\n\nif (import.meta.main) {\n await startSwitchboard();\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAGA,eAAsB,mBAAmB;CAEvC,MAAM,WAAW,IAAI,gBAAgB;AAErC,OAAM,YAAY,mBAAmB,SAAS;AAE9C,QAAO,YAAY,WAAW;;;;ACEhC,MAAM,SAAS,YAAY,CAAC,eAAe,SAAS,CAAC;;;;;;AAgBrD,eAAsB,WACpB,UAAyB,EAAE,EACF;CACzB,MAAM,EACJ,aACA,kBAAkB,OAClB,UAAU,uBACR;CAEJ,MAAM,aAAa,IAAI,eAAe,aAAa,EACjD,QACD,CAAC;CAGF,MAAM,kBAAkB,MAAM,WAAW,aAAa;AAEtD,KAAI,CAAC,mBAAmB,gBACtB,OAAM,IAAI,MACR,yFAED;AAGH,KAAI,CAAC,gBACH,QAAO,KAAK,0DAA0D;CAGxE,MAAM,eAAe,MAAM,IAAI,qBAAqB,CACjD,mBAAmB,WAAW,CAC9B,OAAO;CAEV,MAAM,SAAS,MAAM,IAAI,cAAc,eAAe,EAAE,CAAC,CACtD,WAAW,aAAa,CACxB,YAAY,QAAQ,CACpB,OAAO;AAEV,QAAO,KAAK,0CAA0C,aAAa,IAAI;AAEvE,QAAO;;;;;;;;AAST,SAAgB,sBACd,QACA,kBACc;AACd,QAAO;EACL,QAAQ,OAAO;EACf,UAAU,wBAAwB,iBAAiB;EACpD;;;;AC7BH,MAAM,gBAAgB,YAAY,CAAC,cAAc,CAAC;AAElD,MAAM,WAAY,QAAQ,IAAI,aAAuC;AACrE,YAAY,SAAS;AAErB,OAAO,QAAQ;AAGf,MAAM,mCAAmC;AACzC,MAAM,2CAA2C;AACjD,MAAM,qBAAqB;AAC3B,MAAM,6BAA6B;AAEnC,IAAI,QAAQ,IAAI,YAAY;AAC1B,eAAc,KACZ,qCACA,QAAQ,IAAI,WACb;AACD,QAAO,KAAK;EACV,KAAK,QAAQ,IAAI;EACjB,aAAa,QAAQ,IAAI;EAGzB,SACE,QAAQ,IAAI,mBACX,QAAQ,IAAI,sBACT,IAAI,QAAQ,IAAI,wBAChB,KAAA;EACP,CAAC;;AAGJ,MAAM,eAAe,QAAQ,IAAI,OAA
O,OAAO,QAAQ,IAAI,KAAK,GAAG;AAEnE,eAAe,WACb,YACA,SACA,QACA;AAKA,KAAI,QAAQ,cACV,SAAQ,uBAAuB,QAAQ,cAAc;CAGvD,MAAM,EACJ,KACA,WAAW,EAAE,EACb,eAAe,EAAE,EACjB,SAAS,kBACP;AACJ,QAAO,QAAQ;CAIf,MAAM,iBAHS,QAAQ,UAAU,QAAQ,IAAI,iBAGb;CAKhC,MAAM,SAAS,UADb,QAAQ,cAAc,KAAK,KAAK,QAAQ,KAAK,EAAE,yBAAyB,CACtC;CACpC,MAAM,cAAc,QAAQ,IAAI,mBAAmB,OAAO;CAC1D,MAAM,mBAAmB,QAAQ,IAAI;CACrC,MAAM,sBACJ,QAAQ,uBAAuB,QAAQ,IAAI,0BAA0B;CACvE,IAAI;AAEJ,KAAI,aAAa;AAEf,WAAS,gBAAgB,OAAO,KAAK,IAAI;AACzC,eAAa,IAAI,kBAAkB,EAAE,aAAa,CAAC;AACnD,oBAAkB,MAAM,IAAI,CAAC,SAAS,MAAM;GAC1C,MAAM,OAAO,EAAE,MAAM;AACrB,OAAI,CAAC,SAAS,SAAS,KAAK,CAC1B,UAAS,KAAK,KAAK;IAErB;;CAGJ,MAAM,gBAAgB,OAAO,MAAM,CAAC,UAAU,CAAC;CAC/C,MAAM,mBAAmB,OAAO,qBAA0C;EACxE,MAAM,WAAW,IAAI,UAAU;EAC/B,MAAM,UAAU,IAAI,gBAAgB,CACjC,aAAa,SAAS,CACtB,mBACC,wBAAwB;GACtB;GACA;GACA,GAAGA;GACH,GAAGC;GACJ,CAAC,CACH,CACA,kBAAkB,cAAc,YAAY,CAC5C,oBAAoB,CACpB,WAAW,cAAc;EAE5B,MAAM,mBAAmB,SAAS,QAAQ,IAAI,sBAAsB,IAAI,GAAG;AAC3E,MAAI,CAAC,MAAM,iBAAiB,IAAI,mBAAmB,GAAG;AACpD,WAAQ,mBAAmB,EAAE,kBAAkB,CAAC;AAChD,UAAO,KAAK,mCAAmC,mBAAmB;;EAGpE,MAAM,eAAe,QAAQ,IAAI;AACjC,MAAI,gBAAgB,cAAc,aAAa,EAAE;GAK/C,MAAM,SAAS,IAAI,OAAiB,EAClC,SAAS,IAAI,gBAAgB,EAAE,MAFpB,IAAI,KAAK,EAAE,kBAHC,aAAa,SAAS,IAAI,GAC/C,eACA,GAAG,aAAa,mBACsB,CAAC,EAEJ,CAAC,EACvC,CAAC;AACF,WAAQ,WAAW,OAAO;AAC1B,UAAO,KAAK,uCAAuC;SAC9C;GAGL,MAAM,SAAS,IAAI,OAAiB,EAClC,SAAS,IAAI,cAFA,IAAI,OADA,wBACkB,CAED,EACnC,CAAC;AACF,WAAQ,WAAW,OAAO;AAC1B,UAAO,KAAK,mCAAmC;;AAGjD,MAAI,cAAc,oBAChB,SAAQ,wBAAwB,WAAW,oBAAoB;EAGjE,MAAM,gBAAgB,IAAI,sBAAsB,CAAC,mBAC/C,QACD;AAED,MAAI,QAAQ;GACV,MAAM,eAAe,sBACnB,QACA,QAAQ,UAAU,kBACnB;AACD,iBAAc,WAAW,aAAa;;EAGxC,MAAM,SAAS,MAAM,cAAc,aAAa;AAEhD,MAAI,OAAO,eAAe;AACA,OAAI,uBAAuB,OAAO,cAAc,CACxD,OAAO;AACvB,iBAAc,KAAK,0CAA0C;;AAG/D,SAAO;;CAGT,IAAI,kBAAsC,KAAA;CAI1C,MAAM,WAAW,QAAQ,KAAK;CAC9B,MAAM,aAAa,iBAAiB,OAAO;CAC3C,MAAM,OAAO,MACT,MAAM,gBAAgB,QAAQ,KAAK,EAAE,WAAW,GAChD,KAAA;AAGJ,KAAI,CAAC,QAAQ,qBACX,UAAS,KAAK,SAAS;CAIzB,MAAM,iBAAmC,EAAE;AAC3C,KAAI,KACF,gBAAe,KAAK,kBAAkB,MAAM,KAAK,CAAC;KAElD,gBAAe,KAAK,IAAI
,qBAAqB,CAAC;AAEhD,KAAI,YAAY;AACd,iBAAe,KAAK,WAAW;AAC/B,oBAAkB,MAAM,IAAI,CAAC,SAAS,MAAM;GAC1C,MAAM,OAAO,EAAE,MAAM;AACrB,OAAI,CAAC,SAAS,SAAS,KAAK,CAC1B,UAAS,KAAK,KAAK;IAErB;;CAGJ,MAAM,YAAY,OAAO,MAAM,CAAC,cAAc,CAAC;CAC/C,MAAM,MAAM,MAAM,sBAChB,kBACA;EACE,MAAM;EACN,QAAQ;EACR,OAAO,QAAQ;EACf,gBAAgB,eAAe,SAAS,IAAI,iBAAiB,KAAA;EACnD;EACV,iBAAiB,QAAQ;EACzB,YAAY,EACV,wBAAwB,CAACC,iBAAsB,EAChD;EACD,YACE,QAAQ,cACR,KAAK,KAAK,QAAQ,KAAK,EAAE,yBAAyB;EACpD,KAAK,QAAQ,OAAO;EACpB,QAAQ;EACR,8BAA8B,QAAQ;EACvC,EACD,cACD;AAED,KAAI,QAAQ,IAAI,WAGd,KAAI,YAAY,wBAAwB,OAAO;CAGjD,MAAM,EAAE,QAAQ,gBAAgB,0BAA0B;AAG1D,KAAI,YAAY;EACd,MAAM,2BAA2B,IAAI,yBAAyB;GAC5D,oBAAoB;GACpB;GACA;GACD,CAAC;AAEF,2BAAyB,yBAAyB;AAChD,kBAAe,kCAAkC,CAAC,MAAM,OAAO,MAAM;IACrE;EAEF,MAAM,mBAAmB,IAAI,iBAAiB;GAC5C,cAAc,KAAA;GACd,gBAAgB,KAAA;GAChB,eAAe;GACf;GACA,aAAa,IAAI;GACjB,MAAM,eAAe,aAAa;GAClC;GACD,CAAC;AAEG,iBACF,yBAAyB,kBAAkB,WAAW,MAAM,CAC5D,WAAW,eAAe,cAAc,CAAC,CACzC,OAAO,UAAmB;AACzB,UAAO,MAAM,gDAAgD,MAAM;IACnE;;AAIN,KAAI,QAAQ,OAAO;AACjB,MAAI,CAAC,OACH,OAAM,IAAI,MAAM,sDAAsD;AAGxE,oBAAkB,MAAM,gBAAgB,QAAQ,QAAQ,OAAO,WAAW;;AAI5E,KAAI,KACF,KAAI,YAAY,mBAAmB,KAAK,YAAY;AAItD,KAAI,aAAa,SAAS,EACxB,MAAK,MAAM,kBAAkB,cAAc;EACzC,IAAI;AAEJ,MAAI;GACF,MAAM,EAAE,gBAAgB;GACxB,MAAM,SAAS,cAAc,eAAe;AAC5C,aAAU,OAAO;GACjB,MAAM,aAAa,gBAAgB,QAAQ,GAAG,OAAO,YAAY;AACjE,SAAM,YAAY,IAAI,YAAY,kBAAkB,QAAQ,QAAQ,EAAE;IACpE,MAAM;IACN,YAAY,EAAE,KAAK,OAAO,iBAAiB;IAC5C,CAAC;AACF,UAAO,MAAM,uCAAuC,eAAe;WAC5D,OAAO;AACd,OACE,iBAAiB,SACjB,MAAM,QAAQ,SAAS,iBAAiB,EACxC;AACA,WAAO,MACL,+CACA,eACD;AACD,cAAU,eAAe,MAAM,IAAI,CAAC,KAAK;SAEzC,QAAO,MACL,6DACA,gBACA,MACD;YAEK;AAER,OAAI,CAAC,mBAAmB,QAEtB,mBAAkB,GADD,QAAQ,QAAQ,UAAU,OACb,eAAe,WAAW,KAAK;;;AAMrE,QAAO;EACL;EACA;EACA,SAAS;EACT;EACD;;AAGH,MAAa,mBAAmB,OAC9B,UAA8B,EAAE,KACA;CAChC,MAAM,aAAa,QAAQ,QAAQ;CAGnC,MAAM,eAAe,MAAM,kBAAkB;CAE7C,MAAM,+BAA+B,MAAM,aAAa,gBACtD,kCACA,QAAQ,gCACN,yCACH;AAED,SAAQ,+BAA+B;CAEvC,MAAM,oBACJ,QAAQ,UAAU,qBACjB,MAAM,aAAa,gBAClB,oBACA,2BACD;AACH,SAAQ,WAAW;EAAE,GAAG,QAAQ;EAAU;EAAmB;CAE7D,MAAM,SAAS,QAA
Q,UAAU;AAEjC,QAAO,KACL,yBACA,KAAK,UACH;EACE,kCAAkC;EAClC,oBAAoB;EACrB,EACD,MACA,EACD,CACF;CAGD,IAAI,SAAyB;AAC7B,KAAI;AACF,WAAS,MAAM,WAAW,QAAQ,SAAS;UACpC,GAAG;AACV,SAAO,KAAK,8CAA8C,EAAE;AAC5D,MAAI,QAAQ,UAAU,gBACpB,OAAM,IAAI,MACR,sEACD;;AAIL,KAAI;AACF,SAAO,MAAM,WAAW,YAAY,SAAS,OAAO;UAC7C,GAAG;AACV,SAAO,iBAAiB,EAAE;AAC1B,SAAO,MAAM,uBAAuB,EAAE;AACtC,QAAM;;;AAMV,IAAI,OAAO,KAAK,KACd,OAAM,kBAAkB"}
|