@powerhousedao/switchboard 6.0.0-dev.21 → 6.0.0-dev.210

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78) hide show
  1. package/Auth.md +45 -27
  2. package/CHANGELOG.md +1638 -5
  3. package/README.md +13 -12
  4. package/dist/index.d.mts +1 -0
  5. package/dist/index.mjs +129 -0
  6. package/dist/index.mjs.map +1 -0
  7. package/dist/install-packages.d.mts +1 -0
  8. package/dist/install-packages.mjs +31 -0
  9. package/dist/install-packages.mjs.map +1 -0
  10. package/dist/migrate.d.mts +1 -0
  11. package/dist/migrate.mjs +55 -0
  12. package/dist/migrate.mjs.map +1 -0
  13. package/dist/server-8U7q7B7r.mjs +493 -0
  14. package/dist/server-8U7q7B7r.mjs.map +1 -0
  15. package/dist/server.d.mts +93 -0
  16. package/dist/server.d.mts.map +1 -0
  17. package/dist/server.mjs +4 -0
  18. package/dist/utils-DFl0ezBT.mjs +44 -0
  19. package/dist/utils-DFl0ezBT.mjs.map +1 -0
  20. package/dist/utils.d.mts +9 -0
  21. package/dist/utils.d.mts.map +1 -0
  22. package/dist/utils.mjs +2 -0
  23. package/package.json +54 -39
  24. package/test/attachments/auth.test.ts +219 -0
  25. package/test/attachments/index.test.ts +119 -0
  26. package/test/attachments/routes-integration.test.ts +103 -0
  27. package/test/attachments/routes.test.ts +501 -0
  28. package/test/metrics.test.ts +202 -0
  29. package/tsconfig.json +12 -3
  30. package/tsdown.config.ts +16 -0
  31. package/vitest.config.ts +11 -0
  32. package/Dockerfile +0 -86
  33. package/dist/src/clients/redis.d.ts +0 -5
  34. package/dist/src/clients/redis.d.ts.map +0 -1
  35. package/dist/src/clients/redis.js +0 -48
  36. package/dist/src/clients/redis.js.map +0 -1
  37. package/dist/src/config.d.ts +0 -12
  38. package/dist/src/config.d.ts.map +0 -1
  39. package/dist/src/config.js +0 -33
  40. package/dist/src/config.js.map +0 -1
  41. package/dist/src/connect-crypto.d.ts +0 -41
  42. package/dist/src/connect-crypto.d.ts.map +0 -1
  43. package/dist/src/connect-crypto.js +0 -127
  44. package/dist/src/connect-crypto.js.map +0 -1
  45. package/dist/src/feature-flags.d.ts +0 -2
  46. package/dist/src/feature-flags.d.ts.map +0 -1
  47. package/dist/src/feature-flags.js +0 -9
  48. package/dist/src/feature-flags.js.map +0 -1
  49. package/dist/src/index.d.ts +0 -3
  50. package/dist/src/index.d.ts.map +0 -1
  51. package/dist/src/index.js +0 -21
  52. package/dist/src/index.js.map +0 -1
  53. package/dist/src/install-packages.d.ts +0 -2
  54. package/dist/src/install-packages.d.ts.map +0 -1
  55. package/dist/src/install-packages.js +0 -36
  56. package/dist/src/install-packages.js.map +0 -1
  57. package/dist/src/migrate.d.ts +0 -3
  58. package/dist/src/migrate.d.ts.map +0 -1
  59. package/dist/src/migrate.js +0 -65
  60. package/dist/src/migrate.js.map +0 -1
  61. package/dist/src/profiler.d.ts +0 -4
  62. package/dist/src/profiler.d.ts.map +0 -1
  63. package/dist/src/profiler.js +0 -17
  64. package/dist/src/profiler.js.map +0 -1
  65. package/dist/src/server.d.ts +0 -6
  66. package/dist/src/server.d.ts.map +0 -1
  67. package/dist/src/server.js +0 -304
  68. package/dist/src/server.js.map +0 -1
  69. package/dist/src/types.d.ts +0 -64
  70. package/dist/src/types.d.ts.map +0 -1
  71. package/dist/src/types.js +0 -2
  72. package/dist/src/types.js.map +0 -1
  73. package/dist/src/utils.d.ts +0 -6
  74. package/dist/src/utils.d.ts.map +0 -1
  75. package/dist/src/utils.js +0 -92
  76. package/dist/src/utils.js.map +0 -1
  77. package/dist/tsconfig.tsbuildinfo +0 -1
  78. package/entrypoint.sh +0 -17
package/README.md CHANGED
@@ -50,6 +50,7 @@ docker compose -f packages/reactor/docker-compose.yml up -d
50
50
  ```
51
51
 
52
52
  This starts:
53
+
53
54
  - PostgreSQL on port `5433` (mapped from container port 5432)
54
55
  - Adminer (database UI) on port `8080`
55
56
 
@@ -94,22 +95,21 @@ pnpm add -g @powerhousedao/switchboard
94
95
 
95
96
  ## 🏃‍♂️ Quick Start
96
97
 
97
-
98
98
  ## ⚙️ Configuration
99
99
 
100
100
  ### Environment Variables
101
101
 
102
- | Variable | Description | Default |
103
- | ---------------------------- | ---------------------------------- | --------------------- |
104
- | `PORT` | Server port | `4001` |
105
- | `DATABASE_URL` | Database connection string | `./.ph/drive-storage` |
106
- | `PH_REACTOR_DATABASE_URL` | PostgreSQL URL (takes precedence) | - |
107
- | `REDIS_URL` | Redis connection URL | - |
108
- | `REDIS_TLS_URL` | Redis TLS connection URL | - |
109
- | `SENTRY_DSN` | Sentry DSN for error tracking | - |
110
- | `SENTRY_ENV` | Sentry environment | - |
111
- | `PYROSCOPE_SERVER_ADDRESS` | Pyroscope server address | - |
112
- | `FEATURE_REACTORV2_ENABLED` | Enable Reactor v2 subgraph feature | `false` |
102
+ | Variable | Description | Default |
103
+ | --------------------------- | ---------------------------------- | --------------------- |
104
+ | `PORT` | Server port | `4001` |
105
+ | `DATABASE_URL` | Database connection string | `./.ph/drive-storage` |
106
+ | `PH_REACTOR_DATABASE_URL` | PostgreSQL URL (takes precedence) | - |
107
+ | `REDIS_URL` | Redis connection URL | - |
108
+ | `REDIS_TLS_URL` | Redis TLS connection URL | - |
109
+ | `SENTRY_DSN` | Sentry DSN for error tracking | - |
110
+ | `SENTRY_ENV` | Sentry environment | - |
111
+ | `PYROSCOPE_SERVER_ADDRESS` | Pyroscope server address | - |
112
+ | `FEATURE_REACTORV2_ENABLED` | Enable Reactor v2 subgraph feature | `false` |
113
113
 
114
114
  ### Authentication Configuration
115
115
 
@@ -245,6 +245,7 @@ ph switchboard --db-path postgresql://user:pass@localhost:5432/db --migrate-stat
245
245
  #### Environment Variables for Migrations
246
246
 
247
247
  The migration commands check for a PostgreSQL URL in this order:
248
+
248
249
  1. `PH_REACTOR_DATABASE_URL`
249
250
  2. `DATABASE_URL`
250
251
  3. Config file (`powerhouse.config.json` -> `switchboard.database.url`)
@@ -0,0 +1 @@
1
+ export { };
package/dist/index.mjs ADDED
@@ -0,0 +1,129 @@
1
+ #!/usr/bin/env node
2
+ import { n as startSwitchboard } from "./server-8U7q7B7r.mjs";
3
+ import "./utils-DFl0ezBT.mjs";
4
+ import * as Sentry from "@sentry/node";
5
+ import { childLogger } from "document-model";
6
+ import dotenv from "dotenv";
7
+ import { getConfig } from "@powerhousedao/config/node";
8
+ import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-http";
9
+ import { Resource } from "@opentelemetry/resources";
10
+ import { MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
11
//#region src/config.ts
dotenv.config();
const { switchboard } = getConfig();

// Database URL precedence: PH_SWITCHBOARD_DATABASE_URL env var, then the
// powerhouse config file, then a local dev.db fallback.
const databaseUrl = process.env.PH_SWITCHBOARD_DATABASE_URL ?? switchboard?.database?.url ?? "dev.db";

// Port precedence: a non-empty, numeric PH_SWITCHBOARD_PORT env var wins over
// the config file; final fallback is 4001.
const rawPort = process.env.PH_SWITCHBOARD_PORT;
const port = rawPort && !isNaN(Number(rawPort)) ? Number(rawPort) : (switchboard?.port ?? 4001);

/** Runtime configuration assembled from env vars and powerhouse config. */
const config = {
	database: { url: databaseUrl },
	port,
	mcp: true,
	drive: {
		id: "powerhouse",
		slug: "powerhouse",
		global: {
			name: "Powerhouse",
			icon: "https://ipfs.io/ipfs/QmcaTDBYn8X2psGaXe7iQ6qd8q6oqHLgxvMX9yXf7f9uP7"
		},
		local: {
			availableOffline: true,
			listeners: [],
			sharingType: "public",
			triggers: []
		}
	}
};
//#endregion
34
//#region src/metrics.ts
const logger$1 = childLogger(["switchboard", "metrics"]);

/**
 * Builds an OpenTelemetry MeterProvider from OTLP environment variables.
 * Returns undefined when OTEL_EXPORTER_OTLP_ENDPOINT is not set (metrics
 * export disabled).
 */
function createMeterProviderFromEnv(env) {
	const endpoint = env.OTEL_EXPORTER_OTLP_ENDPOINT;
	if (!endpoint) return void 0;

	// Export interval: positive integer from env, otherwise a 5s default.
	const rawInterval = parseInt(env.OTEL_METRIC_EXPORT_INTERVAL ?? "", 10);
	const exportIntervalMillis = Number.isFinite(rawInterval) && rawInterval > 0 ? rawInterval : 5e3;

	// Normalize the endpoint so it always targets the /v1/metrics path.
	const trimmed = endpoint.replace(/\/$/, "");
	const exporterUrl = trimmed.endsWith("/v1/metrics") ? trimmed : `${trimmed}/v1/metrics`;

	logger$1.info(`Initializing OpenTelemetry metrics exporter at: ${endpoint}`);
	const reader = new PeriodicExportingMetricReader({
		exporter: new OTLPMetricExporter({ url: exporterUrl }),
		exportIntervalMillis,
		// Keep the export timeout just under the interval so exports cannot overlap.
		exportTimeoutMillis: Math.max(exportIntervalMillis - 250, 1)
	});
	const meterProvider = new MeterProvider({
		resource: new Resource({ "service.name": env.OTEL_SERVICE_NAME ?? "switchboard" }),
		readers: [reader]
	});
	logger$1.info(`Metrics export enabled (interval: ${exportIntervalMillis}ms)`);
	return meterProvider;
}
//#endregion
56
//#region src/profiler.ts
/**
 * Reads PYROSCOPE_* environment variables and starts the profiler with fixed
 * sampling settings. Wall profiling defaults to on (disable with
 * PYROSCOPE_WALL_ENABLED=false); heap profiling defaults to off (enable with
 * PYROSCOPE_HEAP_ENABLED=true).
 */
async function initProfilerFromEnv(env) {
	const options = {
		serverAddress: env.PYROSCOPE_SERVER_ADDRESS,
		appName: env.PYROSCOPE_APPLICATION_NAME,
		basicAuthUser: env.PYROSCOPE_USER,
		basicAuthPassword: env.PYROSCOPE_PASSWORD,
		// Wall profiling captures wall-clock time, including async I/O waits.
		wall: {
			samplingDurationMs: 1e4,
			samplingIntervalMicros: 1e4,
			collectCpuTime: true
		},
		// Heap profiling samples allocations for memory tracking.
		heap: {
			samplingIntervalBytes: 512 * 1024,
			stackDepth: 64
		}
	};
	const flags = {
		wallEnabled: env.PYROSCOPE_WALL_ENABLED !== "false",
		heapEnabled: env.PYROSCOPE_HEAP_ENABLED === "true"
	};
	return initProfiler(options, flags);
}
78
/**
 * Initializes the Pyroscope profiler and starts the requested profile types.
 * CPU profiling is always started; wall and heap profiling are gated by flags.
 * The Pyroscope module is loaded lazily so it is only required when profiling
 * is actually enabled.
 */
async function initProfiler(options, flags = { wallEnabled: true, heapEnabled: false }) {
	console.log("Initializing Pyroscope profiler at:", options?.serverAddress);
	console.log(" Wall profiling:", flags.wallEnabled ? "enabled" : "disabled");
	console.log(" Heap profiling:", flags.heapEnabled ? "enabled" : "disabled");
	const { default: Pyroscope } = await import("@pyroscope/nodejs");
	Pyroscope.init(options);
	// Wall profiling also shows resolvers while they await I/O.
	if (flags.wallEnabled) {
		Pyroscope.startWallProfiling();
	}
	// CPU profiling (CPU-bound work) is unconditional.
	Pyroscope.startCpuProfiling();
	// Heap profiling is opt-in.
	if (flags.heapEnabled) {
		Pyroscope.startHeapProfiling();
	}
}
//#endregion
92
+ //#region src/index.mts
93
+ const logger = childLogger(["switchboard"]);
94
/**
 * Returns true when the numeric major component of `version` is at least the
 * numeric value of `minVersion`. Unparseable inputs are treated as satisfied
 * so the check never blocks startup spuriously.
 */
function nodeMajorSatisfies(version, minVersion) {
	const currentMajor = parseInt(version.split(".")[0], 10);
	const requiredMajor = parseInt(minVersion, 10);
	if (Number.isNaN(currentMajor) || Number.isNaN(requiredMajor)) return true;
	return currentMajor >= requiredMajor;
}

/**
 * Aborts the process when the running Node.js version is older than
 * `minVersion`. The previous implementation compared version strings
 * lexicographically, so "9.0.0" < "24" was false (old Node slipped through)
 * while "100.0.0" < "24" was true (new Node was rejected); compare the
 * numeric major version instead.
 */
function ensureNodeVersion(minVersion = "24") {
	const version = process.versions.node;
	if (!version) return;
	if (!nodeMajorSatisfies(version, minVersion)) {
		console.error(`Node version ${minVersion} or higher is required. Current version: ${version}`);
		process.exit(1);
	}
}
102
// Enforce the minimum supported Node.js version before anything else runs.
ensureNodeVersion("24");

// Each subgraph registers its own SIGINT/SIGTERM listeners, and the count
// scales with dynamically loaded document models beyond the default cap of 10.
process.setMaxListeners(0);

// Metrics export is opt-in; undefined when OTEL endpoint is not configured.
const meterProvider = createMeterProviderFromEnv({
	OTEL_EXPORTER_OTLP_ENDPOINT: process.env.OTEL_EXPORTER_OTLP_ENDPOINT,
	OTEL_METRIC_EXPORT_INTERVAL: process.env.OTEL_METRIC_EXPORT_INTERVAL,
	OTEL_SERVICE_NAME: process.env.OTEL_SERVICE_NAME
});

/**
 * Flushes final metrics, then exits. The flush races a 5s deadline so an
 * unresponsive OTLP endpoint cannot stall shutdown indefinitely.
 */
async function shutdown() {
	console.log("\nShutting down...");
	const flush = meterProvider?.shutdown().catch(() => void 0);
	const deadline = new Promise((resolve) => setTimeout(resolve, 5e3));
	await Promise.race([flush, deadline]);
	process.exit(0);
}

// SIGINT: Ctrl-C in development; SIGTERM: Docker/Kubernetes graceful shutdown.
process.on("SIGINT", shutdown);
process.on("SIGTERM", shutdown);

// Profiling is opt-in via PYROSCOPE_SERVER_ADDRESS; a profiler failure is
// reported but must never block server startup.
if (process.env.PYROSCOPE_SERVER_ADDRESS) {
	try {
		await initProfilerFromEnv(process.env);
	} catch (e) {
		Sentry.captureException(e);
		logger.error("Error starting profiler: @error", e);
	}
}

startSwitchboard({ ...config, meterProvider }).catch(console.error);
126
+ //#endregion
127
+ export {};
128
+
129
+ //# sourceMappingURL=index.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.mjs","names":["logger"],"sources":["../src/config.ts","../src/metrics.ts","../src/profiler.ts","../src/index.mts"],"sourcesContent":["import dotenv from \"dotenv\";\ndotenv.config();\n\nimport { getConfig } from \"@powerhousedao/config/node\";\nimport type { DriveInput } from \"@powerhousedao/shared/document-drive\";\nconst phConfig = getConfig();\nconst { switchboard } = phConfig;\ninterface Config {\n database: {\n url: string;\n };\n port: number;\n mcp: boolean;\n drive: DriveInput;\n}\nexport const config: Config = {\n database: {\n // url: process.env.PH_SWITCHBOARD_DATABASE_URL ?? switchboard?.database?.url ?? \"dev.db\",\n url:\n process.env.PH_SWITCHBOARD_DATABASE_URL ??\n switchboard?.database?.url ??\n \"dev.db\",\n },\n port:\n process.env.PH_SWITCHBOARD_PORT &&\n !isNaN(Number(process.env.PH_SWITCHBOARD_PORT))\n ? Number(process.env.PH_SWITCHBOARD_PORT)\n : (switchboard?.port ?? 4001),\n mcp: true,\n drive: {\n id: \"powerhouse\",\n slug: \"powerhouse\",\n global: {\n name: \"Powerhouse\",\n icon: \"https://ipfs.io/ipfs/QmcaTDBYn8X2psGaXe7iQ6qd8q6oqHLgxvMX9yXf7f9uP7\",\n },\n local: {\n availableOffline: true,\n listeners: [],\n sharingType: \"public\",\n triggers: [],\n },\n },\n};\n","import { OTLPMetricExporter } from \"@opentelemetry/exporter-metrics-otlp-http\";\nimport { Resource } from \"@opentelemetry/resources\";\nimport {\n MeterProvider,\n PeriodicExportingMetricReader,\n} from \"@opentelemetry/sdk-metrics\";\nimport { childLogger } from \"document-model\";\n\nconst logger = childLogger([\"switchboard\", \"metrics\"]);\n\nexport function createMeterProviderFromEnv(env: {\n OTEL_EXPORTER_OTLP_ENDPOINT?: string;\n OTEL_METRIC_EXPORT_INTERVAL?: string;\n OTEL_SERVICE_NAME?: string;\n}): MeterProvider | undefined {\n const endpoint = env.OTEL_EXPORTER_OTLP_ENDPOINT;\n if (!endpoint) return undefined;\n\n const parsed = parseInt(env.OTEL_METRIC_EXPORT_INTERVAL ?? 
\"\", 10);\n const exportIntervalMillis =\n Number.isFinite(parsed) && parsed > 0 ? parsed : 5_000;\n\n const base = endpoint.replace(/\\/$/, \"\");\n const exporterUrl = base.endsWith(\"/v1/metrics\")\n ? base\n : `${base}/v1/metrics`;\n\n logger.info(`Initializing OpenTelemetry metrics exporter at: ${endpoint}`);\n const meterProvider = new MeterProvider({\n resource: new Resource({\n \"service.name\": env.OTEL_SERVICE_NAME ?? \"switchboard\",\n }),\n readers: [\n new PeriodicExportingMetricReader({\n exporter: new OTLPMetricExporter({\n url: exporterUrl,\n }),\n exportIntervalMillis,\n exportTimeoutMillis: Math.max(exportIntervalMillis - 250, 1),\n }),\n ],\n });\n logger.info(`Metrics export enabled (interval: ${exportIntervalMillis}ms)`);\n return meterProvider;\n}\n","import type { PyroscopeConfig } from \"@pyroscope/nodejs\";\n\nexport async function initProfilerFromEnv(env: typeof process.env) {\n const {\n PYROSCOPE_SERVER_ADDRESS: serverAddress,\n PYROSCOPE_APPLICATION_NAME: appName,\n PYROSCOPE_USER: basicAuthUser,\n PYROSCOPE_PASSWORD: basicAuthPassword,\n PYROSCOPE_WALL_ENABLED: wallEnabled,\n PYROSCOPE_HEAP_ENABLED: heapEnabled,\n } = env;\n\n const options: PyroscopeConfig = {\n serverAddress,\n appName,\n basicAuthUser,\n basicAuthPassword,\n // Wall profiling captures wall-clock time (includes async I/O waits)\n // This shows GraphQL resolvers even when waiting for database\n wall: {\n samplingDurationMs: 10000, // 10 second sampling windows\n samplingIntervalMicros: 10000, // 10ms sampling interval (100 samples/sec)\n collectCpuTime: true, // Also collect CPU time alongside wall time\n },\n // Heap profiling for memory allocation tracking\n heap: {\n samplingIntervalBytes: 512 * 1024, // Sample every 512KB allocated\n stackDepth: 64, // Capture deeper stacks for better context\n },\n };\n return initProfiler(options, {\n wallEnabled: wallEnabled !== \"false\",\n heapEnabled: heapEnabled === \"true\",\n });\n}\n\ninterface ProfilerFlags {\n 
wallEnabled?: boolean;\n heapEnabled?: boolean;\n}\n\nexport async function initProfiler(\n options?: PyroscopeConfig,\n flags: ProfilerFlags = { wallEnabled: true, heapEnabled: false },\n) {\n console.log(\"Initializing Pyroscope profiler at:\", options?.serverAddress);\n console.log(\" Wall profiling:\", flags.wallEnabled ? \"enabled\" : \"disabled\");\n console.log(\" Heap profiling:\", flags.heapEnabled ? \"enabled\" : \"disabled\");\n\n const { default: Pyroscope } = await import(\"@pyroscope/nodejs\");\n Pyroscope.init(options);\n\n // Start wall profiling (captures async I/O time - shows resolvers)\n if (flags.wallEnabled) {\n Pyroscope.startWallProfiling();\n }\n\n // Start CPU profiling (captures CPU-bound work)\n Pyroscope.startCpuProfiling();\n\n // Optionally start heap profiling (memory allocations)\n if (flags.heapEnabled) {\n Pyroscope.startHeapProfiling();\n }\n}\n","#!/usr/bin/env node\nimport * as Sentry from \"@sentry/node\";\nimport { childLogger } from \"document-model\";\nimport { config } from \"./config.js\";\nimport { createMeterProviderFromEnv } from \"./metrics.js\";\nimport { initProfilerFromEnv } from \"./profiler.js\";\nimport { startSwitchboard } from \"./server.mjs\";\n\nconst logger = childLogger([\"switchboard\"]);\n\nfunction ensureNodeVersion(minVersion = \"24\") {\n const version = process.versions.node;\n if (!version) {\n return;\n }\n\n if (version < minVersion) {\n console.error(\n `Node version ${minVersion} or higher is required. 
Current version: ${version}`,\n );\n process.exit(1);\n }\n}\n// Ensure minimum Node.js version\nensureNodeVersion(\"24\");\n\n// Each subgraph registers its own SIGINT/SIGTERM listeners, and the count\n// scales with dynamically-loaded document models beyond the default cap of 10.\nprocess.setMaxListeners(0);\n\nconst meterProvider = createMeterProviderFromEnv({\n OTEL_EXPORTER_OTLP_ENDPOINT: process.env.OTEL_EXPORTER_OTLP_ENDPOINT,\n OTEL_METRIC_EXPORT_INTERVAL: process.env.OTEL_METRIC_EXPORT_INTERVAL,\n OTEL_SERVICE_NAME: process.env.OTEL_SERVICE_NAME,\n});\n\nasync function shutdown() {\n console.log(\"\\nShutting down...\");\n // Flush final metrics before exit. Races against a 5s deadline so an\n // unresponsive OTLP endpoint cannot exhaust terminationGracePeriodSeconds.\n await Promise.race([\n meterProvider?.shutdown().catch(() => undefined),\n new Promise<void>((resolve) => setTimeout(resolve, 5_000)),\n ]);\n process.exit(0);\n}\n\n// SIGINT: Ctrl-C in development; SIGTERM: graceful shutdown in Docker/Kubernetes\nprocess.on(\"SIGINT\", shutdown);\nprocess.on(\"SIGTERM\", shutdown);\n\nif (process.env.PYROSCOPE_SERVER_ADDRESS) {\n try {\n await initProfilerFromEnv(process.env);\n } catch (e) {\n Sentry.captureException(e);\n logger.error(\"Error starting profiler: @error\", e);\n }\n}\n\nstartSwitchboard({ ...config, meterProvider 
}).catch(console.error);\n"],"mappings":";;;;;;;;;;;AACA,OAAO,QAAQ;AAKf,MAAM,EAAE,gBADS,WAAW;AAU5B,MAAa,SAAiB;CAC5B,UAAU,EAER,KACE,QAAQ,IAAI,+BACZ,aAAa,UAAU,OACvB,UACH;CACD,MACE,QAAQ,IAAI,uBACZ,CAAC,MAAM,OAAO,QAAQ,IAAI,oBAAoB,CAAC,GAC3C,OAAO,QAAQ,IAAI,oBAAoB,GACtC,aAAa,QAAQ;CAC5B,KAAK;CACL,OAAO;EACL,IAAI;EACJ,MAAM;EACN,QAAQ;GACN,MAAM;GACN,MAAM;GACP;EACD,OAAO;GACL,kBAAkB;GAClB,WAAW,EAAE;GACb,aAAa;GACb,UAAU,EAAE;GACb;EACF;CACF;;;ACnCD,MAAMA,WAAS,YAAY,CAAC,eAAe,UAAU,CAAC;AAEtD,SAAgB,2BAA2B,KAIb;CAC5B,MAAM,WAAW,IAAI;AACrB,KAAI,CAAC,SAAU,QAAO,KAAA;CAEtB,MAAM,SAAS,SAAS,IAAI,+BAA+B,IAAI,GAAG;CAClE,MAAM,uBACJ,OAAO,SAAS,OAAO,IAAI,SAAS,IAAI,SAAS;CAEnD,MAAM,OAAO,SAAS,QAAQ,OAAO,GAAG;CACxC,MAAM,cAAc,KAAK,SAAS,cAAc,GAC5C,OACA,GAAG,KAAK;AAEZ,UAAO,KAAK,mDAAmD,WAAW;CAC1E,MAAM,gBAAgB,IAAI,cAAc;EACtC,UAAU,IAAI,SAAS,EACrB,gBAAgB,IAAI,qBAAqB,eAC1C,CAAC;EACF,SAAS,CACP,IAAI,8BAA8B;GAChC,UAAU,IAAI,mBAAmB,EAC/B,KAAK,aACN,CAAC;GACF;GACA,qBAAqB,KAAK,IAAI,uBAAuB,KAAK,EAAE;GAC7D,CAAC,CACH;EACF,CAAC;AACF,UAAO,KAAK,qCAAqC,qBAAqB,KAAK;AAC3E,QAAO;;;;ACzCT,eAAsB,oBAAoB,KAAyB;CACjE,MAAM,EACJ,0BAA0B,eAC1B,4BAA4B,SAC5B,gBAAgB,eAChB,oBAAoB,mBACpB,wBAAwB,aACxB,wBAAwB,gBACtB;AAoBJ,QAAO,aAlB0B;EAC/B;EACA;EACA;EACA;EAGA,MAAM;GACJ,oBAAoB;GACpB,wBAAwB;GACxB,gBAAgB;GACjB;EAED,MAAM;GACJ,uBAAuB,MAAM;GAC7B,YAAY;GACb;EACF,EAC4B;EAC3B,aAAa,gBAAgB;EAC7B,aAAa,gBAAgB;EAC9B,CAAC;;AAQJ,eAAsB,aACpB,SACA,QAAuB;CAAE,aAAa;CAAM,aAAa;CAAO,EAChE;AACA,SAAQ,IAAI,uCAAuC,SAAS,cAAc;AAC1E,SAAQ,IAAI,qBAAqB,MAAM,cAAc,YAAY,WAAW;AAC5E,SAAQ,IAAI,qBAAqB,MAAM,cAAc,YAAY,WAAW;CAE5E,MAAM,EAAE,SAAS,cAAc,MAAM,OAAO;AAC5C,WAAU,KAAK,QAAQ;AAGvB,KAAI,MAAM,YACR,WAAU,oBAAoB;AAIhC,WAAU,mBAAmB;AAG7B,KAAI,MAAM,YACR,WAAU,oBAAoB;;;;ACtDlC,MAAM,SAAS,YAAY,CAAC,cAAc,CAAC;AAE3C,SAAS,kBAAkB,aAAa,MAAM;CAC5C,MAAM,UAAU,QAAQ,SAAS;AACjC,KAAI,CAAC,QACH;AAGF,KAAI,UAAU,YAAY;AACxB,UAAQ,MACN,gBAAgB,WAAW,2CAA2C,UACvE;AACD,UAAQ,KAAK,EAAE;;;AAInB,kBAAkB,KAAK;AAIvB,QAAQ,gBAAgB,EAAE;AAE1B,MAAM,gBAAgB,2BAA2B;CAC/C,6BAA6B,QAAQ,IAAI;CACzC,6BAA6B,QAAQ,IAAI;CACzC,mBAAmB,QAAQ,IAAI;
CAChC,CAAC;AAEF,eAAe,WAAW;AACxB,SAAQ,IAAI,qBAAqB;AAGjC,OAAM,QAAQ,KAAK,CACjB,eAAe,UAAU,CAAC,YAAY,KAAA,EAAU,EAChD,IAAI,SAAe,YAAY,WAAW,SAAS,IAAM,CAAC,CAC3D,CAAC;AACF,SAAQ,KAAK,EAAE;;AAIjB,QAAQ,GAAG,UAAU,SAAS;AAC9B,QAAQ,GAAG,WAAW,SAAS;AAE/B,IAAI,QAAQ,IAAI,yBACd,KAAI;AACF,OAAM,oBAAoB,QAAQ,IAAI;SAC/B,GAAG;AACV,QAAO,iBAAiB,EAAE;AAC1B,QAAO,MAAM,mCAAmC,EAAE;;AAItD,iBAAiB;CAAE,GAAG;CAAQ;CAAe,CAAC,CAAC,MAAM,QAAQ,MAAM"}
@@ -0,0 +1 @@
1
+ export { };
@@ -0,0 +1,31 @@
1
import { execSync } from "child_process";
import fs from "fs";
import path from "path";

//#region src/install-packages.mts
// Comma-separated list of packages to install, e.g. PH_PACKAGES="a,b,@scope/c".
const pkgs = process.env.PH_PACKAGES?.split(",") || [];

// Nothing to do when the variable is unset or empty.
if (pkgs.length === 0 || (pkgs.length === 1 && pkgs[0] === "")) process.exit(0);

// PH_PACKAGES is external input interpolated into a shell command below;
// accept only names matching the npm package-name grammar so a value like
// "foo; rm -rf /" cannot inject shell commands.
const VALID_PACKAGE_NAME = /^(@[a-z0-9-~][a-z0-9-._~]*\/)?[a-z0-9-~][a-z0-9-._~]*$/;

try {
	// Read package.json to learn which dependencies are already present.
	const packageJsonPath = path.join(process.cwd(), "package.json");
	const packageJsonContent = fs.readFileSync(packageJsonPath, "utf-8");
	const packageJson = JSON.parse(packageJsonContent);
	// Union of runtime and dev dependencies.
	const installedDependencies = {
		...(packageJson.dependencies || {}),
		...(packageJson.devDependencies || {})
	};
	for (const pkg of pkgs) {
		const name = pkg.trim();
		if (name === "") continue;
		if (!VALID_PACKAGE_NAME.test(name)) {
			console.error(`> Skipping invalid package name: ${name}`);
			continue;
		}
		if (installedDependencies[name]) {
			console.log(`> Package ${name} is already installed, skipping`);
			continue;
		}
		console.log(`> Installing ${name}`);
		execSync(`pnpm add ${name}@latest`, { stdio: "inherit" });
	}
} catch (error) {
	console.error("Error in package installation:", error);
	process.exit(1);
}
//#endregion
export {};
30
+
31
+ //# sourceMappingURL=install-packages.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"install-packages.mjs","names":[],"sources":["../src/install-packages.mts"],"sourcesContent":["import { execSync } from \"child_process\";\nimport fs from \"fs\";\nimport path from \"path\";\n\n// Define interface for package.json\ninterface PackageJson {\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n}\n\n// Get the list of packages to install from the environment variable\nconst pkgs = process.env.PH_PACKAGES?.split(\",\") || [];\n\n// Skip if no packages to install\nif (pkgs.length === 0 || (pkgs.length === 1 && pkgs[0] === \"\")) {\n process.exit(0);\n}\n\ntry {\n // Read the package.json file to check existing dependencies\n const packageJsonPath = path.join(process.cwd(), \"package.json\");\n const packageJsonContent = fs.readFileSync(packageJsonPath, \"utf-8\");\n const packageJson = JSON.parse(packageJsonContent) as PackageJson;\n\n // Get all installed dependencies\n const installedDependencies: Record<string, string> = {\n ...(packageJson.dependencies || {}),\n ...(packageJson.devDependencies || {}),\n };\n\n for (const pkg of pkgs) {\n if (pkg === \"\") continue;\n\n // Check if the package is already installed\n if (installedDependencies[pkg]) {\n console.log(`> Package ${pkg} is already installed, skipping`);\n continue;\n }\n\n console.log(`> Installing ${pkg}`);\n execSync(`pnpm add ${pkg}@latest`, { stdio: \"inherit\" });\n }\n} catch (error) {\n console.error(\"Error in package installation:\", error);\n 
process.exit(1);\n}\n"],"mappings":";;;;AAWA,MAAM,OAAO,QAAQ,IAAI,aAAa,MAAM,IAAI,IAAI,EAAE;AAGtD,IAAI,KAAK,WAAW,KAAM,KAAK,WAAW,KAAK,KAAK,OAAO,GACzD,SAAQ,KAAK,EAAE;AAGjB,IAAI;CAEF,MAAM,kBAAkB,KAAK,KAAK,QAAQ,KAAK,EAAE,eAAe;CAChE,MAAM,qBAAqB,GAAG,aAAa,iBAAiB,QAAQ;CACpE,MAAM,cAAc,KAAK,MAAM,mBAAmB;CAGlD,MAAM,wBAAgD;EACpD,GAAI,YAAY,gBAAgB,EAAE;EAClC,GAAI,YAAY,mBAAmB,EAAE;EACtC;AAED,MAAK,MAAM,OAAO,MAAM;AACtB,MAAI,QAAQ,GAAI;AAGhB,MAAI,sBAAsB,MAAM;AAC9B,WAAQ,IAAI,aAAa,IAAI,iCAAiC;AAC9D;;AAGF,UAAQ,IAAI,gBAAgB,MAAM;AAClC,WAAS,YAAY,IAAI,UAAU,EAAE,OAAO,WAAW,CAAC;;SAEnD,OAAO;AACd,SAAQ,MAAM,kCAAkC,MAAM;AACtD,SAAQ,KAAK,EAAE"}
@@ -0,0 +1 @@
1
+ export { };
@@ -0,0 +1,55 @@
1
+ #!/usr/bin/env node
2
+ import { getConfig } from "@powerhousedao/config/node";
3
+ import { REACTOR_SCHEMA, getMigrationStatus, runMigrations } from "@powerhousedao/reactor";
4
+ import { Kysely, PostgresDialect } from "kysely";
5
+ import { Pool } from "pg";
6
+ //#region src/migrate.mts
7
// A migration target must be a PostgreSQL connection string; both accepted
// scheme spellings are recognized.
function isPostgresUrl(url) {
	return ["postgresql://", "postgres://"].some((scheme) => url.startsWith(scheme));
}
10
/**
 * CLI entry point. `status` reports per-migration execution state; any other
 * (or no) argument runs pending migrations. Skips entirely unless a
 * PostgreSQL URL is configured, since PGlite migrations run automatically on
 * server startup.
 */
async function main() {
	const command = process.argv[2];
	const config = getConfig();

	// URL precedence: PH_REACTOR_DATABASE_URL > DATABASE_URL > config file.
	const dbPath = process.env.PH_REACTOR_DATABASE_URL ?? process.env.DATABASE_URL ?? config.switchboard?.database?.url;
	if (!dbPath || !isPostgresUrl(dbPath)) {
		console.log("No PostgreSQL URL configured. Skipping migrations.");
		console.log("(PGlite migrations are handled automatically on startup)");
		return;
	}
	console.log(`Database: ${dbPath}`);

	const pool = new Pool({ connectionString: dbPath });
	const db = new Kysely({ dialect: new PostgresDialect({ pool }) });
	try {
		if (command === "status") {
			console.log("\nChecking migration status...");
			const migrations = await getMigrationStatus(db, REACTOR_SCHEMA);
			console.log("\nMigration Status:");
			console.log("=================");
			for (const migration of migrations) {
				const label = migration.executedAt ? `[OK] Executed at ${migration.executedAt.toISOString()}` : "[--] Pending";
				console.log(`${label} - ${migration.name}`);
			}
			return;
		}
		console.log("\nRunning migrations...");
		const result = await runMigrations(db, REACTOR_SCHEMA);
		if (!result.success) {
			console.error("Migration failed:", result.error?.message);
			process.exit(1);
		}
		if (result.migrationsExecuted.length === 0) {
			console.log("No migrations to run - database is up to date");
		} else {
			console.log(`Successfully executed ${result.migrationsExecuted.length} migration(s):`);
			for (const name of result.migrationsExecuted) {
				console.log(` - ${name}`);
			}
		}
	} catch (error) {
		console.error("Error:", error instanceof Error ? error.message : String(error));
		process.exit(1);
	} finally {
		// Always release the connection pool, even on failure paths.
		await db.destroy();
	}
}
main();
52
+ //#endregion
53
+ export {};
54
+
55
+ //# sourceMappingURL=migrate.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"migrate.mjs","names":[],"sources":["../src/migrate.mts"],"sourcesContent":["#!/usr/bin/env node\nimport { Kysely, PostgresDialect } from \"kysely\";\nimport { Pool } from \"pg\";\nimport {\n runMigrations,\n getMigrationStatus,\n REACTOR_SCHEMA,\n} from \"@powerhousedao/reactor\";\nimport { getConfig } from \"@powerhousedao/config/node\";\n\nfunction isPostgresUrl(url: string): boolean {\n return url.startsWith(\"postgresql://\") || url.startsWith(\"postgres://\");\n}\n\nasync function main() {\n const command = process.argv[2];\n const config = getConfig();\n\n const dbPath =\n process.env.PH_REACTOR_DATABASE_URL ??\n process.env.DATABASE_URL ??\n config.switchboard?.database?.url;\n\n if (!dbPath || !isPostgresUrl(dbPath)) {\n console.log(\"No PostgreSQL URL configured. Skipping migrations.\");\n console.log(\"(PGlite migrations are handled automatically on startup)\");\n return;\n }\n\n console.log(`Database: ${dbPath}`);\n\n const pool = new Pool({ connectionString: dbPath });\n\n const db = new Kysely<any>({\n dialect: new PostgresDialect({ pool }),\n });\n\n try {\n if (command === \"status\") {\n console.log(\"\\nChecking migration status...\");\n const migrations = await getMigrationStatus(db, REACTOR_SCHEMA);\n\n console.log(\"\\nMigration Status:\");\n console.log(\"=================\");\n\n for (const migration of migrations) {\n const status = migration.executedAt\n ? 
`[OK] Executed at ${migration.executedAt.toISOString()}`\n : \"[--] Pending\";\n console.log(`${status} - ${migration.name}`);\n }\n } else {\n console.log(\"\\nRunning migrations...\");\n const result = await runMigrations(db, REACTOR_SCHEMA);\n\n if (!result.success) {\n console.error(\"Migration failed:\", result.error?.message);\n process.exit(1);\n }\n\n if (result.migrationsExecuted.length === 0) {\n console.log(\"No migrations to run - database is up to date\");\n } else {\n console.log(\n `Successfully executed ${result.migrationsExecuted.length} migration(s):`,\n );\n for (const name of result.migrationsExecuted) {\n console.log(` - ${name}`);\n }\n }\n }\n } catch (error) {\n console.error(\n \"Error:\",\n error instanceof Error ? error.message : String(error),\n );\n process.exit(1);\n } finally {\n await db.destroy();\n }\n}\n\nvoid main();\n"],"mappings":";;;;;;AAUA,SAAS,cAAc,KAAsB;AAC3C,QAAO,IAAI,WAAW,gBAAgB,IAAI,IAAI,WAAW,cAAc;;AAGzE,eAAe,OAAO;CACpB,MAAM,UAAU,QAAQ,KAAK;CAC7B,MAAM,SAAS,WAAW;CAE1B,MAAM,SACJ,QAAQ,IAAI,2BACZ,QAAQ,IAAI,gBACZ,OAAO,aAAa,UAAU;AAEhC,KAAI,CAAC,UAAU,CAAC,cAAc,OAAO,EAAE;AACrC,UAAQ,IAAI,qDAAqD;AACjE,UAAQ,IAAI,2DAA2D;AACvE;;AAGF,SAAQ,IAAI,aAAa,SAAS;CAIlC,MAAM,KAAK,IAAI,OAAY,EACzB,SAAS,IAAI,gBAAgB,EAAE,MAHpB,IAAI,KAAK,EAAE,kBAAkB,QAAQ,CAAC,EAGZ,CAAC,EACvC,CAAC;AAEF,KAAI;AACF,MAAI,YAAY,UAAU;AACxB,WAAQ,IAAI,iCAAiC;GAC7C,MAAM,aAAa,MAAM,mBAAmB,IAAI,eAAe;AAE/D,WAAQ,IAAI,sBAAsB;AAClC,WAAQ,IAAI,oBAAoB;AAEhC,QAAK,MAAM,aAAa,YAAY;IAClC,MAAM,SAAS,UAAU,aACrB,oBAAoB,UAAU,WAAW,aAAa,KACtD;AACJ,YAAQ,IAAI,GAAG,OAAO,KAAK,UAAU,OAAO;;SAEzC;AACL,WAAQ,IAAI,0BAA0B;GACtC,MAAM,SAAS,MAAM,cAAc,IAAI,eAAe;AAEtD,OAAI,CAAC,OAAO,SAAS;AACnB,YAAQ,MAAM,qBAAqB,OAAO,OAAO,QAAQ;AACzD,YAAQ,KAAK,EAAE;;AAGjB,OAAI,OAAO,mBAAmB,WAAW,EACvC,SAAQ,IAAI,gDAAgD;QACvD;AACL,YAAQ,IACN,yBAAyB,OAAO,mBAAmB,OAAO,gBAC3D;AACD,SAAK,MAAM,QAAQ,OAAO,mBACxB,SAAQ,IAAI,OAAO,OAAO;;;UAIzB,OAAO;AACd,UAAQ,MACN,UACA,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,CACvD;AACD,UAAQ,KAAK,EAAE;WACP;AACR,QAAM,GAAG,SAAS;;;AAI
jB,MAAM"}