@aictrl/hush 0.1.0 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,55 @@
1
/**
 * Mock ZhipuAI upstream server for E2E testing.
 *
 * Captures the request body sent by the Hush gateway (so tests can verify PII
 * was redacted) and echoes the last user message back so rehydration can be
 * verified on the client side.
 *
 * Endpoints:
 *   POST /api/paas/v4/chat/completions  — capture body, echo last user message
 *   GET  /captured                      — return the last captured body
 *   GET  /health                        — liveness probe
 */
import http from 'node:http';
import fs from 'node:fs';

// Explicit radix; a non-numeric MOCK_PORT falls back to the default port.
const PORT = parseInt(process.env.MOCK_PORT || '4111', 10) || 4111;
const CAPTURE_FILE = process.env.CAPTURE_FILE || '/tmp/hush-e2e-captured-body.json';

const server = http.createServer((req, res) => {
  if (req.method === 'POST' && req.url === '/api/paas/v4/chat/completions') {
    let body = '';
    req.on('data', chunk => { body += chunk; });
    req.on('end', () => {
      // Save the captured request body for later verification via GET /captured
      fs.writeFileSync(CAPTURE_FILE, body);

      // Bug fix: a malformed JSON body used to throw here (inside the 'end'
      // handler) and crash the whole mock process mid-test; answer 400 instead.
      let parsed;
      try {
        parsed = JSON.parse(body);
      } catch {
        res.writeHead(400, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: 'invalid JSON body' }));
        return;
      }

      // Echo back the last user message content so we can verify rehydration
      const lastMessage = parsed.messages?.[parsed.messages.length - 1]?.content || '';

      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({
        id: 'chatcmpl-e2e-mock-001',
        model: 'glm-5',
        choices: [{
          index: 0,
          message: { role: 'assistant', content: `Echoing back: ${lastMessage}` },
          finish_reason: 'stop'
        }],
        usage: { prompt_tokens: 50, completion_tokens: 20, total_tokens: 70 }
      }));
    });
  } else if (req.method === 'GET' && req.url === '/captured') {
    try {
      const captured = fs.readFileSync(CAPTURE_FILE, 'utf8');
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(captured);
    } catch {
      // No capture yet — report "nothing captured" rather than erroring out.
      res.writeHead(404);
      res.end('{}');
    }
  } else if (req.method === 'GET' && req.url === '/health') {
    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ status: 'mock-running' }));
  } else {
    res.writeHead(404);
    res.end('Not found');
  }
});

server.listen(PORT, '127.0.0.1', () => {
  console.log(`Mock ZhipuAI upstream listening on http://127.0.0.1:${PORT}`);
});
@@ -0,0 +1,217 @@
1
#!/usr/bin/env bash
#
# E2E test: Verify Hush gateway intercepts PII from OpenCode/GLM-5 requests
#
# This script:
# 1. Starts a mock ZhipuAI upstream that captures the request body
# 2. Starts a Hush gateway harness pointed at the mock upstream
# 3. Sends a GLM-5 chat completion request containing PII through the gateway
# 4. Verifies PII was redacted in the request that reached the mock upstream
# 5. Verifies the response was rehydrated back to original PII
# 6. Verifies the vault captured tokens (via /health endpoint)
#
# Usage: ./scripts/e2e-opencode.sh
# Requirements: node, npm (dependencies must be installed)

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
NC='\033[0m'

MOCK_PORT=4111
GATEWAY_PORT=4222
MOCK_PID=""
GATEWAY_PID=""
PASS_COUNT=0
FAIL_COUNT=0
CAPTURE_FILE="/tmp/hush-e2e-captured-body.json"

# Kill background processes and remove the capture file on any exit path.
cleanup() {
  echo ""
  echo -e "${CYAN}Cleaning up...${NC}"
  [ -n "$MOCK_PID" ] && kill "$MOCK_PID" 2>/dev/null || true
  [ -n "$GATEWAY_PID" ] && kill "$GATEWAY_PID" 2>/dev/null || true
  rm -f "$CAPTURE_FILE"
  wait 2>/dev/null || true
}
trap cleanup EXIT

pass() {
  PASS_COUNT=$((PASS_COUNT + 1))
  echo -e "  ${GREEN}PASS${NC} $1"
}

fail() {
  FAIL_COUNT=$((FAIL_COUNT + 1))
  echo -e "  ${RED}FAIL${NC} $1"
}

# assert_contains <haystack> <needle> <message> — fixed-string match expected.
assert_contains() {
  local haystack="$1" needle="$2" msg="$3"
  if echo "$haystack" | grep -qF "$needle"; then
    pass "$msg"
  else
    fail "$msg (expected to find '$needle')"
  fi
}

# assert_not_contains <haystack> <needle> <message> — fixed string must be absent.
assert_not_contains() {
  local haystack="$1" needle="$2" msg="$3"
  if echo "$haystack" | grep -qF "$needle"; then
    fail "$msg (found '$needle' which should have been redacted)"
  else
    pass "$msg"
  fi
}

# Poll http://127.0.0.1:<port>/health until it answers or attempts run out.
wait_for_port() {
  local port=$1 label=$2 max_attempts=${3:-20}
  for _ in $(seq 1 "$max_attempts"); do
    if curl -sf "http://127.0.0.1:${port}/health" > /dev/null 2>&1; then
      return 0
    fi
    sleep 0.5
  done
  echo -e "${RED}${label} failed to start on :${port}${NC}"
  return 1
}

echo -e "${CYAN}================================================${NC}"
echo -e "${CYAN}  Hush Gateway E2E: OpenCode + GLM-5 PII Test  ${NC}"
echo -e "${CYAN}================================================${NC}"
echo ""

cd "$PROJECT_DIR"

# --- Step 1: Start mock ZhipuAI upstream ---
echo -e "${YELLOW}[1/5] Starting mock ZhipuAI upstream on :${MOCK_PORT}...${NC}"

MOCK_PORT="$MOCK_PORT" CAPTURE_FILE="$CAPTURE_FILE" node scripts/e2e-mock-upstream.mjs &
MOCK_PID=$!
sleep 1

if ! kill -0 "$MOCK_PID" 2>/dev/null; then
  echo -e "${RED}Mock upstream failed to start${NC}"
  exit 1
fi
echo -e "  Mock upstream PID: ${MOCK_PID}"

# --- Step 2: Start Hush gateway (E2E harness pointing at mock) ---
echo -e "${YELLOW}[2/5] Starting Hush gateway on :${GATEWAY_PORT} -> mock :${MOCK_PORT}...${NC}"

GATEWAY_PORT="$GATEWAY_PORT" MOCK_PORT="$MOCK_PORT" npx tsx scripts/e2e-gateway-harness.ts &
GATEWAY_PID=$!

wait_for_port "$GATEWAY_PORT" "Gateway" || exit 1
echo -e "  Gateway PID: ${GATEWAY_PID}"

# --- Step 3: Send a GLM-5 request with PII through the gateway ---
echo -e "${YELLOW}[3/5] Sending GLM-5 chat completion with PII through gateway...${NC}"

# These are the PII values we'll send (mimicking what an OpenCode session would contain)
PII_EMAIL="testuser@example-corp.com"
PII_IP="10.42.99.7"
PII_SECRET_VALUE="secret_test_a1b2c3d4e5f6g7h8i9j0k1l2"
PII_SECRET="api_key=${PII_SECRET_VALUE}"

RESPONSE=$(curl -sf -X POST "http://127.0.0.1:${GATEWAY_PORT}/api/paas/v4/chat/completions" \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer test-e2e-key" \
  -d "{
    \"model\": \"glm-5\",
    \"messages\": [{
      \"role\": \"user\",
      \"content\": \"My email is ${PII_EMAIL} and server IP is ${PII_IP}. Credentials: ${PII_SECRET}\"
    }]
  }")

echo -e "  Response received ($(echo "$RESPONSE" | wc -c) bytes)"

# --- Step 4: Verify PII interception ---
echo ""
echo -e "${YELLOW}[4/5] Verifying PII interception...${NC}"
echo ""

# 4a. Check what the mock upstream received
CAPTURED=$(curl -sf "http://127.0.0.1:${MOCK_PORT}/captured" || echo "{}")

echo -e "  ${CYAN}What ZhipuAI upstream received (should have tokens, NOT real PII):${NC}"
echo "  $(echo "$CAPTURED" | python3 -m json.tool 2>/dev/null | head -20 || echo "$CAPTURED" | head -c 600)"
echo ""

# Verify PII was REDACTED in upstream request
assert_not_contains "$CAPTURED" "$PII_EMAIL" "Email NOT sent to ZhipuAI upstream"
assert_not_contains "$CAPTURED" "$PII_IP" "IP address NOT sent to ZhipuAI upstream"
# Bug fix: the secret was only checked for a replacement token below, but the
# raw secret value itself was never asserted absent from the upstream capture.
assert_not_contains "$CAPTURED" "$PII_SECRET_VALUE" "Secret value NOT sent to ZhipuAI upstream"

# Verify tokens were substituted (format-agnostic: matches [USER_EMAIL_1] or [HUSH_EML_*] etc.)
if echo "$CAPTURED" | grep -qE '\[.*EMAIL'; then
  pass "Email replaced with redaction token"
else
  fail "Email replaced with redaction token (no EMAIL token found)"
fi

if echo "$CAPTURED" | grep -qE '\[.*IP'; then
  pass "IP replaced with redaction token"
else
  fail "IP replaced with redaction token (no IP token found)"
fi

if echo "$CAPTURED" | grep -qE '\[.*SECRET'; then
  pass "Secret replaced with redaction token"
else
  fail "Secret replaced with redaction token (no SECRET token found)"
fi

echo ""

# 4b. Check gateway response (should contain REHYDRATED original PII)
echo -e "  ${CYAN}What the client (OpenCode) receives back (should have original PII):${NC}"
ASSISTANT_CONTENT=$(echo "$RESPONSE" | python3 -c "
import sys, json
data = json.load(sys.stdin)
print(data['choices'][0]['message']['content'])
" 2>/dev/null || echo "$RESPONSE")
echo "  ${ASSISTANT_CONTENT:0:300}"
echo ""

assert_contains "$ASSISTANT_CONTENT" "$PII_EMAIL" "Email rehydrated in response to client"
assert_contains "$ASSISTANT_CONTENT" "$PII_IP" "IP address rehydrated in response to client"

echo ""

# 4c. Check vault via /health endpoint
# NOTE(review): the gateway only reports vaultSize when DEBUG=true — the
# harness is assumed to set it; verify against e2e-gateway-harness.ts.
HEALTH=$(curl -sf "http://127.0.0.1:${GATEWAY_PORT}/health")
VAULT_SIZE=$(echo "$HEALTH" | python3 -c "import sys, json; print(json.load(sys.stdin).get('vaultSize', 0))")
echo -e "  ${CYAN}Gateway vault size: ${VAULT_SIZE}${NC}"

if [ "$VAULT_SIZE" -gt 0 ]; then
  pass "Vault contains ${VAULT_SIZE} token(s) - PII intercepted and stored"
else
  fail "Vault is empty (expected > 0 tokens)"
fi

# --- Step 5: Summary ---
echo ""
echo -e "${CYAN}================================================${NC}"
TOTAL=$((PASS_COUNT + FAIL_COUNT))
if [ "$FAIL_COUNT" -eq 0 ]; then
  echo -e "${GREEN}  ALL ${TOTAL} CHECKS PASSED${NC}"
  echo ""
  echo -e "  ${GREEN}PII was intercepted by Hush gateway before${NC}"
  echo -e "  ${GREEN}reaching the ZhipuAI/GLM-5 upstream server.${NC}"
  echo -e "  ${GREEN}Client received rehydrated original values.${NC}"
  echo ""
  echo -e "  This confirms OpenCode + GLM-5 is safe to use"
  echo -e "  through the Hush Semantic Security Gateway."
else
  echo -e "${RED}  ${FAIL_COUNT}/${TOTAL} CHECKS FAILED${NC}"
fi
echo -e "${CYAN}================================================${NC}"

exit "$FAIL_COUNT"
package/src/cli.ts ADDED
@@ -0,0 +1,20 @@
1
+ #!/usr/bin/env node
2
+ import { app } from './index.js';
3
+ import { createLogger } from './lib/logger.js';
4
+
5
+ const log = createLogger('hush-cli');
6
+ const PORT = process.env.PORT || 4000;
7
+
8
+ const server = app.listen(PORT, () => {
9
+ log.info(`Hush Semantic Gateway is listening on http://localhost:${PORT}`);
10
+ log.info(`Routes: /v1/messages → Anthropic, /v1/chat/completions → OpenAI, /api/paas/v4/** → ZhipuAI, * → Google`);
11
+ });
12
+
13
+ server.on('error', (err: NodeJS.ErrnoException) => {
14
+ if (err.code === 'EADDRINUSE') {
15
+ log.error(`Port ${PORT} is already in use. Stop the other process or use PORT=<number> hush`);
16
+ } else {
17
+ log.error({ err }, 'Failed to start server');
18
+ }
19
+ process.exit(1);
20
+ });
package/src/index.ts ADDED
@@ -0,0 +1,261 @@
1
+ import express from 'express';
2
+ import cors from 'cors';
3
+ import { createLogger } from './lib/logger.js';
4
+ import { Redactor } from './middleware/redactor.js';
5
+ import { TokenVault } from './vault/token-vault.js';
6
+ import { Dashboard } from './lib/dashboard.js';
7
+
8
+ const log = createLogger('hush-proxy');
9
+ const redactor = new Redactor();
10
+ const vault = new TokenVault();
11
+
12
+ // Lazy-initialize dashboard to ensure it captures flags set by CLI or ENV
13
+ let _dashboard: Dashboard | null = null;
14
+ function getDashboard(): Dashboard | null {
15
+ if (_dashboard) return _dashboard;
16
+ if (process.env.HUSH_DASHBOARD === 'true' || process.argv.includes('--dashboard')) {
17
+ _dashboard = new Dashboard();
18
+ }
19
+ return _dashboard;
20
+ }
21
+
22
+ // Force immediate initialization if dashboard flag is present
23
+ getDashboard();
24
+
25
export const app = express();

// Security: intended bind address (localhost unless HUSH_HOST overrides).
// NOTE(review): BIND_ADDRESS is not referenced anywhere in this file — the
// actual bind happens wherever app.listen() is called; confirm the caller
// (e.g. cli.ts) honours it, otherwise the localhost-only intent is not enforced.
const BIND_ADDRESS = process.env.HUSH_HOST || '127.0.0.1';

// Security: optional bearer token protecting the proxy itself (enforced by the
// auth middleware below when set).
const HUSH_TOKEN = process.env.HUSH_AUTH_TOKEN;

app.use(cors({ origin: 'http://localhost' })); // Restrict CORS to local callers
app.use(express.json({ limit: '50mb' })); // large limit: LLM payloads can be big
35
+
36
+ /**
37
+ * Security Middleware: Local Proxy Authentication
38
+ */
39
+ app.use((req, res, next) => {
40
+ if (req.path === '/health') return next();
41
+
42
+ if (HUSH_TOKEN) {
43
+ const authHeader = req.headers['x-hush-token'] || req.headers['authorization'];
44
+ const providedToken = Array.isArray(authHeader) ? authHeader[0] : authHeader;
45
+
46
+ if (!providedToken || (providedToken !== HUSH_TOKEN && providedToken !== `Bearer ${HUSH_TOKEN}`)) {
47
+ log.warn({ ip: req.ip }, 'Unauthorized access attempt');
48
+ return res.status(401).json({ error: 'Unauthorized: Invalid HUSH_AUTH_TOKEN' });
49
+ }
50
+ }
51
+ next();
52
+ });
53
+
54
+ /**
55
+ * Helper to handle proxying with optional streaming
56
+ */
57
+ async function proxyRequest(
58
+ req: express.Request,
59
+ res: express.Response,
60
+ targetUrl: string,
61
+ headers: Record<string, string>
62
+ ) {
63
+ const startTime = performance.now();
64
+ const dashboard = getDashboard();
65
+
66
+ const hasBody = ['POST', 'PUT', 'PATCH'].includes(req.method);
67
+
68
+ // 1. Redact Request Body (Prompts, Tool Results) — only for methods with a body
69
+ let redactedBody: any;
70
+ let tokens = new Map<string, string>();
71
+ let hasRedacted = false;
72
+
73
+ if (hasBody) {
74
+ const result = redactor.redact(req.body);
75
+ redactedBody = result.content;
76
+ tokens = result.tokens;
77
+ hasRedacted = result.hasRedacted;
78
+ }
79
+
80
+ const redactionDuration = Math.round(performance.now() - startTime);
81
+
82
+ // Log all requests to dashboard
83
+ if (dashboard) {
84
+ dashboard.logRequest(req.path, redactionDuration);
85
+ }
86
+
87
+ if (hasRedacted) {
88
+ log.info({ path: req.path, tokenCount: tokens.size, duration: redactionDuration }, 'Redacted sensitive data from request');
89
+ vault.saveTokens(tokens);
90
+
91
+ // Log redaction events
92
+ if (dashboard) {
93
+ tokens.forEach((value, token) => {
94
+ const type = token.split('_')[1] ?? 'UNK'; // Extract type from [HUSH_TYPE_ID]
95
+ dashboard!.logRedaction(type, token);
96
+ });
97
+ }
98
+ }
99
+
100
+ try {
101
+ const fetchHeaders: Record<string, string> = { ...headers };
102
+ if (hasBody) fetchHeaders['Content-Type'] = 'application/json';
103
+
104
+ const response = await fetch(targetUrl, {
105
+ method: req.method,
106
+ headers: fetchHeaders,
107
+ body: hasBody ? JSON.stringify(redactedBody) : undefined,
108
+ signal: AbortSignal.timeout(30000), // 30s timeout
109
+ });
110
+
111
+ // Handle Upstream Errors (4xx, 5xx)
112
+ if (!response.ok) {
113
+ log.error({ status: response.status, path: req.path }, 'Upstream provider returned an error');
114
+ const errorData = await response.text();
115
+ return res.status(response.status).send(errorData);
116
+ }
117
+
118
+ // Case A: Streaming
119
+ const isStreaming = req.body?.stream === true || req.body?.stream === 'true';
120
+ if (isStreaming && response.body) {
121
+ log.info({ path: req.path }, 'Starting stream proxy');
122
+ res.setHeader('Content-Type', 'text/event-stream');
123
+ res.setHeader('Cache-Control', 'no-cache');
124
+ res.setHeader('Connection', 'keep-alive');
125
+
126
+ const reader = response.body.getReader();
127
+ const decoder = new TextDecoder();
128
+
129
+ // Security: Use stateful rehydrator to handle tokens split across chunks
130
+ const rehydrateChunk = vault.createStreamingRehydrator();
131
+
132
+ try {
133
+ while (true) {
134
+ const { done, value } = await reader.read();
135
+ if (done) break;
136
+
137
+ const chunk = decoder.decode(value, { stream: true });
138
+ const rehydratedChunk = rehydrateChunk(chunk);
139
+
140
+ if (rehydratedChunk) {
141
+ const canWrite = res.write(rehydratedChunk);
142
+ // Handle backpressure
143
+ if (!canWrite) {
144
+ await new Promise((resolve) => res.once('drain', resolve));
145
+ }
146
+ }
147
+ }
148
+ } finally {
149
+ reader.releaseLock();
150
+ }
151
+ res.end();
152
+ return;
153
+ }
154
+
155
+ // Case B: Regular JSON
156
+ const data = await response.json();
157
+ const rehydratedData = vault.rehydrate(data);
158
+ res.status(response.status).json(rehydratedData);
159
+
160
+ } catch (error) {
161
+ log.error({ err: error, path: req.path }, 'Failed to forward request');
162
+ res.status(500).json({ error: 'Gateway forwarding failed' });
163
+ }
164
+ }
165
+
166
+ /**
167
+ * Handle Anthropic /messages proxy
168
+ */
169
+ app.post('/v1/messages', async (req, res) => {
170
+ const apiKey = req.headers['x-api-key'];
171
+ const auth = req.headers['authorization'];
172
+ if (!apiKey && !auth) return res.status(401).json({ error: 'Missing Anthropic API Key or Authorization header' });
173
+
174
+ const headers: Record<string, string> = {
175
+ 'anthropic-version': req.headers['anthropic-version'] as string || '2023-06-01',
176
+ };
177
+ if (req.headers['anthropic-beta']) headers['anthropic-beta'] = req.headers['anthropic-beta'] as string;
178
+ if (apiKey) headers['x-api-key'] = apiKey as string;
179
+ if (auth) headers['Authorization'] = auth as string;
180
+
181
+ await proxyRequest(req, res, 'https://api.anthropic.com/v1/messages', headers);
182
+ });
183
+
184
+ /**
185
+ * Handle OpenAI /chat/completions proxy
186
+ */
187
+ app.post('/v1/chat/completions', async (req, res) => {
188
+ const auth = req.headers['authorization'];
189
+ if (!auth) return res.status(401).json({ error: 'Missing OpenAI Authorization' });
190
+
191
+ await proxyRequest(req, res, 'https://api.openai.com/v1/chat/completions', {
192
+ 'Authorization': auth as string,
193
+ });
194
+ });
195
+
196
+ /**
197
+ * Handle ZhipuAI GLM API proxy (OpenCode + GLM-5)
198
+ * Supports both regular and coding plan endpoints:
199
+ * /api/paas/v4/chat/completions → https://api.z.ai/api/paas/v4/chat/completions
200
+ * /api/coding/paas/v4/chat/completions → https://api.z.ai/api/coding/paas/v4/chat/completions
201
+ */
202
+ app.post('/api/paas/v4/chat/completions', async (req, res) => {
203
+ const auth = req.headers['authorization'];
204
+ if (!auth) return res.status(401).json({ error: 'Missing ZhipuAI Authorization' });
205
+
206
+ await proxyRequest(req, res, 'https://api.z.ai/api/paas/v4/chat/completions', {
207
+ 'Authorization': auth as string,
208
+ });
209
+ });
210
+
211
+ app.post('/api/coding/paas/v4/chat/completions', async (req, res) => {
212
+ const auth = req.headers['authorization'];
213
+ if (!auth) return res.status(401).json({ error: 'Missing ZhipuAI Authorization' });
214
+
215
+ await proxyRequest(req, res, 'https://api.z.ai/api/coding/paas/v4/chat/completions', {
216
+ 'Authorization': auth as string,
217
+ });
218
+ });
219
+
220
+ /**
221
+ * Handle Google Gemini API proxy
222
+ * Supports: /v1beta/models/{model}:generateContent
223
+ */
224
+ app.post('/v1beta/models/:modelAndAction', async (req, res) => {
225
+ const apiKey = req.headers['x-goog-api-key'] || req.query.key;
226
+ if (!apiKey) return res.status(401).json({ error: 'Missing Google API Key' });
227
+
228
+ const targetUrl = `https://generativelanguage.googleapis.com/v1beta/models/${req.params.modelAndAction}${req.url.includes('?') ? '?' + req.url.split('?')[1] : ''}`;
229
+
230
+ await proxyRequest(req, res, targetUrl, {
231
+ 'x-goog-api-key': apiKey as string,
232
+ });
233
+ });
234
+
235
+ // Health check (must be before catch-all)
236
+ app.get('/health', (req, res) => {
237
+ const response: any = { status: 'running' };
238
+ if (process.env.DEBUG === 'true') {
239
+ response.vaultSize = vault.size;
240
+ }
241
+ res.json(response);
242
+ });
243
+
244
+ /**
245
+ * Catch-all Handler: Forward unmatched requests with redaction/rehydration.
246
+ * Uses HUSH_UPSTREAM if set, otherwise falls back to Google.
247
+ */
248
+ app.all('/*path', async (req, res) => {
249
+ const targetBase = 'https://generativelanguage.googleapis.com';
250
+ const targetUrl = `${targetBase}${req.url}`;
251
+
252
+ log.info({ path: req.path, method: req.method, upstream: targetBase }, 'Forwarding to upstream');
253
+
254
+ // Collect auth headers to pass through
255
+ const headers: Record<string, string> = {};
256
+ if (req.headers['authorization']) headers['Authorization'] = req.headers['authorization'] as string;
257
+ if (req.headers['x-api-key']) headers['x-api-key'] = req.headers['x-api-key'] as string;
258
+ if (req.headers['x-goog-api-key']) headers['x-goog-api-key'] = req.headers['x-goog-api-key'] as string;
259
+
260
+ await proxyRequest(req, res, targetUrl, headers);
261
+ });