droid-patch 0.8.2 → 0.8.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.mjs +4 -4
- package/dist/cli.mjs.map +1 -1
- package/package.json +1 -1
package/dist/cli.mjs
CHANGED
@@ -3213,8 +3213,8 @@ bin("droid-patch", "CLI tool to patch droid binary with various modifications").
 patches.push({
   name: "reasoningEffortValidationBypass",
   description: "Bypass reasoning effort validation (allows xhigh in settings.json)",
-  pattern: Buffer.from("
-  replacement: Buffer.from("
+  pattern: Buffer.from("T!==\"none\"&&T!==\"off\"&&!W.supportedReasoningEfforts.includes(T)"),
+  replacement: Buffer.from("T!=\"none\"&&T!=\"off\"&&0&&W.supportedReasoningEfforts.includes(T)")
 });
 }
 if (noTelemetry) {
@@ -3450,8 +3450,8 @@ bin("droid-patch", "CLI tool to patch droid binary with various modifications").
 patches.push({
   name: "reasoningEffortValidationBypass",
   description: "Bypass reasoning effort validation (allows xhigh in settings.json)",
-  pattern: Buffer.from("
-  replacement: Buffer.from("
+  pattern: Buffer.from("T!==\"none\"&&T!==\"off\"&&!W.supportedReasoningEfforts.includes(T)"),
+  replacement: Buffer.from("T!=\"none\"&&T!=\"off\"&&0&&W.supportedReasoningEfforts.includes(T)")
 });
 }
 if (meta.patches.noTelemetry) {
package/dist/cli.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"cli.mjs","names":["sessionsScript: string | undefined","patches: Patch[]","unlink","metaList: Awaited<ReturnType<typeof loadAliasMetadata>>[]","readlink","symlink"],"sources":["../src/websearch-patch.ts","../src/statusline-patch.ts","../src/sessions-patch.ts","../src/cli.ts"],"sourcesContent":["import type { Patch } from \"./patcher.ts\";\nimport { writeFile, chmod, mkdir } from \"node:fs/promises\";\nimport { join } from \"node:path\";\nimport { existsSync } from \"node:fs\";\n\n/**\n * WebSearch Patch Generator\n *\n * Since injecting code directly into binary is complex (requires exact byte length matching),\n * we use a more practical approach:\n *\n * 1. --websearch option will:\n * a) Generate a standalone search proxy server script\n * b) Modify droid's API URL to point to local proxy (using --api-base)\n * c) Create a wrapper script to start both proxy and droid\n *\n * Environment variables:\n * - GOOGLE_PSE_API_KEY: Google Programmable Search Engine API Key\n * - GOOGLE_PSE_CX: Google Custom Search Engine ID\n * - If not set, will fallback to DuckDuckGo\n */\n\n/**\n * Generate search proxy server code\n */\nfunction generateSearchProxyServerCode(): string {\n return `#!/usr/bin/env node\n/**\n * Droid WebSearch Proxy Server\n * Auto-generated by droid-patch --websearch\n * \n * Supports:\n * - Google PSE (requires GOOGLE_PSE_API_KEY and GOOGLE_PSE_CX)\n * - DuckDuckGo (free fallback)\n */\n\nconst http = require('http');\nconst https = require('https');\n\nconst FACTORY_API = 'https://api.factory.ai';\n\n// Auto-find available port\nfunction findAvailablePort(startPort = 23119) {\n return new Promise((resolve, reject) => {\n const net = require('net');\n const server = net.createServer();\n \n server.listen(startPort, '127.0.0.1', () => {\n const port = server.address().port;\n server.close(() => resolve(port));\n });\n \n server.on('error', (err) => {\n if (err.code === 'EADDRINUSE') {\n // Port is in use, try next one\n resolve(findAvailablePort(startPort + 1));\n } else {\n reject(err);\n }\n });\n });\n}\n\nlet PORT = process.env.SEARCH_PROXY_PORT || 23119;\n\n// === Search Implementation ===\n\nasync function searchGooglePSE(query, numResults, apiKey, cx) {\n // Use curl command\n const { execSync } = require('child_process');\n \n const url = 'https://www.googleapis.com/customsearch/v1?key=' + apiKey + '&cx=' + cx + '&q=' + encodeURIComponent(query) + '&num=' + Math.min(numResults, 10);\n const curlCmd = \\`curl -s \"\\${url}\"\\`;\n \n try {\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n \n return (data.items || []).map(item => ({\n title: item.title,\n url: item.link,\n snippet: item.snippet,\n publishedDate: null,\n author: null,\n score: null\n }));\n } catch (e) {\n throw new Error('Google PSE error: ' + e.message);\n }\n}\n\nasync function searchDuckDuckGo(query, numResults) {\n // Use curl command, because Node.js fetch may have issues in some environments\n const { execSync } = require('child_process');\n\n // Method 1: Try using DuckDuckGo HTML lite version (via curl)\n try {\n const curlCmd = \\`curl -s -X POST \"https://lite.duckduckgo.com/lite/\" -H \"Content-Type: application/x-www-form-urlencoded\" -H \"User-Agent: Mozilla/5.0\" -d \"q=\\${encodeURIComponent(query)}\"\\`;\n const html = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n\n if (html && html.length > 1000) {\n const results = parseDDGLiteHTML(html, numResults);\n if (results.length > 0) {\n 
console.error('[search] DDG lite returned ' + results.length + ' results');\n return results;\n }\n }\n } catch (e) {\n console.error('[search] DDG lite (curl) failed:', e.message);\n }\n\n // Method 2: Fallback to Instant Answer API (via curl)\n try {\n const apiUrl = 'https://api.duckduckgo.com/?q=' + encodeURIComponent(query) + '&format=json&no_html=1&skip_disambig=1';\n const curlCmd = \\`curl -s \"\\${apiUrl}\" -H \"User-Agent: Mozilla/5.0\"\\`;\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n \n const results = [];\n\n if (data.Abstract && data.AbstractURL) {\n results.push({\n title: data.Heading || query,\n url: data.AbstractURL,\n snippet: data.Abstract,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n\n for (const topic of (data.RelatedTopics || [])) {\n if (results.length >= numResults) break;\n if (topic.Text && topic.FirstURL) {\n results.push({\n title: topic.Text.substring(0, 100),\n url: topic.FirstURL,\n snippet: topic.Text,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n if (topic.Topics) {\n for (const st of topic.Topics) {\n if (results.length >= numResults) break;\n if (st.Text && st.FirstURL) {\n results.push({\n title: st.Text.substring(0, 100),\n url: st.FirstURL,\n snippet: st.Text,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n }\n }\n }\n\n if (results.length > 0) {\n console.error('[search] DDG API returned ' + results.length + ' results');\n return results;\n }\n } catch (e) {\n console.error('[search] DDG API (curl) failed:', e.message);\n }\n\n return [];\n}\n\n// Parse DuckDuckGo Lite HTML\nfunction parseDDGLiteHTML(html, maxResults) {\n const results = [];\n\n // Match result links - DuckDuckGo Lite format\n // <a rel=\"nofollow\" href=\"URL\">TITLE</a>\n const linkRegex = /<a[^>]+rel=\"nofollow\"[^>]+href=\"([^\"]+)\"[^>]*>([^<]+)<\\\\/a>/gi;\n const snippetRegex = /<td[^>]*class=\"result-snippet\"[^>]*>([^<]*)<\\\\/td>/gi;\n\n const links = [];\n let match;\n\n // Extract all links\n while ((match = linkRegex.exec(html)) !== null && links.length < maxResults) {\n let url = match[1];\n // Skip DuckDuckGo internal links\n if (url.includes('duckduckgo.com') && !url.includes('uddg=')) continue;\n // Decode redirect URL\n if (url.includes('uddg=')) {\n const uddgMatch = url.match(/uddg=([^&]+)/);\n if (uddgMatch) url = decodeURIComponent(uddgMatch[1]);\n }\n links.push({\n url: url,\n title: decodeHTMLEntities(match[2].trim())\n });\n }\n\n // Extract snippets\n const snippets = [];\n while ((match = snippetRegex.exec(html)) !== null && snippets.length < maxResults) {\n snippets.push(decodeHTMLEntities(match[1].trim()));\n }\n\n // Combine results\n for (let i = 0; i < links.length && results.length < maxResults; i++) {\n results.push({\n title: links[i].title,\n url: links[i].url,\n snippet: snippets[i] || '',\n publishedDate: null,\n author: null,\n score: null\n });\n }\n \n return results;\n}\n\nfunction decodeHTMLEntities(str) {\n return str\n .replace(/&/g, '&')\n .replace(/</g, '<')\n .replace(/>/g, '>')\n .replace(/"/g, '\"')\n .replace(/'/g, \"'\")\n .replace(/ /g, ' ');\n}\n\nasync function search(query, numResults = 10) {\n const googleApiKey = process.env.GOOGLE_PSE_API_KEY;\n const googleCx = process.env.GOOGLE_PSE_CX;\n\n // Try Google PSE first\n if (googleApiKey && googleCx) {\n try {\n console.error('[search] Trying Google PSE...');\n const results = await searchGooglePSE(query, numResults, googleApiKey, googleCx);\n if 
(results.length > 0) {\n console.error('[search] Google PSE returned ' + results.length + ' results');\n return { results, source: 'google-pse' };\n }\n } catch (e) {\n console.error('[search] Google PSE failed:', e.message);\n }\n }\n\n // Fallback to DuckDuckGo\n try {\n console.error('[search] Using DuckDuckGo...');\n const results = await searchDuckDuckGo(query, numResults);\n console.error('[search] DuckDuckGo returned ' + results.length + ' results');\n return { results, source: 'duckduckgo' };\n } catch (e) {\n console.error('[search] DuckDuckGo failed:', e.message);\n }\n\n return { results: [], source: 'none' };\n}\n\n// === HTTP Server ===\n\nconst server = http.createServer(async (req, res) => {\n const url = new URL(req.url, 'http://' + req.headers.host);\n\n // Health check\n if (url.pathname === '/health') {\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ \n status: 'ok',\n google: !!(process.env.GOOGLE_PSE_API_KEY && process.env.GOOGLE_PSE_CX),\n duckduckgo: true\n }));\n return;\n }\n\n // Search endpoint\n if (url.pathname === '/api/tools/exa/search' && req.method === 'POST') {\n let body = '';\n req.on('data', chunk => body += chunk);\n req.on('end', async () => {\n try {\n const { query, numResults } = JSON.parse(body);\n console.error('[search] Query: \"' + query + '\"');\n \n const { results, source } = await search(query, numResults || 10);\n console.error('[search] ' + results.length + ' results from ' + source);\n \n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ results }));\n } catch (e) {\n console.error('[search] Error:', e);\n res.writeHead(500, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: String(e), results: [] }));\n }\n });\n return;\n }\n\n // Proxy other requests to Factory API\n console.error('[proxy] ' + req.method + ' ' + url.pathname);\n \n const proxyUrl = new URL(FACTORY_API + url.pathname + url.search);\n \n const proxyReq = https.request(proxyUrl, {\n method: req.method,\n headers: { ...req.headers, host: proxyUrl.host }\n }, proxyRes => {\n res.writeHead(proxyRes.statusCode, proxyRes.headers);\n proxyRes.pipe(res);\n });\n\n proxyReq.on('error', e => {\n console.error('[proxy] Error:', e.message);\n res.writeHead(502, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'Proxy failed' }));\n });\n\n if (req.method !== 'GET' && req.method !== 'HEAD') {\n req.pipe(proxyReq);\n } else {\n proxyReq.end();\n }\n});\n\n// Start server (async, auto-find available port)\n(async () => {\n const fs = require('fs');\n const path = require('path');\n\n // If port not specified, auto-find available port\n if (!process.env.SEARCH_PROXY_PORT) {\n PORT = await findAvailablePort(23119);\n }\n\n server.listen(PORT, '127.0.0.1', () => {\n const hasGoogle = process.env.GOOGLE_PSE_API_KEY && process.env.GOOGLE_PSE_CX;\n\n // Write port number to temp file for wrapper script to read\n const portFile = process.env.SEARCH_PROXY_PORT_FILE || path.join(require('os').tmpdir(), 'droid-search-proxy-' + process.pid + '.port');\n fs.writeFileSync(portFile, PORT.toString());\n\n // Output port number to stdout (for parent process to capture)\n console.log('PORT=' + PORT);\n \n console.error('');\n console.error('╔═══════════════════════════════════════════════════════════════╗');\n console.error('║ Droid WebSearch Proxy ║');\n console.error('╠═══════════════════════════════════════════════════════════════╣');\n console.error('║ 🔍 Google PSE: ' + 
(hasGoogle ? 'Configured ✓' : 'Not set (set GOOGLE_PSE_API_KEY & CX)').padEnd(45) + '║');\n console.error('║ 🦆 DuckDuckGo: Always available ║');\n console.error('║ 🚀 Server: http://127.0.0.1:' + PORT + ' ║'.slice(0, 65) + '║');\n console.error('╚═══════════════════════════════════════════════════════════════╝');\n console.error('');\n });\n})();\n\n// Handle graceful shutdown\nprocess.on('SIGTERM', () => server.close());\nprocess.on('SIGINT', () => server.close());\n`;\n}\n\n/**\n * Generate wrapper script, auto-start proxy and droid\n */\nfunction generateWrapperScript(droidPath: string, proxyScriptPath: string): string {\n return `#!/bin/bash\n# Droid with WebSearch Proxy\n# Auto-generated by droid-patch --websearch\n\nPROXY_SCRIPT=\"${proxyScriptPath}\"\nDROID_BIN=\"${droidPath}\"\nPORT_FILE=\"/tmp/droid-search-proxy-$$.port\"\n\n# Start proxy and get dynamic port\nstart_proxy() {\n # Start proxy, capture output to get port\n SEARCH_PROXY_PORT_FILE=\"$PORT_FILE\" node \"$PROXY_SCRIPT\" &\n PROXY_PID=$!\n\n # Wait for proxy to start and get port\n for i in {1..20}; do\n if [ -f \"$PORT_FILE\" ]; then\n PORT=$(cat \"$PORT_FILE\")\n if curl -s \"http://127.0.0.1:$PORT/health\" > /dev/null 2>&1; then\n echo \"[websearch] Proxy started on port $PORT\"\n return 0\n fi\n fi\n sleep 0.2\n done\n\n echo \"[websearch] Failed to start proxy\"\n kill $PROXY_PID 2>/dev/null\n return 1\n}\n\n# Cleanup function\ncleanup() {\n [ -n \"$PROXY_PID\" ] && kill $PROXY_PID 2>/dev/null\n [ -f \"$PORT_FILE\" ] && rm -f \"$PORT_FILE\"\n}\ntrap cleanup EXIT\n\n# Start proxy\nif ! start_proxy; then\n exit 1\nfi\n\n# Run droid\nexport FACTORY_API_BASE_URL_OVERRIDE=\"http://127.0.0.1:$PORT\"\nexec \"$DROID_BIN\" \"$@\"\n`;\n}\n\n/**\n * Generate WebSearch Patch\n *\n * Since injecting code directly into binary is complex, we use the following strategy:\n * 1. Create proxy server script\n * 2. Modify API URL to point to local\n * 3. 
Return a combined patch\n */\nexport function generateWebSearchPatch(): Patch | null {\n // Return a URL replacement patch\n // Use local proxy port 23119 (idle port)\n const originalUrl = \"https://api.factory.ai\";\n const localUrl = \"http://127.0.0.1:23119\";\n\n // Need to pad to same length\n if (localUrl.length > originalUrl.length) {\n console.error(`[websearch] Local URL too long: ${localUrl.length} > ${originalUrl.length}`);\n return null;\n }\n\n const paddedUrl = localUrl.padEnd(originalUrl.length, \" \");\n\n return {\n name: \"webSearch\",\n description: `Replace API URL with local proxy (${localUrl})`,\n pattern: Buffer.from(originalUrl),\n replacement: Buffer.from(paddedUrl),\n };\n}\n\n/**\n * Create WebSearch proxy files\n */\nexport async function createWebSearchProxyFiles(\n outputDir: string,\n droidPath: string,\n aliasName: string,\n): Promise<{ proxyScript: string; wrapperScript: string }> {\n // Ensure directory exists\n if (!existsSync(outputDir)) {\n await mkdir(outputDir, { recursive: true });\n }\n\n const proxyScriptPath = join(outputDir, `${aliasName}-search-proxy.js`);\n const wrapperScriptPath = join(outputDir, `${aliasName}-with-search`);\n\n // Write proxy server script\n await writeFile(proxyScriptPath, generateSearchProxyServerCode());\n console.log(`[*] Created search proxy: ${proxyScriptPath}`);\n\n // Write wrapper script\n await writeFile(wrapperScriptPath, generateWrapperScript(droidPath, proxyScriptPath));\n await chmod(wrapperScriptPath, 0o755);\n console.log(`[*] Created wrapper script: ${wrapperScriptPath}`);\n\n return {\n proxyScript: proxyScriptPath,\n wrapperScript: wrapperScriptPath,\n };\n}\n\n/**\n * Get proxy server code (for export)\n */\nexport function getSearchProxyCode(): string {\n return generateSearchProxyServerCode();\n}\n\n/**\n * Generate Bun preload script\n * This script executes before droid main program, starts search proxy\n */\nfunction generatePreloadScript(): string {\n return `// Droid WebSearch Preload Script\n// Auto-generated by droid-patch --websearch-preload\n// Start search proxy before droid main program\n\nconst http = require('http');\nconst https = require('https');\nconst { execSync } = require('child_process');\n\nconst PORT = process.env.DROID_SEARCH_PORT || 23119;\nconst FACTORY_API = 'https://api.factory.ai';\n\n// Google PSE search\nasync function searchGooglePSE(query, num) {\n const apiKey = process.env.GOOGLE_PSE_API_KEY;\n const cx = process.env.GOOGLE_PSE_CX;\n if (!apiKey || !cx) return null;\n \n try {\n const url = \\`https://www.googleapis.com/customsearch/v1?key=\\${apiKey}&cx=\\${cx}&q=\\${encodeURIComponent(query)}&num=\\${Math.min(num, 10)}\\`;\n const res = await fetch(url);\n const data = await res.json();\n if (data.error) return null;\n return (data.items || []).map(item => ({\n title: item.title,\n url: item.link,\n content: item.snippet || ''\n }));\n } catch (e) {\n return null;\n }\n}\n\n// DuckDuckGo search (use curl for reliability)\nfunction searchDuckDuckGo(query, num) {\n try {\n const url = \\`https://api.duckduckgo.com/?q=\\${encodeURIComponent(query)}&format=json&no_html=1&skip_disambig=1\\`;\n const output = execSync(\\`curl -s \"\\${url}\"\\`, { encoding: 'utf8', timeout: 10000 });\n const data = JSON.parse(output);\n const results = [];\n\n if (data.AbstractText && data.AbstractURL) {\n results.push({ title: data.Heading || query, url: data.AbstractURL, content: data.AbstractText });\n }\n\n for (const t of (data.RelatedTopics || [])) {\n if (results.length >= num) 
break;\n if (t.Text && t.FirstURL) {\n results.push({ title: t.Text.split(' - ')[0], url: t.FirstURL, content: t.Text });\n }\n // Handle subcategories\n if (t.Topics) {\n for (const sub of t.Topics) {\n if (results.length >= num) break;\n if (sub.Text && sub.FirstURL) {\n results.push({ title: sub.Text.split(' - ')[0], url: sub.FirstURL, content: sub.Text });\n }\n }\n }\n }\n return results;\n } catch (e) {\n return [];\n }\n}\n\n// Search function\nasync function search(query, num) {\n // Try Google PSE first\n const googleResults = await searchGooglePSE(query, num);\n if (googleResults && googleResults.length > 0) {\n console.error('[preload-search] Using Google PSE');\n return googleResults;\n }\n\n // Fallback to DuckDuckGo\n console.error('[preload-search] Using DuckDuckGo');\n return searchDuckDuckGo(query, num);\n}\n\n// Check if port is already in use\nfunction isPortInUse(port) {\n try {\n execSync(\\`curl -s http://127.0.0.1:\\${port}/health\\`, { timeout: 1000 });\n return true;\n } catch {\n return false;\n }\n}\n\n// Skip if proxy already running\nif (isPortInUse(PORT)) {\n console.error(\\`[preload] Search proxy already running on port \\${PORT}\\`);\n} else {\n // Start proxy server\n const server = http.createServer(async (req, res) => {\n const url = new URL(req.url, \\`http://\\${req.headers.host}\\`);\n\n // Health check\n if (url.pathname === '/health') {\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ status: 'ok' }));\n return;\n }\n\n // Search endpoint\n if (url.pathname === '/api/tools/exa/search' && req.method === 'POST') {\n let body = '';\n req.on('data', c => body += c);\n req.on('end', async () => {\n try {\n const { query, numResults } = JSON.parse(body);\n console.error(\\`[preload-search] Query: \"\\${query}\"\\`);\n const results = await search(query, numResults || 10);\n console.error(\\`[preload-search] Found \\${results.length} results\\`);\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ results }));\n } catch (e) {\n console.error('[preload-search] Error:', e.message);\n res.writeHead(500, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: String(e), results: [] }));\n }\n });\n return;\n }\n\n // Proxy other requests to Factory API\n const proxyUrl = new URL(FACTORY_API + url.pathname + url.search);\n const proxyReq = https.request(proxyUrl, {\n method: req.method,\n headers: { ...req.headers, host: proxyUrl.host }\n }, proxyRes => {\n res.writeHead(proxyRes.statusCode, proxyRes.headers);\n proxyRes.pipe(res);\n });\n proxyReq.on('error', (e) => {\n console.error('[preload-proxy] Error:', e.message);\n res.writeHead(502, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'Proxy failed' }));\n });\n if (req.method !== 'GET' && req.method !== 'HEAD') {\n req.pipe(proxyReq);\n } else {\n proxyReq.end();\n }\n });\n\n server.listen(PORT, '127.0.0.1', () => {\n console.error(\\`[preload] Search proxy started on http://127.0.0.1:\\${PORT}\\`);\n });\n}\n`;\n}\n\n/**\n * Generate bunfig.toml content\n */\nfunction generateBunfigToml(preloadScriptPath: string): string {\n return `# Droid WebSearch Configuration\n# Auto-generated by droid-patch --websearch-preload\n\npreload = [\"${preloadScriptPath}\"]\n`;\n}\n\n/**\n * Generate preload wrapper script\n * This script cd's to the bunfig.toml directory, then executes droid\n */\nfunction generatePreloadWrapperScript(droidPath: string, bunfigDir: string): string {\n return 
`#!/bin/bash\n# Droid with WebSearch (Preload)\n# Auto-generated by droid-patch --preload\n\nBUNFIG_DIR=\"${bunfigDir}\"\nDROID_BIN=\"${droidPath}\"\nORIGINAL_DIR=\"$(pwd)\"\n\n# cd to bunfig.toml directory (Bun reads bunfig.toml from cwd)\ncd \"$BUNFIG_DIR\"\n\n# Execute droid, pass all arguments, set working directory to original\nexec \"$DROID_BIN\" --cwd \"$ORIGINAL_DIR\" \"$@\"\n`;\n}\n\n/**\n * Create WebSearch files using Preload method\n *\n * Advantages:\n * - No need to modify binary\n * - Uses Bun's native preload mechanism\n *\n * Files created:\n * - preload script (search proxy)\n * - bunfig.toml (Bun configuration)\n * - wrapper script (directly executable command)\n */\nexport async function createWebSearchPreloadFiles(\n droidDir: string,\n droidPath: string,\n aliasName: string,\n): Promise<{\n preloadScript: string;\n bunfigPath: string;\n wrapperScript: string;\n}> {\n // Ensure directory exists\n if (!existsSync(droidDir)) {\n await mkdir(droidDir, { recursive: true });\n }\n\n const preloadScriptPath = join(droidDir, `${aliasName}-search-preload.js`);\n const bunfigPath = join(droidDir, \"bunfig.toml\");\n const wrapperScriptPath = join(droidDir, aliasName);\n\n // Write preload script\n await writeFile(preloadScriptPath, generatePreloadScript());\n console.log(`[*] Created preload script: ${preloadScriptPath}`);\n\n // Write bunfig.toml\n await writeFile(bunfigPath, generateBunfigToml(preloadScriptPath));\n console.log(`[*] Created bunfig.toml: ${bunfigPath}`);\n\n // Write wrapper script\n await writeFile(wrapperScriptPath, generatePreloadWrapperScript(droidPath, droidDir));\n await chmod(wrapperScriptPath, 0o755);\n console.log(`[*] Created wrapper: ${wrapperScriptPath}`);\n\n return {\n preloadScript: preloadScriptPath,\n bunfigPath: bunfigPath,\n wrapperScript: wrapperScriptPath,\n };\n}\n\n/**\n * Get preload script code (for export)\n */\nexport function getPreloadScriptCode(): string {\n return generatePreloadScript();\n}\n\n/**\n * Generate unified Fetch Hook Preload script\n * Directly hooks globalThis.fetch, no proxy server needed\n * @internal Reserved for future use - alternative to proxy server approach\n */\nfunction _generateFetchHookPreload(): string {\n return `// Droid WebSearch Fetch Hook\n// Auto-generated by droid-patch --websearch\n// Hook globalThis.fetch to intercept search requests\n\nconst DEBUG = process.env.DROID_SEARCH_DEBUG === '1';\n\nfunction log(...args) {\n if (DEBUG) console.error('[websearch]', ...args);\n}\n\n// === Search Implementation ===\n\nasync function searchGooglePSE(query, numResults) {\n const apiKey = process.env.GOOGLE_PSE_API_KEY;\n const cx = process.env.GOOGLE_PSE_CX;\n if (!apiKey || !cx) return null;\n\n try {\n const url = \\`https://www.googleapis.com/customsearch/v1?key=\\${apiKey}&cx=\\${cx}&q=\\${encodeURIComponent(query)}&num=\\${Math.min(numResults, 10)}\\`;\n const res = await fetch(url);\n const data = await res.json();\n if (data.error) {\n log('Google PSE error:', data.error.message);\n return null;\n }\n return (data.items || []).map(item => ({\n title: item.title,\n url: item.link,\n content: item.snippet || '',\n publishedDate: null,\n author: null,\n score: null\n }));\n } catch (e) {\n log('Google PSE failed:', e.message);\n return null;\n }\n}\n\nasync function searchDuckDuckGo(query, numResults) {\n const { execSync } = require('child_process');\n\n // Method 1: Try DuckDuckGo HTML lite\n try {\n const curlCmd = \\`curl -s -X POST \"https://lite.duckduckgo.com/lite/\" -H \"Content-Type: 
application/x-www-form-urlencoded\" -H \"User-Agent: Mozilla/5.0\" -d \"q=\\${encodeURIComponent(query)}\"\\`;\n const html = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n\n if (html && html.length > 1000) {\n const results = parseDDGLiteHTML(html, numResults);\n if (results.length > 0) {\n log('DDG lite:', results.length, 'results');\n return results;\n }\n }\n } catch (e) {\n log('DDG lite failed:', e.message);\n }\n\n // Method 2: Fallback to Instant Answer API\n try {\n const apiUrl = \\`https://api.duckduckgo.com/?q=\\${encodeURIComponent(query)}&format=json&no_html=1&skip_disambig=1\\`;\n const curlCmd = \\`curl -s \"\\${apiUrl}\" -H \"User-Agent: Mozilla/5.0\"\\`;\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n\n const results = [];\n\n if (data.Abstract && data.AbstractURL) {\n results.push({\n title: data.Heading || query,\n url: data.AbstractURL,\n content: data.Abstract,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n\n for (const topic of (data.RelatedTopics || [])) {\n if (results.length >= numResults) break;\n if (topic.Text && topic.FirstURL) {\n results.push({\n title: topic.Text.substring(0, 100),\n url: topic.FirstURL,\n content: topic.Text,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n if (topic.Topics) {\n for (const st of topic.Topics) {\n if (results.length >= numResults) break;\n if (st.Text && st.FirstURL) {\n results.push({\n title: st.Text.substring(0, 100),\n url: st.FirstURL,\n content: st.Text,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n }\n }\n }\n\n if (results.length > 0) {\n log('DDG API:', results.length, 'results');\n return results;\n }\n } catch (e) {\n log('DDG API failed:', e.message);\n }\n\n return [];\n}\n\nfunction parseDDGLiteHTML(html, maxResults) {\n const results = [];\n const linkRegex = /<a[^>]+rel=\"nofollow\"[^>]+href=\"([^\"]+)\"[^>]*>([^<]+)<\\\\/a>/gi;\n const snippetRegex = /<td[^>]*class=\"result-snippet\"[^>]*>([^<]*)<\\\\/td>/gi;\n\n const links = [];\n let match;\n\n while ((match = linkRegex.exec(html)) !== null && links.length < maxResults) {\n let url = match[1];\n if (url.includes('duckduckgo.com') && !url.includes('uddg=')) continue;\n if (url.includes('uddg=')) {\n const uddgMatch = url.match(/uddg=([^&]+)/);\n if (uddgMatch) url = decodeURIComponent(uddgMatch[1]);\n }\n links.push({\n url: url,\n title: decodeHTMLEntities(match[2].trim())\n });\n }\n\n const snippets = [];\n while ((match = snippetRegex.exec(html)) !== null && snippets.length < maxResults) {\n snippets.push(decodeHTMLEntities(match[1].trim()));\n }\n\n for (let i = 0; i < links.length && results.length < maxResults; i++) {\n results.push({\n title: links[i].title,\n url: links[i].url,\n content: snippets[i] || '',\n publishedDate: null,\n author: null,\n score: null\n });\n }\n\n return results;\n}\n\nfunction decodeHTMLEntities(str) {\n return str\n .replace(/&/g, '&')\n .replace(/</g, '<')\n .replace(/>/g, '>')\n .replace(/"/g, '\"')\n .replace(/'/g, \"'\")\n .replace(/ /g, ' ');\n}\n\nasync function search(query, numResults = 10) {\n // Try Google PSE first\n const googleResults = await searchGooglePSE(query, numResults);\n if (googleResults && googleResults.length > 0) {\n log('Using Google PSE');\n return { results: googleResults, source: 'google-pse' };\n }\n\n // Fallback to DuckDuckGo\n log('Using DuckDuckGo');\n const ddgResults = await searchDuckDuckGo(query, numResults);\n return { results: ddgResults, source: 
'duckduckgo' };\n}\n\n// === Fetch Hook ===\n\nconst originalFetch = globalThis.fetch;\n\nglobalThis.fetch = async function(input, init) {\n const url = typeof input === 'string' ? input : (input instanceof URL ? input.href : input.url);\n\n // Intercept search requests\n if (url && url.includes('/api/tools/exa/search')) {\n log('Intercepted search request');\n\n try {\n let body = init?.body;\n if (body && typeof body !== 'string') {\n body = await new Response(body).text();\n }\n\n const { query, numResults } = JSON.parse(body || '{}');\n log('Query:', query);\n\n const { results, source } = await search(query, numResults || 10);\n log('Results:', results.length, 'from', source);\n\n return new Response(JSON.stringify({ results }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' }\n });\n } catch (e) {\n log('Search error:', e.message);\n return new Response(JSON.stringify({ error: String(e), results: [] }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' }\n });\n }\n }\n\n // Pass through all other requests\n return originalFetch.apply(this, arguments);\n};\n\n// Also hook Bun.fetch if available\nif (typeof Bun !== 'undefined' && Bun.fetch) {\n const originalBunFetch = Bun.fetch;\n Bun.fetch = globalThis.fetch;\n}\n\nlog('Fetch hook installed');\n`;\n}\n\n/**\n * Generate search proxy server code (runs in background)\n * Since BUN_CONFIG_PRELOAD doesn't work with compiled binaries,\n * use a local proxy server to intercept search requests instead\n *\n * Each droid instance runs its own proxy server.\n * The proxy is killed automatically when droid exits.\n * @param factoryApiUrl - Custom Factory API URL (default: https://api.factory.ai)\n */\nfunction generateSearchProxyServer(factoryApiUrl: string = \"https://api.factory.ai\"): string {\n return `#!/usr/bin/env node\n// Droid WebSearch Proxy Server\n// Auto-generated by droid-patch --websearch\n// This proxy runs as a child process of droid and is killed when droid exits\n\nconst http = require('http');\nconst https = require('https');\nconst { execSync } = require('child_process');\nconst fs = require('fs');\n\nconst DEBUG = process.env.DROID_SEARCH_DEBUG === '1';\nconst PORT = parseInt(process.env.SEARCH_PROXY_PORT || '0'); // 0 = auto-assign\nconst FACTORY_API = '${factoryApiUrl}';\n\nfunction log(...args) {\n if (DEBUG) console.error('[websearch]', ...args);\n}\n\n// === Search Implementation ===\n\n// Smithery Exa MCP - highest priority, requires SMITHERY_API_KEY and SMITHERY_PROFILE\nasync function searchSmitheryExa(query, numResults) {\n const apiKey = process.env.SMITHERY_API_KEY;\n const profile = process.env.SMITHERY_PROFILE;\n if (!apiKey || !profile) return null;\n\n try {\n // Construct URL with authentication\n const serverUrl = \\`https://server.smithery.ai/exa/mcp?api_key=\\${encodeURIComponent(apiKey)}&profile=\\${encodeURIComponent(profile)}\\`;\n log('Smithery Exa request');\n\n // Use MCP protocol to call the search tool via HTTP POST\n const requestBody = JSON.stringify({\n jsonrpc: '2.0',\n id: 1,\n method: 'tools/call',\n params: {\n name: 'web_search_exa',\n arguments: {\n query: query,\n numResults: numResults\n }\n }\n });\n\n const curlCmd = \\`curl -s -X POST \"\\${serverUrl}\" -H \"Content-Type: application/json\" -d '\\${requestBody.replace(/'/g, \"'\\\\\\\\\\\\\\\\''\")}'\\`;\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 30000 });\n const response = JSON.parse(jsonStr);\n\n // Parse MCP response\n if (response.result && response.result.content) {\n 
// MCP returns content as array of text blocks\n const textContent = response.result.content.find(c => c.type === 'text');\n if (textContent && textContent.text) {\n try {\n const searchResults = JSON.parse(textContent.text);\n if (Array.isArray(searchResults) && searchResults.length > 0) {\n return searchResults.slice(0, numResults).map(item => ({\n title: item.title || '',\n url: item.url || '',\n content: item.text || item.snippet || item.highlights?.join(' ') || '',\n publishedDate: item.publishedDate || null,\n author: item.author || null,\n score: item.score || null\n }));\n }\n } catch (parseErr) {\n log('Smithery response parsing failed');\n }\n }\n }\n\n if (response.error) {\n log('Smithery Exa error:', response.error.message || response.error);\n return null;\n }\n } catch (e) {\n log('Smithery Exa failed:', e.message);\n return null;\n }\n return null;\n}\n\nasync function searchGooglePSE(query, numResults) {\n const apiKey = process.env.GOOGLE_PSE_API_KEY;\n const cx = process.env.GOOGLE_PSE_CX;\n if (!apiKey || !cx) return null;\n\n try {\n const url = \\`https://www.googleapis.com/customsearch/v1?key=\\${apiKey}&cx=\\${cx}&q=\\${encodeURIComponent(query)}&num=\\${Math.min(numResults, 10)}\\`;\n log('Google PSE request:', url.replace(apiKey, '***'));\n\n const curlCmd = \\`curl -s \"\\${url}\"\\`;\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n\n if (data.error) {\n log('Google PSE error:', data.error.message);\n return null;\n }\n return (data.items || []).map(item => ({\n title: item.title,\n url: item.link,\n content: item.snippet || '',\n publishedDate: null,\n author: null,\n score: null\n }));\n } catch (e) {\n log('Google PSE failed:', e.message);\n return null;\n }\n}\n\n// SearXNG - self-hosted meta search engine\nasync function searchSearXNG(query, numResults) {\n const searxngUrl = process.env.SEARXNG_URL;\n if (!searxngUrl) return null;\n\n try {\n const url = \\`\\${searxngUrl}/search?q=\\${encodeURIComponent(query)}&format=json&engines=google,bing,duckduckgo\\`;\n log('SearXNG request:', url);\n\n const curlCmd = \\`curl -s \"\\${url}\" -H \"Accept: application/json\"\\`;\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n\n if (data.results && data.results.length > 0) {\n return data.results.slice(0, numResults).map(item => ({\n title: item.title,\n url: item.url,\n content: item.content || '',\n publishedDate: null,\n author: null,\n score: null\n }));\n }\n } catch (e) {\n log('SearXNG failed:', e.message);\n }\n return null;\n}\n\n// Serper API - free tier available (2500 queries/month)\nasync function searchSerper(query, numResults) {\n const apiKey = process.env.SERPER_API_KEY;\n if (!apiKey) return null;\n\n try {\n const curlCmd = \\`curl -s \"https://google.serper.dev/search\" -H \"X-API-KEY: \\${apiKey}\" -H \"Content-Type: application/json\" -d '{\"q\":\"\\${query.replace(/\"/g, '\\\\\\\\\"')}\",\"num\":\\${numResults}}'\\`;\n log('Serper request');\n\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n\n if (data.organic && data.organic.length > 0) {\n return data.organic.slice(0, numResults).map(item => ({\n title: item.title,\n url: item.link,\n content: item.snippet || '',\n publishedDate: null,\n author: null,\n score: null\n }));\n }\n } catch (e) {\n log('Serper failed:', e.message);\n }\n return null;\n}\n\n// Brave Search API - free tier available\nasync 
function searchBrave(query, numResults) {\n const apiKey = process.env.BRAVE_API_KEY;\n if (!apiKey) return null;\n\n try {\n const url = \\`https://api.search.brave.com/res/v1/web/search?q=\\${encodeURIComponent(query)}&count=\\${numResults}\\`;\n const curlCmd = \\`curl -s \"\\${url}\" -H \"Accept: application/json\" -H \"X-Subscription-Token: \\${apiKey}\"\\`;\n log('Brave request');\n\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n\n if (data.web && data.web.results && data.web.results.length > 0) {\n return data.web.results.slice(0, numResults).map(item => ({\n title: item.title,\n url: item.url,\n content: item.description || '',\n publishedDate: null,\n author: null,\n score: null\n }));\n }\n } catch (e) {\n log('Brave failed:', e.message);\n }\n return null;\n}\n\n// DuckDuckGo - limited reliability due to bot detection\nasync function searchDuckDuckGo(query, numResults) {\n // DuckDuckGo Instant Answer API (limited results but more reliable)\n try {\n const apiUrl = \\`https://api.duckduckgo.com/?q=\\${encodeURIComponent(query)}&format=json&no_html=1&skip_disambig=1\\`;\n const curlCmd = \\`curl -s \"\\${apiUrl}\" -H \"User-Agent: Mozilla/5.0\"\\`;\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n\n const results = [];\n\n if (data.Abstract && data.AbstractURL) {\n results.push({\n title: data.Heading || query,\n url: data.AbstractURL,\n content: data.Abstract,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n\n for (const topic of (data.RelatedTopics || [])) {\n if (results.length >= numResults) break;\n if (topic.Text && topic.FirstURL) {\n results.push({\n title: topic.Text.substring(0, 100),\n url: topic.FirstURL,\n content: topic.Text,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n if (topic.Topics) {\n for (const st of topic.Topics) {\n if (results.length >= numResults) break;\n if (st.Text && st.FirstURL) {\n results.push({\n title: st.Text.substring(0, 100),\n url: st.FirstURL,\n content: st.Text,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n }\n }\n }\n\n if (results.length > 0) {\n log('DDG API:', results.length, 'results');\n return results;\n }\n } catch (e) {\n log('DDG API failed:', e.message);\n }\n\n return [];\n}\n\nfunction parseDDGLiteHTML(html, maxResults) {\n const results = [];\n const linkRegex = /<a[^>]+rel=\"nofollow\"[^>]+href=\"([^\"]+)\"[^>]*>([^<]+)<\\\\/a>/gi;\n const snippetRegex = /<td[^>]*class=\"result-snippet\"[^>]*>([^<]*)<\\\\/td>/gi;\n\n const links = [];\n let match;\n\n while ((match = linkRegex.exec(html)) !== null && links.length < maxResults) {\n let url = match[1];\n if (url.includes('duckduckgo.com') && !url.includes('uddg=')) continue;\n if (url.includes('uddg=')) {\n const uddgMatch = url.match(/uddg=([^&]+)/);\n if (uddgMatch) url = decodeURIComponent(uddgMatch[1]);\n }\n links.push({\n url: url,\n title: decodeHTMLEntities(match[2].trim())\n });\n }\n\n const snippets = [];\n while ((match = snippetRegex.exec(html)) !== null && snippets.length < maxResults) {\n snippets.push(decodeHTMLEntities(match[1].trim()));\n }\n\n for (let i = 0; i < links.length && results.length < maxResults; i++) {\n results.push({\n title: links[i].title,\n url: links[i].url,\n content: snippets[i] || '',\n publishedDate: null,\n author: null,\n score: null\n });\n }\n\n return results;\n}\n\nfunction decodeHTMLEntities(str) {\n return str\n .replace(/&/g, '&')\n 
.replace(/</g, '<')\n .replace(/>/g, '>')\n .replace(/"/g, '\"')\n .replace(/'/g, \"'\")\n .replace(/ /g, ' ');\n}\n\nasync function search(query, numResults = 10) {\n // Priority order:\n // 1. Smithery Exa MCP (best quality if configured)\n // 2. Google PSE (most reliable if configured)\n // 3. Serper (free tier: 2500/month)\n // 4. Brave Search (free tier available)\n // 5. SearXNG (self-hosted)\n // 6. DuckDuckGo (limited due to bot detection)\n\n // 1. Smithery Exa MCP (highest priority)\n const smitheryResults = await searchSmitheryExa(query, numResults);\n if (smitheryResults && smitheryResults.length > 0) {\n log('Using Smithery Exa');\n return { results: smitheryResults, source: 'smithery-exa' };\n }\n\n // 2. Google PSE\n const googleResults = await searchGooglePSE(query, numResults);\n if (googleResults && googleResults.length > 0) {\n log('Using Google PSE');\n return { results: googleResults, source: 'google-pse' };\n }\n\n // 3. Serper\n const serperResults = await searchSerper(query, numResults);\n if (serperResults && serperResults.length > 0) {\n log('Using Serper');\n return { results: serperResults, source: 'serper' };\n }\n\n // 4. Brave Search\n const braveResults = await searchBrave(query, numResults);\n if (braveResults && braveResults.length > 0) {\n log('Using Brave Search');\n return { results: braveResults, source: 'brave' };\n }\n\n // 5. SearXNG\n const searxngResults = await searchSearXNG(query, numResults);\n if (searxngResults && searxngResults.length > 0) {\n log('Using SearXNG');\n return { results: searxngResults, source: 'searxng' };\n }\n\n // 6. DuckDuckGo (last resort, limited results)\n log('Using DuckDuckGo (fallback)');\n const ddgResults = await searchDuckDuckGo(query, numResults);\n return { results: ddgResults, source: 'duckduckgo' };\n}\n\n// === HTTP Proxy Server ===\n\nconst server = http.createServer(async (req, res) => {\n const url = new URL(req.url, \\`http://\\${req.headers.host}\\`);\n\n // Health check\n if (url.pathname === '/health') {\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ status: 'ok', port: server.address()?.port || PORT }));\n return;\n }\n\n // Search endpoint - intercept\n if (url.pathname === '/api/tools/exa/search' && req.method === 'POST') {\n let body = '';\n req.on('data', c => body += c);\n req.on('end', async () => {\n try {\n const { query, numResults } = JSON.parse(body);\n log('Search query:', query);\n const { results, source } = await search(query, numResults || 10);\n log('Results:', results.length, 'from', source);\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ results }));\n } catch (e) {\n log('Search error:', e.message);\n res.writeHead(500, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: String(e), results: [] }));\n }\n });\n return;\n }\n\n // === Standalone mode (controlled by STANDALONE_MODE env) ===\n // Whitelist approach: only allow core LLM APIs, mock everything else\n if (process.env.STANDALONE_MODE === '1') {\n const pathname = url.pathname;\n\n // Whitelist: Core APIs that should be forwarded to upstream\n const isCoreLLMApi = pathname.startsWith('/api/llm/a/') || pathname.startsWith('/api/llm/o/');\n // /api/tools/exa/search is already handled above\n\n if (!isCoreLLMApi) {\n // Special handling for specific routes\n if (pathname === '/api/sessions/create') {\n log('Mock (dynamic):', pathname);\n const sessionId = \\`local-\\${Date.now()}-\\${Math.random().toString(36).slice(2, 
10)}\\`;\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ id: sessionId }));\n return;\n }\n\n if (pathname === '/api/cli/whoami') {\n log('Mock (401):', pathname);\n res.writeHead(401, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'Unauthorized', message: 'Local mode - use token fallback' }));\n return;\n }\n\n if (pathname === '/api/tools/get-url-contents') {\n log('Mock (404):', pathname);\n res.writeHead(404, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'Not available', message: 'Use local URL fetch fallback' }));\n return;\n }\n\n // All other non-core APIs: return empty success\n log('Mock (default):', pathname);\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({}));\n return;\n }\n }\n\n // Proxy core LLM requests to upstream API\n log('Proxy:', req.method, url.pathname);\n\n const proxyUrl = new URL(FACTORY_API + url.pathname + url.search);\n // Choose http or https based on target protocol\n const proxyModule = proxyUrl.protocol === 'https:' ? https : http;\n const proxyReq = proxyModule.request(proxyUrl, {\n method: req.method,\n headers: { ...req.headers, host: proxyUrl.host }\n }, proxyRes => {\n res.writeHead(proxyRes.statusCode, proxyRes.headers);\n proxyRes.pipe(res);\n });\n\n proxyReq.on('error', e => {\n log('Proxy error:', e.message);\n res.writeHead(502, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'Proxy failed: ' + e.message }));\n });\n\n if (req.method !== 'GET' && req.method !== 'HEAD') {\n req.pipe(proxyReq);\n } else {\n proxyReq.end();\n }\n});\n\n// If port is 0, system will automatically assign an available port\nserver.listen(PORT, '127.0.0.1', () => {\n const actualPort = server.address().port;\n const hasGoogle = process.env.GOOGLE_PSE_API_KEY && process.env.GOOGLE_PSE_CX;\n\n // Write port file for parent process to read\n const portFile = process.env.SEARCH_PROXY_PORT_FILE;\n if (portFile) {\n fs.writeFileSync(portFile, String(actualPort));\n }\n\n // Output PORT= line for wrapper script to parse\n console.log('PORT=' + actualPort);\n\n const hasSmithery = process.env.SMITHERY_API_KEY && process.env.SMITHERY_PROFILE;\n log('Search proxy started on http://127.0.0.1:' + actualPort);\n log('Smithery Exa:', hasSmithery ? 'configured (priority 1)' : 'not set');\n log('Google PSE:', hasGoogle ? 'configured' : 'not set');\n log('Serper:', process.env.SERPER_API_KEY ? 'configured' : 'not set');\n log('Brave:', process.env.BRAVE_API_KEY ? 'configured' : 'not set');\n log('SearXNG:', process.env.SEARXNG_URL || 'not set');\n});\n\nprocess.on('SIGTERM', () => { server.close(); process.exit(0); });\nprocess.on('SIGINT', () => { server.close(); process.exit(0); });\n`;\n}\n\n/**\n * Generate unified Wrapper script\n * Each droid instance runs its own proxy:\n * - Uses port 0 to let system auto-assign available port\n * - Proxy runs as child process\n * - Proxy is killed when droid exits\n * - Supports multiple droid instances running simultaneously\n */\n/* eslint-disable no-useless-escape */\nfunction generateUnifiedWrapper(\n droidPath: string,\n proxyScriptPath: string,\n standalone: boolean = false,\n): string {\n const standaloneEnv = standalone ? 
\"STANDALONE_MODE=1 \" : \"\";\n return `#!/bin/bash\n# Droid with WebSearch\n# Auto-generated by droid-patch --websearch\n# Each instance runs its own proxy on a system-assigned port\n\nPROXY_SCRIPT=\"${proxyScriptPath}\"\nDROID_BIN=\"${droidPath}\"\nPROXY_PID=\"\"\nPORT_FILE=\"/tmp/droid-websearch-\\$\\$.port\"\nSTANDALONE=\"${standalone ? \"1\" : \"0\"}\"\n\n# Passthrough for non-interactive/meta commands (avoid starting a proxy for help/version/etc)\nshould_passthrough() {\n # Any help/version flags before \"--\"\n for arg in \"\\$@\"; do\n if [ \"\\$arg\" = \"--\" ]; then\n break\n fi\n case \"\\$arg\" in\n --help|-h|--version|-V)\n return 0\n ;;\n esac\n done\n\n # Top-level command token\n local end_opts=0\n for arg in \"\\$@\"; do\n if [ \"\\$arg\" = \"--\" ]; then\n end_opts=1\n continue\n fi\n if [ \"\\$end_opts\" -eq 0 ] && [[ \"\\$arg\" == -* ]]; then\n continue\n fi\n case \"\\$arg\" in\n help|version|completion|completions|exec)\n return 0\n ;;\n esac\n break\n done\n\n return 1\n}\n\nif should_passthrough \"\\$@\"; then\n exec \"\\$DROID_BIN\" \"\\$@\"\nfi\n\n# Cleanup function - kill proxy when droid exits\ncleanup() {\n if [ -n \"\\$PROXY_PID\" ] && kill -0 \"\\$PROXY_PID\" 2>/dev/null; then\n [ -n \"\\$DROID_SEARCH_DEBUG\" ] && echo \"[websearch] Stopping proxy (PID: \\$PROXY_PID)\" >&2\n kill \"\\$PROXY_PID\" 2>/dev/null\n wait \"\\$PROXY_PID\" 2>/dev/null\n fi\n rm -f \"\\$PORT_FILE\"\n}\n\n# Set up trap to cleanup on exit\ntrap cleanup EXIT INT TERM\n\n[ -n \"\\$DROID_SEARCH_DEBUG\" ] && echo \"[websearch] Starting proxy...\" >&2\n[ \"\\$STANDALONE\" = \"1\" ] && [ -n \"\\$DROID_SEARCH_DEBUG\" ] && echo \"[websearch] Standalone mode enabled\" >&2\n\n# Start proxy with port 0 (system will assign available port)\n# Proxy writes actual port to PORT_FILE\nif [ -n \"\\$DROID_SEARCH_DEBUG\" ]; then\n ${standaloneEnv}SEARCH_PROXY_PORT=0 SEARCH_PROXY_PORT_FILE=\"\\$PORT_FILE\" node \"\\$PROXY_SCRIPT\" 2>&1 &\nelse\n ${standaloneEnv}SEARCH_PROXY_PORT=0 SEARCH_PROXY_PORT_FILE=\"\\$PORT_FILE\" node \"\\$PROXY_SCRIPT\" >/dev/null 2>&1 &\nfi\nPROXY_PID=\\$!\n\n# Wait for proxy to start and get actual port (max 5 seconds)\nfor i in {1..50}; do\n # Check if proxy process is still running\n if ! kill -0 \"\\$PROXY_PID\" 2>/dev/null; then\n [ -n \"\\$DROID_SEARCH_DEBUG\" ] && echo \"[websearch] Proxy process died\" >&2\n break\n fi\n if [ -f \"\\$PORT_FILE\" ]; then\n ACTUAL_PORT=\\$(cat \"\\$PORT_FILE\" 2>/dev/null)\n if [ -n \"\\$ACTUAL_PORT\" ] && curl -s \"http://127.0.0.1:\\$ACTUAL_PORT/health\" > /dev/null 2>&1; then\n [ -n \"\\$DROID_SEARCH_DEBUG\" ] && echo \"[websearch] Proxy ready on port \\$ACTUAL_PORT (PID: \\$PROXY_PID)\" >&2\n break\n fi\n fi\n sleep 0.1\ndone\n\n# Check if proxy started successfully\nif [ ! 
-f \"\\$PORT_FILE\" ] || [ -z \"\\$(cat \"\\$PORT_FILE\" 2>/dev/null)\" ]; then\n echo \"[websearch] Failed to start proxy, running without websearch\" >&2\n cleanup\n exec \"\\$DROID_BIN\" \"\\$@\"\nfi\n\nACTUAL_PORT=\\$(cat \"\\$PORT_FILE\")\nrm -f \"\\$PORT_FILE\"\n\n# Run droid with proxy\nexport FACTORY_API_BASE_URL_OVERRIDE=\"http://127.0.0.1:\\$ACTUAL_PORT\"\n\"\\$DROID_BIN\" \"\\$@\"\nDROID_EXIT_CODE=\\$?\n\n# Cleanup will be called by trap\nexit \\$DROID_EXIT_CODE\n`;\n}\n/* eslint-enable no-useless-escape */\n\n/**\n * Create unified WebSearch files\n *\n * Approach: Proxy server mode\n * - wrapper script starts local proxy server\n * - proxy server intercepts search requests, passes through other requests\n * - uses FACTORY_API_BASE_URL_OVERRIDE env var to point to proxy\n * - alias works directly, no extra steps needed\n *\n * @param outputDir - Directory to write files to\n * @param droidPath - Path to droid binary\n * @param aliasName - Alias name for the wrapper\n * @param apiBase - Custom API base URL for proxy to forward requests to\n * @param standalone - Standalone mode: mock non-LLM Factory APIs\n */\nexport async function createWebSearchUnifiedFiles(\n outputDir: string,\n droidPath: string,\n aliasName: string,\n apiBase?: string,\n standalone: boolean = false,\n): Promise<{ wrapperScript: string; preloadScript: string }> {\n if (!existsSync(outputDir)) {\n await mkdir(outputDir, { recursive: true });\n }\n\n const proxyScriptPath = join(outputDir, `${aliasName}-proxy.js`);\n const wrapperScriptPath = join(outputDir, aliasName);\n\n // Write proxy server script with custom API base if provided\n const factoryApiUrl = apiBase || \"https://api.factory.ai\";\n await writeFile(proxyScriptPath, generateSearchProxyServer(factoryApiUrl));\n console.log(`[*] Created proxy script: ${proxyScriptPath}`);\n\n // Write unified wrapper\n await writeFile(\n wrapperScriptPath,\n generateUnifiedWrapper(droidPath, proxyScriptPath, standalone),\n );\n await chmod(wrapperScriptPath, 0o755);\n console.log(`[*] Created wrapper: ${wrapperScriptPath}`);\n\n if (standalone) {\n console.log(`[*] Standalone mode enabled`);\n }\n\n return {\n wrapperScript: wrapperScriptPath,\n preloadScript: proxyScriptPath, // Keep interface compatible\n };\n}\n","import { chmod, mkdir, writeFile } from \"node:fs/promises\";\nimport { existsSync } from \"node:fs\";\nimport { join } from \"node:path\";\n\nfunction generateStatuslineMonitorScript(): string {\n // Keep this script dependency-free (Node built-ins only). It runs from the wrapper via `node`.\n return `#!/usr/bin/env node\n/* Auto-generated by droid-patch --statusline */\n\nconst fs = require('fs');\nconst os = require('os');\nconst path = require('path');\nconst { spawn, spawnSync } = require('child_process');\n\n// This monitor does NOT draw directly to the terminal. It emits newline-delimited\n// statusline frames to stdout. A wrapper (PTY proxy) is responsible for rendering\n// the latest frame on a reserved bottom row to avoid flicker.\n\nconst FACTORY_HOME = path.join(os.homedir(), '.factory');\n\nconst SESSIONS_ROOT = path.join(FACTORY_HOME, 'sessions');\nconst LOG_PATH = path.join(FACTORY_HOME, 'logs', 'droid-log-single.log');\nconst CONFIG_PATH = path.join(FACTORY_HOME, 'config.json');\nconst GLOBAL_SETTINGS_PATH = path.join(FACTORY_HOME, 'settings.json');\n\nconst IS_APPLE_TERMINAL = process.env.TERM_PROGRAM === 'Apple_Terminal';\nconst MIN_RENDER_INTERVAL_MS = IS_APPLE_TERMINAL ? 
1000 : 500;\n\nconst START_MS = Date.now();\nconst ARGS = process.argv.slice(2);\nconst PGID = Number(process.env.DROID_STATUSLINE_PGID || '');\nconst SESSION_ID_RE = /\"sessionId\":\"([0-9a-f-]{36})\"/i;\n\nfunction sleep(ms) {\n return new Promise((r) => setTimeout(r, ms));\n}\n\nfunction isPositiveInt(n) {\n return Number.isFinite(n) && n > 0;\n}\n\nfunction extractSessionIdFromLine(line) {\n if (!line) return null;\n const m = String(line).match(SESSION_ID_RE);\n return m ? m[1] : null;\n}\n\nfunction parseLineTimestampMs(line) {\n const s = String(line || '');\n if (!s || s[0] !== '[') return null;\n const end = s.indexOf(']');\n if (end <= 1) return null;\n const raw = s.slice(1, end);\n const ms = Date.parse(raw);\n return Number.isFinite(ms) ? ms : null;\n}\n\nfunction safeStatMtimeMs(p) {\n try {\n const stat = fs.statSync(p);\n const ms = Number(stat?.mtimeMs ?? 0);\n return Number.isFinite(ms) ? ms : 0;\n } catch {\n return 0;\n }\n}\n\nfunction nextCompactionState(line, current) {\n if (!line) return current;\n if (line.includes('[Compaction] Start')) return true;\n const endMarkers = ['End', 'Done', 'Finish', 'Finished', 'Complete', 'Completed'];\n if (endMarkers.some(m => line.includes('[Compaction] ' + m))) return false;\n return current;\n}\n\nfunction firstNonNull(promises) {\n const list = Array.isArray(promises) ? promises : [];\n if (list.length === 0) return Promise.resolve(null);\n return new Promise((resolve) => {\n let pending = list.length;\n let done = false;\n for (const p of list) {\n Promise.resolve(p)\n .then((value) => {\n if (done) return;\n if (value) {\n done = true;\n resolve(value);\n return;\n }\n pending -= 1;\n if (pending <= 0) resolve(null);\n })\n .catch(() => {\n if (done) return;\n pending -= 1;\n if (pending <= 0) resolve(null);\n });\n }\n });\n}\n\nfunction listPidsInProcessGroup(pgid) {\n if (!isPositiveInt(pgid)) return [];\n try {\n const res = spawnSync('ps', ['-ax', '-o', 'pid=,pgid='], {\n encoding: 'utf8',\n stdio: ['ignore', 'pipe', 'ignore'],\n timeout: 800,\n });\n if (!res || res.status !== 0) return [];\n const out = String(res.stdout || '');\n const pids = [];\n for (const line of out.split('\\\\n')) {\n const parts = line.trim().split(/\\\\s+/);\n if (parts.length < 2) continue;\n const pid = Number(parts[0]);\n const g = Number(parts[1]);\n if (Number.isFinite(pid) && g === pgid) pids.push(pid);\n }\n return pids;\n } catch {\n return [];\n }\n}\n\nfunction resolveOpenSessionFromPids(pids) {\n if (!Array.isArray(pids) || pids.length === 0) return null;\n // lsof prints file names as lines prefixed with \"n\" when using -Fn\n try {\n const res = spawnSync('lsof', ['-p', pids.join(','), '-Fn'], {\n encoding: 'utf8',\n stdio: ['ignore', 'pipe', 'ignore'],\n timeout: 1200,\n });\n if (!res || res.status !== 0) return null;\n const out = String(res.stdout || '');\n for (const line of out.split('\\\\n')) {\n if (!line || line[0] !== 'n') continue;\n const name = line.slice(1);\n if (!name.startsWith(SESSIONS_ROOT + path.sep)) continue;\n const m = name.match(/([0-9a-f-]{36})\\\\.(jsonl|settings\\\\.json)$/i);\n if (!m) continue;\n const id = m[1];\n const workspaceDir = path.dirname(name);\n if (path.dirname(workspaceDir) !== SESSIONS_ROOT) continue;\n return { workspaceDir, id };\n }\n } catch {\n return null;\n }\n return null;\n}\n\nasync function resolveSessionFromProcessGroup(shouldAbort, maxTries = 20) {\n if (!isPositiveInt(PGID)) return null;\n // Wait a little for droid to create/open the session files.\n for (let i = 0; i < 
maxTries; i++) {\n if (shouldAbort && shouldAbort()) return null;\n const pids = listPidsInProcessGroup(PGID);\n const found = resolveOpenSessionFromPids(pids);\n if (found) return found;\n await sleep(100);\n }\n return null;\n}\n\nfunction safeReadFile(filePath) {\n try {\n return fs.readFileSync(filePath, 'utf8');\n } catch {\n return null;\n }\n}\n\nfunction safeJsonParse(text) {\n if (!text) return null;\n try {\n // Factory settings/config files can contain comments. Strip them safely without\n // breaking URLs like \"http://...\" which contain \"//\" inside strings.\n const stripComments = (input) => {\n let out = '';\n let inString = false;\n let escape = false;\n for (let i = 0; i < input.length; i++) {\n const ch = input[i];\n const next = input[i + 1];\n\n if (inString) {\n out += ch;\n if (escape) {\n escape = false;\n continue;\n }\n if (ch === '\\\\\\\\') {\n escape = true;\n continue;\n }\n if (ch === '\"') {\n inString = false;\n }\n continue;\n }\n\n if (ch === '\"') {\n inString = true;\n out += ch;\n continue;\n }\n\n // Line comment\n if (ch === '/' && next === '/') {\n while (i < input.length && input[i] !== '\\\\n') i++;\n out += '\\\\n';\n continue;\n }\n\n // Block comment\n if (ch === '/' && next === '*') {\n i += 2;\n while (i < input.length && !(input[i] === '*' && input[i + 1] === '/')) i++;\n i += 1;\n continue;\n }\n\n out += ch;\n }\n return out;\n };\n\n return JSON.parse(stripComments(text));\n } catch {\n return null;\n }\n}\n\nfunction readJsonFile(filePath) {\n return safeJsonParse(safeReadFile(filePath));\n}\n\nfunction isUuid(text) {\n return /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(text);\n}\n\nfunction parseResume(args) {\n for (let i = 0; i < args.length; i++) {\n const a = args[i];\n if (a === '-r' || a === '--resume') {\n const next = args[i + 1];\n if (next && isUuid(next)) return { resumeFlag: true, resumeId: next };\n return { resumeFlag: true, resumeId: null };\n }\n if (a.startsWith('--resume=')) {\n const value = a.slice('--resume='.length);\n return { resumeFlag: true, resumeId: isUuid(value) ? value : null };\n }\n }\n return { resumeFlag: false, resumeId: null };\n}\n\nfunction sanitizeWorkspaceDirName(cwd) {\n return String(cwd)\n .replace(/[:]/g, '')\n .replace(/[\\\\\\\\/]/g, '-')\n .replace(/\\\\s+/g, '-');\n}\n\nfunction listSessionCandidates(workspaceDir) {\n let files = [];\n try {\n files = fs.readdirSync(workspaceDir);\n } catch {\n return [];\n }\n const candidates = [];\n for (const file of files) {\n const m = file.match(/^([0-9a-f-]{36})\\\\.(jsonl|settings\\\\.json)$/i);\n if (!m) continue;\n const id = m[1];\n const fullPath = path.join(workspaceDir, file);\n try {\n const stat = fs.statSync(fullPath);\n candidates.push({ id, fullPath, mtimeMs: stat.mtimeMs });\n } catch {\n // ignore\n }\n }\n return candidates;\n}\n\nfunction findWorkspaceDirForSessionId(workspaceDirs, sessionId) {\n for (const dir of workspaceDirs) {\n try {\n const settingsPath = path.join(dir, sessionId + '.settings.json');\n if (fs.existsSync(settingsPath)) return dir;\n } catch {\n // ignore\n }\n }\n return null;\n}\n\nfunction pickLatestSessionAcross(workspaceDirs) {\n let best = null;\n for (const dir of workspaceDirs) {\n const candidates = listSessionCandidates(dir);\n for (const c of candidates) {\n if (!best || c.mtimeMs > best.mtimeMs) {\n best = { workspaceDir: dir, id: c.id, mtimeMs: c.mtimeMs };\n }\n }\n }\n return best ? 
{ workspaceDir: best.workspaceDir, id: best.id } : null;\n}\n\nasync function waitForNewSessionAcross(workspaceDirs, knownIdsByWorkspace, startMs, shouldAbort) {\n for (let i = 0; i < 80; i++) {\n if (shouldAbort && shouldAbort()) return null;\n let best = null;\n for (const dir of workspaceDirs) {\n const known = knownIdsByWorkspace.get(dir) || new Set();\n const candidates = listSessionCandidates(dir);\n for (const c of candidates) {\n if (!(c.mtimeMs >= startMs - 50 || !known.has(c.id))) continue;\n if (!best || c.mtimeMs > best.mtimeMs) {\n best = { workspaceDir: dir, id: c.id, mtimeMs: c.mtimeMs };\n }\n }\n }\n if (best?.id) return { workspaceDir: best.workspaceDir, id: best.id };\n await sleep(100);\n }\n return null;\n}\n\nfunction safeRealpath(p) {\n try {\n return fs.realpathSync(p);\n } catch {\n return null;\n }\n}\n\nfunction resolveWorkspaceDirs(cwd) {\n const logical = cwd;\n const real = safeRealpath(cwd);\n const dirs = [];\n for (const value of [logical, real]) {\n if (!value || typeof value !== 'string') continue;\n dirs.push(path.join(SESSIONS_ROOT, sanitizeWorkspaceDirName(value)));\n }\n return Array.from(new Set(dirs));\n}\n\nfunction resolveSessionSettings(workspaceDir, sessionId) {\n const settingsPath = path.join(workspaceDir, sessionId + '.settings.json');\n const settings = readJsonFile(settingsPath) || {};\n return { settingsPath, settings };\n}\n\nfunction resolveGlobalSettingsModel() {\n const global = readJsonFile(GLOBAL_SETTINGS_PATH);\n return global && typeof global.model === 'string' ? global.model : null;\n}\n\nfunction resolveCustomModelIndex(modelId) {\n if (typeof modelId !== 'string') return null;\n if (!modelId.startsWith('custom:')) return null;\n const m = modelId.match(/-(\\\\d+)$/);\n if (!m) return null;\n const idx = Number(m[1]);\n return Number.isFinite(idx) ? idx : null;\n}\n\nfunction resolveUnderlyingModelId(modelId, factoryConfig) {\n const idx = resolveCustomModelIndex(modelId);\n if (idx == null) return modelId;\n const entry = factoryConfig?.custom_models?.[idx];\n if (entry && typeof entry.model === 'string') return entry.model;\n return modelId;\n}\n\nfunction resolveProvider(modelId, factoryConfig) {\n const idx = resolveCustomModelIndex(modelId);\n if (idx != null) {\n const entry = factoryConfig?.custom_models?.[idx];\n if (entry && typeof entry.provider === 'string') return entry.provider;\n }\n if (typeof modelId === 'string' && modelId.startsWith('claude-')) return 'anthropic';\n return '';\n}\n\nfunction formatInt(n) {\n if (!Number.isFinite(n)) return '0';\n return Math.round(n).toString();\n}\n\nfunction formatTokens(n) {\n if (!Number.isFinite(n)) return '0';\n const sign = n < 0 ? '-' : '';\n const abs = Math.abs(n);\n if (abs >= 1_000_000) {\n const v = abs / 1_000_000;\n const s = v >= 10 ? v.toFixed(0) : v.toFixed(1);\n return sign + s.replace(/\\\\.0$/, '') + 'M';\n }\n if (abs >= 10_000) {\n const v = abs / 1_000;\n const s = v >= 100 ? 
v.toFixed(0) : v.toFixed(1);\n return sign + s.replace(/\\\\.0$/, '') + 'k';\n }\n return sign + Math.round(abs).toString();\n}\n\nfunction emitFrame(line) {\n try {\n process.stdout.write(String(line || '') + '\\\\n');\n } catch {\n // ignore\n }\n}\n\nfunction seg(bg, fg, text) {\n if (!text) return '';\n return '\\\\x1b[48;5;' + bg + 'm' + '\\\\x1b[38;5;' + fg + 'm' + ' ' + text + ' ' + '\\\\x1b[0m';\n}\n\nfunction resolveGitBranch(cwd) {\n try {\n const res = spawnSync('git', ['rev-parse', '--abbrev-ref', 'HEAD'], {\n cwd,\n encoding: 'utf8',\n stdio: ['ignore', 'pipe', 'ignore'],\n timeout: 800,\n });\n if (res && res.status === 0) {\n const branch = String(res.stdout || '').trim();\n if (branch && branch !== 'HEAD') return branch;\n }\n } catch {}\n try {\n const headPath = path.join(cwd, '.git', 'HEAD');\n const head = safeReadFile(headPath);\n if (head && head.startsWith('ref: ')) {\n const ref = head.slice('ref: '.length).trim();\n const m = ref.match(/refs\\\\/heads\\\\/(.+)$/);\n if (m) return m[1];\n }\n } catch {}\n return '';\n}\n\nfunction resolveGitDiffSummary(cwd) {\n try {\n const res = spawnSync('git', ['diff', '--shortstat'], {\n cwd,\n encoding: 'utf8',\n stdio: ['ignore', 'pipe', 'ignore'],\n timeout: 800,\n });\n if (!res || res.status !== 0) return '';\n const text = String(res.stdout || '').trim();\n if (!text) return '';\n const ins = (text.match(/(\\\\d+)\\\\sinsertions?\\\\(\\\\+\\\\)/) || [])[1];\n const del = (text.match(/(\\\\d+)\\\\sdeletions?\\\\(-\\\\)/) || [])[1];\n const i = ins ? Number(ins) : 0;\n const d = del ? Number(del) : 0;\n if (!Number.isFinite(i) && !Number.isFinite(d)) return '';\n if (i === 0 && d === 0) return '';\n return '(+' + formatInt(i) + ',-' + formatInt(d) + ')';\n } catch {\n return '';\n }\n}\n\nfunction buildLine(params) {\n const {\n provider,\n model,\n cwdBase,\n gitBranch,\n gitDiff,\n usedTokens,\n cacheRead,\n deltaInput,\n lastOutputTokens,\n sessionUsage,\n compacting,\n } = params;\n\n let ctxPart = 'Ctx: ' + formatTokens(usedTokens);\n\n const cachePart =\n cacheRead > 0 || deltaInput > 0\n ? ' c' + formatTokens(cacheRead) + '+n' + formatTokens(deltaInput)\n : '';\n\n const compactPart = compacting ? ' COMPACT' : '';\n\n const usagePart = (() => {\n const u = sessionUsage || {};\n const input = Number(u.inputTokens ?? 0);\n const output = Number(u.outputTokens ?? 0);\n const cacheCreation = Number(u.cacheCreationTokens ?? 0);\n const cacheReadTotal = Number(u.cacheReadTokens ?? 0);\n const thinking = Number(u.thinkingTokens ?? 0);\n if (!(input || output || cacheCreation || cacheReadTotal || thinking)) return '';\n const parts = [];\n if (input) parts.push('In:' + formatTokens(input));\n if (output) parts.push('Out:' + formatTokens(output));\n if (cacheCreation) parts.push('Cre:' + formatTokens(cacheCreation));\n if (cacheReadTotal) parts.push('Read:' + formatTokens(cacheReadTotal));\n if (thinking) parts.push('Think:' + formatTokens(thinking));\n if (lastOutputTokens > 0) parts.push('LastOut:' + formatTokens(lastOutputTokens));\n return parts.join(' ');\n })();\n\n const modelPart = model ? 'Model: ' + model : '';\n const providerPart = provider ? 'Prov: ' + provider : '';\n const cwdPart = cwdBase ? 'cwd: ' + cwdBase : '';\n const branchPart = gitBranch ? 
'\\\\uE0A0 ' + gitBranch : '';\n const diffPart = gitDiff || '';\n\n // Background segments (powerline-like blocks)\n const sModel = seg(88, 15, modelPart); // dark red\n const sProvider = seg(160, 15, providerPart); // red\n const sCtx = seg(220, 0, ctxPart + (cachePart ? ' (' + cachePart.trim() + ')' : '')); // yellow\n const sUsage = seg(173, 0, usagePart); // orange-ish\n const sBranch = seg(24, 15, branchPart); // blue\n const sDiff = seg(34, 0, diffPart); // green\n const sCwd = seg(238, 15, cwdPart); // gray\n const sExtra = seg(99, 15, compactPart.trim()); // purple-ish\n\n return [sModel, sProvider, sCtx, sUsage, sBranch, sDiff, sCwd, sExtra].filter(Boolean).join('');\n}\n\nasync function main() {\n let factoryConfig = readJsonFile(CONFIG_PATH) || {};\n\n const cwd = process.cwd();\n const cwdBase = path.basename(cwd) || cwd;\n const workspaceDirs = resolveWorkspaceDirs(cwd);\n const knownIdsByWorkspace = new Map();\n for (const dir of workspaceDirs) {\n const set = new Set();\n for (const c of listSessionCandidates(dir)) set.add(c.id);\n knownIdsByWorkspace.set(dir, set);\n }\n\n const { resumeFlag, resumeId } = parseResume(ARGS);\n\n let sessionId = null;\n let workspaceDir = null;\n if (resumeId) {\n sessionId = resumeId;\n workspaceDir = findWorkspaceDirForSessionId(workspaceDirs, sessionId) || workspaceDirs[0] || null;\n } else {\n let abortResolve = false;\n const shouldAbort = () => abortResolve;\n\n const byProcPromise = resolveSessionFromProcessGroup(shouldAbort, 20);\n\n let picked = null;\n if (resumeFlag) {\n // For --resume without an explicit id, don't block startup too long on ps/lsof.\n // Prefer process-group resolution when it is fast; otherwise fall back to latest.\n picked = await Promise.race([\n byProcPromise,\n sleep(400).then(() => null),\n ]);\n if (!picked) picked = pickLatestSessionAcross(workspaceDirs);\n } else {\n const freshPromise = waitForNewSessionAcross(workspaceDirs, knownIdsByWorkspace, START_MS, shouldAbort);\n picked = await firstNonNull([byProcPromise, freshPromise]);\n if (!picked) picked = pickLatestSessionAcross(workspaceDirs);\n }\n\n abortResolve = true;\n\n sessionId = picked?.id || null;\n workspaceDir = picked?.workspaceDir || workspaceDirs[0] || null;\n }\n\n if (!sessionId || !workspaceDir) return;\n let sessionIdLower = String(sessionId).toLowerCase();\n\n let settingsPath = '';\n let sessionSettings = {};\n ({ settingsPath, settings: sessionSettings } = resolveSessionSettings(workspaceDir, sessionId));\n\n let configMtimeMs = safeStatMtimeMs(CONFIG_PATH);\n let globalSettingsMtimeMs = safeStatMtimeMs(GLOBAL_SETTINGS_PATH);\n let globalSettingsModel = resolveGlobalSettingsModel();\n\n let modelId =\n (sessionSettings && typeof sessionSettings.model === 'string' ? sessionSettings.model : null) ||\n globalSettingsModel ||\n null;\n\n let provider =\n sessionSettings && typeof sessionSettings.providerLock === 'string'\n ? sessionSettings.providerLock\n : resolveProvider(modelId, factoryConfig);\n let underlyingModel = resolveUnderlyingModelId(modelId, factoryConfig) || modelId || 'unknown';\n\n function refreshModel() {\n const nextModelId =\n (sessionSettings && typeof sessionSettings.model === 'string' ? sessionSettings.model : null) ||\n globalSettingsModel ||\n null;\n\n // Use providerLock if set, otherwise resolve from model/config (same logic as initialization)\n const nextProvider =\n sessionSettings && typeof sessionSettings.providerLock === 'string'\n ? 
sessionSettings.providerLock\n : resolveProvider(nextModelId, factoryConfig);\n const nextUnderlying = resolveUnderlyingModelId(nextModelId, factoryConfig) || nextModelId || 'unknown';\n\n let changed = false;\n if (nextModelId !== modelId) {\n modelId = nextModelId;\n changed = true;\n }\n if (nextProvider !== provider) {\n provider = nextProvider;\n changed = true;\n }\n if (nextUnderlying !== underlyingModel) {\n underlyingModel = nextUnderlying;\n changed = true;\n }\n\n if (changed) renderNow();\n }\n\n let last = { cacheReadInputTokens: 0, contextCount: 0, outputTokens: 0 };\n let sessionUsage =\n sessionSettings && typeof sessionSettings.tokenUsage === 'object' && sessionSettings.tokenUsage\n ? sessionSettings.tokenUsage\n : {};\n let compacting = false;\n let lastRenderAt = 0;\n let lastRenderedLine = '';\n let gitBranch = '';\n let gitDiff = '';\n let lastContextMs = 0;\n\n function renderNow() {\n const usedTokens = (last.cacheReadInputTokens || 0) + (last.contextCount || 0);\n const line = buildLine({\n provider,\n model: underlyingModel,\n cwdBase,\n gitBranch,\n gitDiff,\n usedTokens,\n cacheRead: last.cacheReadInputTokens || 0,\n deltaInput: last.contextCount || 0,\n lastOutputTokens: last.outputTokens || 0,\n sessionUsage,\n compacting,\n });\n if (line !== lastRenderedLine) {\n lastRenderedLine = line;\n emitFrame(line);\n }\n }\n\n // Initial render.\n renderNow();\n\n // Resolve git info asynchronously so startup isn't blocked on large repos.\n setTimeout(() => {\n try {\n gitBranch = resolveGitBranch(cwd);\n gitDiff = resolveGitDiffSummary(cwd);\n renderNow();\n } catch {}\n }, 0).unref();\n\n let reseedInProgress = false;\n let reseedQueued = false;\n\n function updateLastFromContext(ctx, updateOutputTokens, tsMs) {\n const ts = Number.isFinite(tsMs) ? tsMs : null;\n if (ts != null && lastContextMs && ts < lastContextMs) return false;\n const cacheRead = Number(ctx?.cacheReadInputTokens);\n const contextCount = Number(ctx?.contextCount);\n const out = Number(ctx?.outputTokens);\n if (Number.isFinite(cacheRead)) last.cacheReadInputTokens = cacheRead;\n if (Number.isFinite(contextCount)) last.contextCount = contextCount;\n if (updateOutputTokens && Number.isFinite(out)) last.outputTokens = out;\n if (ts != null) lastContextMs = ts;\n return true;\n }\n\n function seedLastContextFromLog(options) {\n const opts = options || {};\n const maxScanBytes = Number.isFinite(opts.maxScanBytes) ? opts.maxScanBytes : 64 * 1024 * 1024;\n const preferStreaming = !!opts.preferStreaming;\n const minTimestampMs = Number.isFinite(lastContextMs) && lastContextMs > 0 ? lastContextMs : 0;\n const earlyStopAfterBestBytes = Math.min(2 * 1024 * 1024, Math.max(256 * 1024, maxScanBytes));\n\n if (reseedInProgress) {\n reseedQueued = true;\n return;\n }\n reseedInProgress = true;\n\n setTimeout(() => {\n try {\n // Backward scan to find the most recent context entry for this session.\n // Prefer streaming context if requested; otherwise accept any context line\n // that includes cacheReadInputTokens/contextCount fields.\n const CHUNK_BYTES = 1024 * 1024; // 1 MiB\n\n const fd = fs.openSync(LOG_PATH, 'r');\n try {\n const stat = fs.fstatSync(fd);\n const size = Number(stat?.size ?? 
0);\n let pos = size;\n let scanned = 0;\n let remainder = '';\n let bestCtx = null;\n let bestIsStreaming = false;\n let bestTs = null;\n let bestHasTs = false;\n let bytesSinceBest = 0;\n\n while (pos > 0 && scanned < maxScanBytes && (!bestHasTs || bytesSinceBest < earlyStopAfterBestBytes)) {\n const readSize = Math.min(CHUNK_BYTES, pos);\n const start = pos - readSize;\n const buf = Buffer.alloc(readSize);\n fs.readSync(fd, buf, 0, readSize, start);\n pos = start;\n scanned += readSize;\n bytesSinceBest += readSize;\n\n let text = buf.toString('utf8') + remainder;\n let lines = String(text).split('\\\\n');\n remainder = lines.shift() || '';\n if (pos === 0 && remainder) {\n lines.unshift(remainder);\n remainder = '';\n }\n\n for (let i = lines.length - 1; i >= 0; i--) {\n const line = String(lines[i] || '').trimEnd();\n if (!line) continue;\n if (!line.includes('Context:')) continue;\n const sid = extractSessionIdFromLine(line);\n if (!sid || String(sid).toLowerCase() !== sessionIdLower) continue;\n\n const isStreaming = line.includes('[Agent] Streaming result');\n if (preferStreaming && !isStreaming) continue;\n\n const ctxIndex = line.indexOf('Context: ');\n if (ctxIndex === -1) continue;\n const jsonStr = line.slice(ctxIndex + 'Context: '.length).trim();\n let ctx;\n try {\n ctx = JSON.parse(jsonStr);\n } catch {\n continue;\n }\n\n const cacheRead = Number(ctx?.cacheReadInputTokens);\n const contextCount = Number(ctx?.contextCount);\n const hasUsage = Number.isFinite(cacheRead) || Number.isFinite(contextCount);\n if (!hasUsage) continue;\n\n const ts = parseLineTimestampMs(line);\n if (ts != null && minTimestampMs && ts < minTimestampMs) {\n continue;\n }\n\n if (ts != null) {\n if (!bestHasTs || ts > bestTs) {\n bestCtx = ctx;\n bestIsStreaming = isStreaming;\n bestTs = ts;\n bestHasTs = true;\n bytesSinceBest = 0;\n }\n } else if (!bestHasTs && !bestCtx) {\n // No timestamps available yet: the first match when scanning backward\n // is the most recent in file order.\n bestCtx = ctx;\n bestIsStreaming = isStreaming;\n bestTs = null;\n }\n }\n\n if (remainder.length > 8192) remainder = remainder.slice(-8192);\n }\n\n if (bestCtx) {\n updateLastFromContext(bestCtx, bestIsStreaming, bestTs);\n }\n } finally {\n try {\n fs.closeSync(fd);\n } catch {}\n }\n } catch {\n // ignore\n } finally {\n reseedInProgress = false;\n if (reseedQueued) {\n reseedQueued = false;\n seedLastContextFromLog({ maxScanBytes, preferStreaming });\n return;\n }\n renderNow();\n }\n }, 0).unref();\n }\n\n // Seed prompt-context usage from existing logs (important for resumed sessions).\n // Do this asynchronously to avoid delaying the first statusline frame.\n let initialSeedDone = false;\n if (resumeFlag || resumeId) {\n initialSeedDone = true;\n seedLastContextFromLog({ maxScanBytes: 64 * 1024 * 1024, preferStreaming: true });\n }\n\n // Watch session settings for autonomy/reasoning changes (cheap polling with mtime).\n let settingsMtimeMs = 0;\n let lastCtxPollMs = 0;\n setInterval(() => {\n // Refresh config/global settings if they changed (model display depends on these).\n const configMtime = safeStatMtimeMs(CONFIG_PATH);\n if (configMtime && configMtime !== configMtimeMs) {\n configMtimeMs = configMtime;\n factoryConfig = readJsonFile(CONFIG_PATH) || {};\n refreshModel();\n }\n\n const globalMtime = safeStatMtimeMs(GLOBAL_SETTINGS_PATH);\n if (globalMtime && globalMtime !== globalSettingsMtimeMs) {\n globalSettingsMtimeMs = globalMtime;\n globalSettingsModel = resolveGlobalSettingsModel();\n 
refreshModel();\n }\n\n try {\n const stat = fs.statSync(settingsPath);\n if (stat.mtimeMs === settingsMtimeMs) return;\n settingsMtimeMs = stat.mtimeMs;\n const next = readJsonFile(settingsPath) || {};\n sessionSettings = next;\n\n // Keep session token usage in sync (used by /status).\n if (next && typeof next.tokenUsage === 'object' && next.tokenUsage) {\n sessionUsage = next.tokenUsage;\n }\n\n // Keep model/provider in sync (model can change during a running session).\n refreshModel();\n\n const now = Date.now();\n if (now - lastRenderAt >= MIN_RENDER_INTERVAL_MS) {\n lastRenderAt = now;\n renderNow();\n }\n } catch {\n // ignore\n }\n }, 750).unref();\n\n // Fallback: periodically rescan log if context is still zero after startup.\n // This handles cases where tail misses early log entries.\n setInterval(() => {\n const now = Date.now();\n if (now - START_MS < 3000) return; // wait 3s after startup\n if (last.contextCount > 0 || last.cacheReadInputTokens > 0) return; // already have data\n if (now - lastCtxPollMs < 5000) return; // throttle to every 5s\n lastCtxPollMs = now;\n seedLastContextFromLog({ maxScanBytes: 4 * 1024 * 1024, preferStreaming: false });\n }, 2000).unref();\n\n function switchToSession(nextSessionId) {\n if (!nextSessionId || !isUuid(nextSessionId)) return;\n const nextLower = String(nextSessionId).toLowerCase();\n if (nextLower === sessionIdLower) return;\n\n sessionId = nextSessionId;\n sessionIdLower = nextLower;\n\n const resolved = resolveSessionSettings(workspaceDir, nextSessionId);\n settingsPath = resolved.settingsPath;\n sessionSettings = resolved.settings || {};\n\n sessionUsage =\n sessionSettings && typeof sessionSettings.tokenUsage === 'object' && sessionSettings.tokenUsage\n ? sessionSettings.tokenUsage\n : {};\n\n // Reset cached state for the new session.\n last = { cacheReadInputTokens: 0, contextCount: 0, outputTokens: 0 };\n lastContextMs = 0;\n compacting = false;\n settingsMtimeMs = 0;\n lastCtxPollMs = 0;\n\n refreshModel();\n renderNow();\n\n // Best-effort: if the new session already has Context lines in the log, seed quickly.\n seedLastContextFromLog({ maxScanBytes: 8 * 1024 * 1024, preferStreaming: false });\n }\n\n // Follow the Factory log and update based on session-scoped events.\n const tail = spawn('tail', ['-n', '0', '-F', LOG_PATH], {\n stdio: ['ignore', 'pipe', 'ignore'],\n });\n\n let buffer = '';\n tail.stdout.on('data', (chunk) => {\n buffer += String(chunk);\n while (true) {\n const idx = buffer.indexOf('\\\\n');\n if (idx === -1) break;\n const line = buffer.slice(0, idx).trimEnd();\n buffer = buffer.slice(idx + 1);\n\n const tsMs = parseLineTimestampMs(line);\n const lineSessionId = extractSessionIdFromLine(line);\n const isSessionLine =\n lineSessionId && String(lineSessionId).toLowerCase() === sessionIdLower;\n\n // /compress (aka /compact) can create a new session ID. 
Follow it so ctx/model keep updating.\n if (line.includes('oldSessionId') && line.includes('newSessionId') && line.includes('Context:')) {\n const ctxIndex = line.indexOf('Context: ');\n if (ctxIndex !== -1) {\n const jsonStr = line.slice(ctxIndex + 'Context: '.length).trim();\n try {\n const meta = JSON.parse(jsonStr);\n const oldId = meta?.oldSessionId;\n const newId = meta?.newSessionId;\n if (\n isUuid(oldId) &&\n isUuid(newId) &&\n String(oldId).toLowerCase() === sessionIdLower &&\n String(newId).toLowerCase() !== sessionIdLower\n ) {\n switchToSession(String(newId));\n continue;\n }\n } catch {\n // ignore\n }\n }\n }\n\n let compactionChanged = false;\n let compactionEnded = false;\n if (line.includes('[Compaction]')) {\n // Accept session-scoped compaction lines; allow end markers to clear even\n // if the line lacks a session id (some builds omit Context on end lines).\n if (isSessionLine || (compacting && !lineSessionId)) {\n const next = nextCompactionState(line, compacting);\n if (next !== compacting) {\n compacting = next;\n compactionChanged = true;\n if (!compacting) compactionEnded = true;\n }\n }\n }\n\n if (compactionChanged && compacting) {\n // Compaction can start after a context-limit error. Ensure we display the latest\n // pre-compaction ctx by reseeding from log (tail can miss bursts).\n seedLastContextFromLog({ maxScanBytes: 8 * 1024 * 1024, preferStreaming: true });\n }\n\n if (compactionEnded) {\n // ctx usage changes dramatically after compaction, but the next Context line\n // can be delayed. Clear displayed ctx immediately to avoid showing stale numbers.\n last.cacheReadInputTokens = 0;\n last.contextCount = 0;\n if (tsMs != null) lastContextMs = tsMs;\n }\n\n if (!line.includes('Context:')) {\n if (compactionChanged) {\n lastRenderAt = Date.now();\n renderNow();\n }\n if (compactionEnded) {\n // Compaction often completes between turns. 
Refresh ctx numbers promptly\n // by rescanning the most recent Context entry for this session.\n setTimeout(() => {\n seedLastContextFromLog({ maxScanBytes: 8 * 1024 * 1024, preferStreaming: false });\n }, 250).unref();\n }\n continue;\n }\n if (!isSessionLine) {\n if (compactionChanged) {\n lastRenderAt = Date.now();\n renderNow();\n }\n if (compactionEnded) {\n setTimeout(() => {\n seedLastContextFromLog({ maxScanBytes: 8 * 1024 * 1024, preferStreaming: false });\n }, 250).unref();\n }\n continue;\n }\n\n const ctxIndex = line.indexOf('Context: ');\n if (ctxIndex === -1) continue;\n const jsonStr = line.slice(ctxIndex + 'Context: '.length).trim();\n let ctx;\n try {\n ctx = JSON.parse(jsonStr);\n } catch {\n if (compactionChanged) {\n lastRenderAt = Date.now();\n renderNow();\n }\n continue;\n }\n\n // Context usage can appear on multiple session-scoped log lines; update whenever present.\n // (Streaming is still the best source for outputTokens / LastOut.)\n updateLastFromContext(ctx, false, tsMs);\n\n // For new sessions: if this is the first valid Context line and ctx is still 0,\n // trigger a reseed to catch any earlier log entries we might have missed.\n if (!initialSeedDone && last.contextCount === 0) {\n initialSeedDone = true;\n setTimeout(() => {\n seedLastContextFromLog({ maxScanBytes: 8 * 1024 * 1024, preferStreaming: false });\n }, 100).unref();\n }\n\n if (line.includes('[Agent] Streaming result')) {\n updateLastFromContext(ctx, true, tsMs);\n }\n\n const now = Date.now();\n if (compactionChanged || now - lastRenderAt >= MIN_RENDER_INTERVAL_MS) {\n lastRenderAt = now;\n renderNow();\n }\n\n if (compactionEnded) {\n setTimeout(() => {\n seedLastContextFromLog({ maxScanBytes: 8 * 1024 * 1024, preferStreaming: false });\n }, 250).unref();\n }\n }\n });\n\n const stop = () => {\n try { tail.kill('SIGTERM'); } catch {}\n process.exit(0);\n };\n\n process.on('SIGTERM', stop);\n process.on('SIGINT', stop);\n process.on('SIGHUP', stop);\n}\n\nmain().catch(() => {});\n`;\n}\n\nfunction generateStatuslineWrapperScript(\n execTargetPath: string,\n monitorScriptPath: string,\n sessionsScriptPath?: string,\n): string {\n return generateStatuslineWrapperScriptBun(execTargetPath, monitorScriptPath, sessionsScriptPath);\n}\n\nfunction generateStatuslineWrapperScriptBun(\n execTargetPath: string,\n monitorScriptPath: string,\n sessionsScriptPath?: string,\n): string {\n const execTargetJson = JSON.stringify(execTargetPath);\n const monitorScriptJson = JSON.stringify(monitorScriptPath);\n const sessionsScriptJson = sessionsScriptPath ? JSON.stringify(sessionsScriptPath) : \"null\";\n\n // Notes:\n // - Requires Bun >= 1.3.5 (Bun.Terminal via Bun.spawn({ terminal }))\n // - Keep dependencies zero; this file is written as a standalone executable.\n return `#!/usr/bin/env bun\n// Droid with Statusline (Bun PTY proxy)\n// Auto-generated by droid-patch --statusline\n\nconst EXEC_TARGET = ${execTargetJson};\nconst STATUSLINE_MONITOR = ${monitorScriptJson};\nconst SESSIONS_SCRIPT = ${sessionsScriptJson};\n\nconst IS_APPLE_TERMINAL = process.env.TERM_PROGRAM === \"Apple_Terminal\";\nconst MIN_RENDER_INTERVAL_MS = IS_APPLE_TERMINAL ? 
800 : 400;\nconst QUIET_MS = 50;\nconst FORCE_REPAINT_INTERVAL_MS = 2000;\nconst RESERVED_ROWS = 1;\n\nconst BYPASS_FLAGS = new Set([\"--help\", \"-h\", \"--version\", \"-V\"]);\nconst BYPASS_COMMANDS = new Set([\"help\", \"version\", \"completion\", \"completions\", \"exec\"]);\n\nfunction shouldPassthrough(argv) {\n for (const a of argv) {\n if (a === \"--\") break;\n if (BYPASS_FLAGS.has(a)) return true;\n }\n let endOpts = false;\n let cmd = null;\n for (const a of argv) {\n if (a === \"--\") {\n endOpts = true;\n continue;\n }\n if (!endOpts && a.startsWith(\"-\")) continue;\n cmd = a;\n break;\n }\n return cmd && BYPASS_COMMANDS.has(cmd);\n}\n\nfunction isSessionsCommand(argv) {\n for (const a of argv) {\n if (a === \"--\") return false;\n if (a === \"--sessions\") return true;\n }\n return false;\n}\n\nasync function execPassthrough(argv) {\n const proc = Bun.spawn([EXEC_TARGET, ...argv], {\n stdin: \"inherit\",\n stdout: \"inherit\",\n stderr: \"inherit\",\n });\n const code = await proc.exited;\n process.exit(code ?? 0);\n}\n\nasync function runSessions() {\n if (SESSIONS_SCRIPT) {\n const proc = Bun.spawn([\"node\", String(SESSIONS_SCRIPT)], {\n stdin: \"inherit\",\n stdout: \"inherit\",\n stderr: \"inherit\",\n });\n const code = await proc.exited;\n process.exit(code ?? 0);\n }\n process.stderr.write(\"[statusline] sessions script not found\\\\n\");\n process.exit(1);\n}\n\nfunction writeStdout(s) {\n try {\n process.stdout.write(s);\n } catch {\n // ignore\n }\n}\n\nfunction termSize() {\n const rows = Number(process.stdout.rows || 24);\n const cols = Number(process.stdout.columns || 80);\n return { rows: Number.isFinite(rows) ? rows : 24, cols: Number.isFinite(cols) ? cols : 80 };\n}\n\nconst ANSI_RE = /\\\\x1b\\\\[[0-9;]*m/g;\nconst RESET_SGR = \"\\\\x1b[0m\";\n\nfunction visibleWidth(text) {\n return String(text || \"\").replace(ANSI_RE, \"\").length;\n}\n\nfunction clampAnsi(text, cols) {\n if (!cols || cols <= 0) return String(text || \"\");\n cols = cols > 1 ? 
cols - 1 : cols; // avoid last-column wrap\n if (cols < 10) return String(text || \"\");\n const s = String(text || \"\");\n let visible = 0;\n let i = 0;\n const out = [];\n while (i < s.length) {\n const ch = s[i];\n if (ch === \"\\\\x1b\") {\n const m = s.indexOf(\"m\", i);\n if (m !== -1) {\n out.push(s.slice(i, m + 1));\n i = m + 1;\n continue;\n }\n out.push(ch);\n i += 1;\n continue;\n }\n if (visible >= cols) break;\n out.push(ch);\n i += 1;\n visible += 1;\n }\n if (i < s.length && cols >= 1) {\n if (visible >= cols) {\n if (out.length) out[out.length - 1] = \"…\";\n else out.push(\"…\");\n } else {\n out.push(\"…\");\n }\n out.push(RESET_SGR);\n }\n return out.join(\"\");\n}\n\nfunction splitSegments(text) {\n if (!text) return [];\n const s = String(text);\n const segments = [];\n let start = 0;\n while (true) {\n const idx = s.indexOf(RESET_SGR, start);\n if (idx === -1) {\n const tail = s.slice(start);\n if (tail) segments.push(tail);\n break;\n }\n const seg = s.slice(start, idx + RESET_SGR.length);\n if (seg) segments.push(seg);\n start = idx + RESET_SGR.length;\n }\n return segments;\n}\n\nfunction wrapSegments(segments, cols) {\n if (!segments || segments.length === 0) return [\"\"];\n if (!cols || cols <= 0) return [segments.join(\"\")];\n\n const lines = [];\n let cur = [];\n let curW = 0;\n\n for (let seg of segments) {\n let segW = visibleWidth(seg);\n if (segW <= 0) continue;\n\n if (cur.length === 0) {\n if (segW > cols) {\n seg = clampAnsi(seg, cols);\n segW = visibleWidth(seg);\n }\n cur = [seg];\n curW = segW;\n continue;\n }\n\n if (curW + segW <= cols) {\n cur.push(seg);\n curW += segW;\n } else {\n lines.push(cur.join(\"\"));\n if (segW > cols) {\n seg = clampAnsi(seg, cols);\n segW = visibleWidth(seg);\n }\n cur = [seg];\n curW = segW;\n }\n }\n\n if (cur.length) lines.push(cur.join(\"\"));\n return lines.length ? lines : [\"\"];\n}\n\nclass StatusRenderer {\n constructor() {\n this.raw = \"\";\n this.segments = [];\n this.lines = [\"\"];\n this.activeReservedRows = RESERVED_ROWS;\n this.force = false;\n this.urgent = false;\n this.lastRenderMs = 0;\n this.lastChildOutMs = 0;\n this.cursorVisible = true;\n }\n noteChildOutput() {\n this.lastChildOutMs = Date.now();\n }\n setCursorVisible(v) {\n this.cursorVisible = !!v;\n }\n forceRepaint(urgent = false) {\n this.force = true;\n if (urgent) this.urgent = true;\n }\n setActiveReservedRows(n) {\n const v = Number(n || 1);\n this.activeReservedRows = Number.isFinite(v) ? Math.max(1, Math.trunc(v)) : 1;\n }\n setLine(line) {\n const next = String(line || \"\");\n if (next !== this.raw) {\n this.raw = next;\n this.segments = splitSegments(next);\n this.force = true;\n }\n }\n desiredReservedRows(physicalRows, cols, minReserved) {\n let rows = Number(physicalRows || 24);\n rows = Number.isFinite(rows) ? rows : 24;\n cols = Number(cols || 80);\n cols = Number.isFinite(cols) ? cols : 80;\n\n const maxReserved = Math.max(1, rows - 4);\n const segs = this.segments.length ? this.segments : (this.raw ? [this.raw] : []);\n let lines = segs.length ? 
wrapSegments(segs, cols) : [\"\"];\n\n const needed = Math.min(lines.length, maxReserved);\n let desired = Math.max(Number(minReserved || 1), needed);\n desired = Math.min(desired, maxReserved);\n\n if (lines.length < desired) lines = new Array(desired - lines.length).fill(\"\").concat(lines);\n if (lines.length > desired) lines = lines.slice(-desired);\n\n this.lines = lines;\n return desired;\n }\n clearReservedArea(physicalRows, cols, reservedRows, restoreRow = 1, restoreCol = 1) {\n let rows = Number(physicalRows || 24);\n rows = Number.isFinite(rows) ? rows : 24;\n cols = Number(cols || 80);\n cols = Number.isFinite(cols) ? cols : 80;\n let reserved = Number(reservedRows || 1);\n reserved = Number.isFinite(reserved) ? Math.max(1, Math.trunc(reserved)) : 1;\n\n reserved = Math.min(reserved, rows);\n const startRow = rows - reserved + 1;\n const parts = [\"\\\\x1b[?2026h\", \"\\\\x1b[?25l\", RESET_SGR];\n for (let i = 0; i < reserved; i++) parts.push(\"\\\\x1b[\" + (startRow + i) + \";1H\\\\x1b[2K\");\n parts.push(\"\\\\x1b[\" + restoreRow + \";\" + restoreCol + \"H\");\n parts.push(this.cursorVisible ? \"\\\\x1b[?25h\" : \"\\\\x1b[?25l\");\n parts.push(\"\\\\x1b[?2026l\");\n writeStdout(parts.join(\"\"));\n }\n render(physicalRows, cols, restoreRow = 1, restoreCol = 1) {\n if (!this.force) return;\n if (!this.raw) {\n this.force = false;\n this.urgent = false;\n return;\n }\n const now = Date.now();\n if (!this.urgent && now - this.lastRenderMs < MIN_RENDER_INTERVAL_MS) return;\n if (!this.urgent && QUIET_MS > 0 && now - this.lastChildOutMs < QUIET_MS) return;\n\n let rows = Number(physicalRows || 24);\n rows = Number.isFinite(rows) ? rows : 24;\n cols = Number(cols || 80);\n cols = Number.isFinite(cols) ? cols : 80;\n if (cols <= 0) cols = 80;\n\n const reserved = Math.max(1, Math.min(this.activeReservedRows, Math.max(1, rows - 4)));\n const startRow = rows - reserved + 1;\n const childRows = rows - reserved;\n\n let lines = this.lines.length ? this.lines.slice() : [\"\"];\n if (lines.length < reserved) lines = new Array(reserved - lines.length).fill(\"\").concat(lines);\n if (lines.length > reserved) lines = lines.slice(-reserved);\n\n const parts = [\"\\\\x1b[?2026h\", \"\\\\x1b[?25l\"];\n parts.push(\"\\\\x1b[1;\" + childRows + \"r\");\n for (let i = 0; i < reserved; i++) {\n const row = startRow + i;\n const text = clampAnsi(lines[i], cols);\n parts.push(\"\\\\x1b[\" + row + \";1H\" + RESET_SGR + \"\\\\x1b[2K\");\n parts.push(\"\\\\x1b[\" + row + \";1H\" + text + RESET_SGR);\n }\n parts.push(\"\\\\x1b[\" + restoreRow + \";\" + restoreCol + \"H\");\n parts.push(this.cursorVisible ? 
\"\\\\x1b[?25h\" : \"\\\\x1b[?25l\");\n parts.push(\"\\\\x1b[?2026l\");\n writeStdout(parts.join(\"\"));\n\n this.lastRenderMs = now;\n this.force = false;\n this.urgent = false;\n }\n clear() {\n const { rows, cols } = termSize();\n this.clearReservedArea(rows, cols, Math.max(this.activeReservedRows, RESERVED_ROWS));\n }\n}\n\nclass OutputRewriter {\n constructor() {\n this.buf = new Uint8Array(0);\n }\n feed(chunk, maxRow) {\n if (!chunk || chunk.length === 0) return chunk;\n const merged = new Uint8Array(this.buf.length + chunk.length);\n merged.set(this.buf, 0);\n merged.set(chunk, this.buf.length);\n this.buf = new Uint8Array(0);\n\n const out = [];\n let i = 0;\n\n const isFinal = (v) => v >= 0x40 && v <= 0x7e;\n\n while (i < merged.length) {\n const b = merged[i];\n if (b !== 0x1b) {\n out.push(b);\n i += 1;\n continue;\n }\n if (i + 1 >= merged.length) {\n this.buf = merged.slice(i);\n break;\n }\n const nxt = merged[i + 1];\n if (nxt !== 0x5b) {\n out.push(b);\n i += 1;\n continue;\n }\n\n let j = i + 2;\n while (j < merged.length && !isFinal(merged[j])) j += 1;\n if (j >= merged.length) {\n this.buf = merged.slice(i);\n break;\n }\n const final = merged[j];\n let seq = merged.slice(i, j + 1);\n\n if ((final === 0x48 || final === 0x66) && maxRow > 0) {\n const params = merged.slice(i + 2, j);\n const s = new TextDecoder().decode(params);\n if (!s || /^[0-9;]/.test(s)) {\n const parts = s ? s.split(\";\") : [];\n const row = Number(parts[0] || 1);\n const col = Number(parts[1] || 1);\n let r = Number.isFinite(row) ? row : 1;\n let c = Number.isFinite(col) ? col : 1;\n if (r === 999 || r > maxRow) r = maxRow;\n if (r < 1) r = 1;\n if (c < 1) c = 1;\n const newParams = new TextEncoder().encode(String(r) + \";\" + String(c));\n const ns = new Uint8Array(2 + newParams.length + 1);\n ns[0] = 0x1b;\n ns[1] = 0x5b;\n ns.set(newParams, 2);\n ns[ns.length - 1] = final;\n seq = ns;\n }\n } else if (final === 0x72 && maxRow > 0) {\n const params = merged.slice(i + 2, j);\n const s = new TextDecoder().decode(params);\n if (!s || /^[0-9;]/.test(s)) {\n const parts = s ? s.split(\";\") : [];\n const top = Number(parts[0] || 1);\n const bottom = Number(parts[1] || maxRow);\n let t = Number.isFinite(top) ? top : 1;\n let btm = Number.isFinite(bottom) ? bottom : maxRow;\n if (t <= 0) t = 1;\n if (btm <= 0 || btm === 999 || btm > maxRow) btm = maxRow;\n if (t > btm) t = 1;\n const str = \"\\\\x1b[\" + String(t) + \";\" + String(btm) + \"r\";\n seq = new TextEncoder().encode(str);\n }\n }\n\n for (const bb of seq) out.push(bb);\n i = j + 1;\n }\n\n return new Uint8Array(out);\n }\n}\n\nclass CursorTracker {\n constructor() {\n this.row = 1;\n this.col = 1;\n this.savedRow = 1;\n this.savedCol = 1;\n this.buf = new Uint8Array(0);\n this.inOsc = false;\n this.utf8Cont = 0;\n this.wrapPending = false;\n }\n position() {\n return { row: this.row, col: this.col };\n }\n feed(chunk, maxRow, maxCol) {\n if (!chunk || chunk.length === 0) return;\n maxRow = Math.max(1, Number(maxRow || 1));\n maxCol = Math.max(1, Number(maxCol || 1));\n\n const merged = new Uint8Array(this.buf.length + chunk.length);\n merged.set(this.buf, 0);\n merged.set(chunk, this.buf.length);\n this.buf = new Uint8Array(0);\n\n const clamp = () => {\n if (this.row < 1) this.row = 1;\n else if (this.row > maxRow) this.row = maxRow;\n if (this.col < 1) this.col = 1;\n else if (this.col > maxCol) this.col = maxCol;\n };\n\n const parseIntDefault = (v, d) => {\n const n = Number(v);\n return Number.isFinite(n) && n > 0 ? 
Math.trunc(n) : d;\n };\n\n let i = 0;\n const isFinal = (v) => v >= 0x40 && v <= 0x7e;\n\n while (i < merged.length) {\n const b = merged[i];\n\n if (this.inOsc) {\n if (b === 0x07) {\n this.inOsc = false;\n i += 1;\n continue;\n }\n if (b === 0x1b) {\n if (i + 1 >= merged.length) {\n this.buf = merged.slice(i);\n break;\n }\n if (merged[i + 1] === 0x5c) {\n this.inOsc = false;\n i += 2;\n continue;\n }\n }\n i += 1;\n continue;\n }\n\n if (this.utf8Cont > 0) {\n if (b >= 0x80 && b <= 0xbf) {\n this.utf8Cont -= 1;\n i += 1;\n continue;\n }\n this.utf8Cont = 0;\n }\n\n if (b === 0x1b) {\n this.wrapPending = false;\n if (i + 1 >= merged.length) {\n this.buf = merged.slice(i);\n break;\n }\n const nxt = merged[i + 1];\n\n if (nxt === 0x5b) {\n let j = i + 2;\n while (j < merged.length && !isFinal(merged[j])) j += 1;\n if (j >= merged.length) {\n this.buf = merged.slice(i);\n break;\n }\n const final = merged[j];\n const params = merged.slice(i + 2, j);\n const s = new TextDecoder().decode(params);\n if (s && !/^[0-9;]/.test(s)) {\n i = j + 1;\n continue;\n }\n const parts = s ? s.split(\";\") : [];\n const p0 = parseIntDefault(parts[0] || \"\", 1);\n const p1 = parseIntDefault(parts[1] || \"\", 1);\n\n if (final === 0x48 || final === 0x66) {\n this.row = p0;\n this.col = p1;\n clamp();\n } else if (final === 0x41) {\n this.row = Math.max(1, this.row - p0);\n } else if (final === 0x42) {\n this.row = Math.min(maxRow, this.row + p0);\n } else if (final === 0x43) {\n this.col = Math.min(maxCol, this.col + p0);\n } else if (final === 0x44) {\n this.col = Math.max(1, this.col - p0);\n } else if (final === 0x45) {\n this.row = Math.min(maxRow, this.row + p0);\n this.col = 1;\n } else if (final === 0x46) {\n this.row = Math.max(1, this.row - p0);\n this.col = 1;\n } else if (final === 0x47) {\n this.col = p0;\n clamp();\n } else if (final === 0x64) {\n this.row = p0;\n clamp();\n } else if (final === 0x72) {\n this.row = 1;\n this.col = 1;\n } else if (final === 0x73) {\n this.savedRow = this.row;\n this.savedCol = this.col;\n } else if (final === 0x75) {\n this.row = this.savedRow;\n this.col = this.savedCol;\n clamp();\n }\n\n i = j + 1;\n continue;\n }\n\n if (nxt === 0x5d || nxt === 0x50 || nxt === 0x5e || nxt === 0x5f || nxt === 0x58) {\n this.inOsc = true;\n i += 2;\n continue;\n }\n\n if (nxt === 0x37) {\n this.savedRow = this.row;\n this.savedCol = this.col;\n i += 2;\n continue;\n }\n if (nxt === 0x38) {\n this.row = this.savedRow;\n this.col = this.savedCol;\n clamp();\n i += 2;\n continue;\n }\n\n i += 2;\n continue;\n }\n\n if (b === 0x0d) {\n this.col = 1;\n this.wrapPending = false;\n i += 1;\n continue;\n }\n if (b === 0x0a || b === 0x0b || b === 0x0c) {\n this.row = Math.min(maxRow, this.row + 1);\n this.wrapPending = false;\n i += 1;\n continue;\n }\n if (b === 0x08) {\n this.col = Math.max(1, this.col - 1);\n this.wrapPending = false;\n i += 1;\n continue;\n }\n if (b === 0x09) {\n const nextStop = Math.floor((this.col - 1) / 8 + 1) * 8 + 1;\n this.col = Math.min(maxCol, nextStop);\n this.wrapPending = false;\n i += 1;\n continue;\n }\n if (b < 0x20 || b === 0x7f) {\n i += 1;\n continue;\n }\n\n if (this.wrapPending) {\n this.row = Math.min(maxRow, this.row + 1);\n this.col = 1;\n this.wrapPending = false;\n }\n\n if (b >= 0x80) {\n if ((b & 0xe0) === 0xc0) this.utf8Cont = 1;\n else if ((b & 0xf0) === 0xe0) this.utf8Cont = 2;\n else if ((b & 0xf8) === 0xf0) this.utf8Cont = 3;\n else this.utf8Cont = 0;\n }\n\n if (this.col < maxCol) this.col += 1;\n else {\n this.col = maxCol;\n 
this.wrapPending = true;\n }\n i += 1;\n }\n }\n}\n\nasync function main() {\n const argv = process.argv.slice(2);\n\n if (isSessionsCommand(argv)) await runSessions();\n\n if (!process.stdin.isTTY || !process.stdout.isTTY || shouldPassthrough(argv)) {\n await execPassthrough(argv);\n return;\n }\n\n // Clean viewport.\n writeStdout(\"\\\\x1b[?2026h\\\\x1b[0m\\\\x1b[r\\\\x1b[2J\\\\x1b[H\\\\x1b[?2026l\");\n\n const renderer = new StatusRenderer();\n renderer.setLine(\"\\\\x1b[48;5;238m\\\\x1b[38;5;15m Statusline: starting… \\\\x1b[0m\");\n renderer.forceRepaint(true);\n\n let { rows: physicalRows, cols: physicalCols } = termSize();\n let effectiveReservedRows = renderer.desiredReservedRows(physicalRows, physicalCols, RESERVED_ROWS);\n renderer.setActiveReservedRows(effectiveReservedRows);\n let childRows = Math.max(4, physicalRows - effectiveReservedRows);\n let childCols = Math.max(10, physicalCols);\n\n // Reserve the bottom rows early, before the child starts writing.\n writeStdout(\n \"\\\\x1b[?2026h\\\\x1b[?25l\\\\x1b[1;\" + childRows + \"r\\\\x1b[1;1H\\\\x1b[?25h\\\\x1b[?2026l\",\n );\n renderer.forceRepaint(true);\n renderer.render(physicalRows, physicalCols, 1, 1);\n\n // Spawn child with terminal support.\n let child;\n try {\n child = Bun.spawn([EXEC_TARGET, ...argv], {\n cwd: process.cwd(),\n env: process.env,\n detached: true,\n terminal: {\n cols: childCols,\n rows: childRows,\n data(_terminal, data) {\n onChildData(data);\n },\n },\n onExit(_proc, exitCode, signal, _error) {\n onChildExit(exitCode, signal);\n },\n });\n } catch (e) {\n process.stderr.write(\"[statusline] failed to spawn child: \" + String(e?.message || e) + \"\\\\n\");\n process.exit(1);\n }\n\n const terminal = child.terminal;\n\n // Best-effort PGID resolution (matches Python wrapper behavior).\n // This improves session resolution (ps/lsof scanning) and signal forwarding.\n let pgid = child.pid;\n try {\n const res = Bun.spawnSync([\"ps\", \"-o\", \"pgid=\", \"-p\", String(child.pid)], {\n stdin: \"ignore\",\n stdout: \"pipe\",\n stderr: \"ignore\",\n });\n if (res && res.exitCode === 0 && res.stdout) {\n const text = new TextDecoder().decode(res.stdout).trim();\n const n = Number(text);\n if (Number.isFinite(n) && n > 0) pgid = Math.trunc(n);\n }\n } catch {}\n\n // Spawn monitor (Node).\n const monitorEnv = { ...process.env, DROID_STATUSLINE_PGID: String(pgid) };\n const monitor = Bun.spawn([\"node\", STATUSLINE_MONITOR, ...argv], {\n stdin: \"ignore\",\n stdout: \"pipe\",\n stderr: \"ignore\",\n env: monitorEnv,\n });\n\n let shouldStop = false;\n const rewriter = new OutputRewriter();\n const cursor = new CursorTracker();\n\n let detectBuf = new Uint8Array(0);\n let detectStr = \"\";\n let cursorVisible = true;\n let scrollRegionDirty = true;\n let lastForceRepaintMs = Date.now();\n let lastPhysicalRows = 0;\n let lastPhysicalCols = 0;\n\n function appendDetect(chunk) {\n const max = 128;\n const merged = new Uint8Array(Math.min(max, detectBuf.length + chunk.length));\n const takePrev = Math.max(0, merged.length - chunk.length);\n if (takePrev > 0) merged.set(detectBuf.slice(Math.max(0, detectBuf.length - takePrev)), 0);\n merged.set(chunk.slice(Math.max(0, chunk.length - (merged.length - takePrev))), takePrev);\n detectBuf = merged;\n try {\n detectStr = Buffer.from(detectBuf).toString(\"latin1\");\n } catch {\n detectStr = \"\";\n }\n }\n\n function includesBytes(needle) {\n return detectStr.includes(needle);\n }\n\n function lastIndexOfBytes(needle) {\n return detectStr.lastIndexOf(needle);\n }\n\n 
function includesScrollRegionCSI() {\n return /\\\\x1b\\\\[[0-9]*;?[0-9]*r/.test(detectStr);\n }\n\n function updateCursorVisibility() {\n const show = includesBytes(\"\\\\x1b[?25h\");\n const hide = includesBytes(\"\\\\x1b[?25l\");\n if (show || hide) {\n // best-effort: if both present, whichever appears later \"wins\"\n const h = lastIndexOfBytes(\"\\\\x1b[?25h\");\n const l = lastIndexOfBytes(\"\\\\x1b[?25l\");\n cursorVisible = h > l;\n renderer.setCursorVisible(cursorVisible);\n }\n }\n\n function needsScrollRegionReset() {\n return (\n includesBytes(\"\\\\x1b[?1049\") ||\n includesBytes(\"\\\\x1b[?1047\") ||\n includesBytes(\"\\\\x1b[?47\") ||\n includesBytes(\"\\\\x1b[J\") ||\n includesBytes(\"\\\\x1b[0J\") ||\n includesBytes(\"\\\\x1b[1J\") ||\n includesBytes(\"\\\\x1b[2J\") ||\n includesBytes(\"\\\\x1b[3J\") ||\n includesBytes(\"\\\\x1b[r\") ||\n includesScrollRegionCSI()\n );\n }\n\n function onChildData(data) {\n if (shouldStop) return;\n const chunk = data instanceof Uint8Array ? data : new Uint8Array(data);\n appendDetect(chunk);\n if (needsScrollRegionReset()) scrollRegionDirty = true;\n updateCursorVisibility();\n\n renderer.noteChildOutput();\n const rewritten = rewriter.feed(chunk, childRows);\n cursor.feed(rewritten, childRows, childCols);\n writeStdout(Buffer.from(rewritten));\n }\n\n function onChildExit(exitCode, signal) {\n if (shouldStop) return;\n shouldStop = true;\n const code = exitCode ?? (signal != null ? 128 + signal : 0);\n cleanup().finally(() => process.exit(code));\n }\n\n async function readMonitor() {\n if (!monitor.stdout) return;\n const reader = monitor.stdout.getReader();\n let buf = \"\";\n while (!shouldStop) {\n const { value, done } = await reader.read();\n if (done || !value) break;\n buf += new TextDecoder().decode(value);\n while (true) {\n const idx = buf.indexOf(\"\\\\n\");\n if (idx === -1) break;\n const line = buf.slice(0, idx).replace(/\\\\r$/, \"\");\n buf = buf.slice(idx + 1);\n if (!line) continue;\n renderer.setLine(line);\n renderer.forceRepaint(false);\n }\n }\n }\n readMonitor().catch(() => {});\n\n function repaintStatusline(forceUrgent = false) {\n const { row, col } = cursor.position();\n let r = Math.max(1, Math.min(childRows, row));\n let c = Math.max(1, Math.min(childCols, col));\n\n if (scrollRegionDirty) {\n const seq =\n \"\\\\x1b[?2026h\\\\x1b[?25l\\\\x1b[1;\" +\n childRows +\n \"r\\\\x1b[\" +\n r +\n \";\" +\n c +\n \"H\" +\n (cursorVisible ? 
\"\\\\x1b[?25h\" : \"\\\\x1b[?25l\") +\n \"\\\\x1b[?2026l\";\n writeStdout(seq);\n scrollRegionDirty = false;\n }\n\n renderer.forceRepaint(forceUrgent);\n renderer.render(physicalRows, physicalCols, r, c);\n }\n\n function handleSizeChange(nextRows, nextCols, forceUrgent = false) {\n physicalRows = nextRows;\n physicalCols = nextCols;\n\n const desired = renderer.desiredReservedRows(physicalRows, physicalCols, RESERVED_ROWS);\n const { row, col } = cursor.position();\n if (desired < effectiveReservedRows) {\n renderer.clearReservedArea(physicalRows, physicalCols, effectiveReservedRows, row, col);\n }\n effectiveReservedRows = desired;\n renderer.setActiveReservedRows(effectiveReservedRows);\n\n childRows = Math.max(4, physicalRows - effectiveReservedRows);\n childCols = Math.max(10, physicalCols);\n try {\n terminal.resize(childCols, childRows);\n } catch {}\n try {\n process.kill(-child.pid, \"SIGWINCH\");\n } catch {\n try { process.kill(child.pid, \"SIGWINCH\"); } catch {}\n }\n\n scrollRegionDirty = true;\n renderer.forceRepaint(true);\n repaintStatusline(forceUrgent);\n }\n\n process.on(\"SIGWINCH\", () => {\n const next = termSize();\n handleSizeChange(next.rows, next.cols, true);\n });\n\n // Forward signals to child's process group when possible.\n const forward = (sig) => {\n try {\n process.kill(-pgid, sig);\n } catch {\n try {\n process.kill(child.pid, sig);\n } catch {}\n }\n };\n for (const s of [\"SIGTERM\", \"SIGINT\", \"SIGHUP\"]) {\n try {\n process.on(s, () => forward(s));\n } catch {}\n }\n\n // Raw stdin -> PTY.\n try {\n process.stdin.setRawMode(true);\n } catch {}\n process.stdin.resume();\n process.stdin.on(\"data\", (buf) => {\n try {\n if (typeof buf === \"string\") terminal.write(buf);\n else {\n // Prefer bytes when supported; fall back to UTF-8 decoding.\n try {\n // Bun.Terminal.write may accept Uint8Array in newer versions.\n terminal.write(buf);\n } catch {\n terminal.write(new TextDecoder().decode(buf));\n }\n }\n } catch {}\n });\n\n const tick = setInterval(() => {\n if (shouldStop) return;\n const next = termSize();\n const sizeChanged = next.rows !== lastPhysicalRows || next.cols !== lastPhysicalCols;\n const desired = renderer.desiredReservedRows(next.rows, next.cols, RESERVED_ROWS);\n if (sizeChanged || desired !== effectiveReservedRows) {\n handleSizeChange(next.rows, next.cols, true);\n lastPhysicalRows = next.rows;\n lastPhysicalCols = next.cols;\n lastForceRepaintMs = Date.now();\n return;\n }\n const now = Date.now();\n if (now - lastForceRepaintMs >= FORCE_REPAINT_INTERVAL_MS) {\n repaintStatusline(false);\n lastForceRepaintMs = now;\n } else {\n const { row, col } = cursor.position();\n renderer.render(physicalRows, physicalCols, row, col);\n }\n }, 50);\n\n async function cleanup() {\n clearInterval(tick);\n try {\n process.stdin.setRawMode(false);\n } catch {}\n try {\n const { row, col } = cursor.position();\n renderer.clearReservedArea(physicalRows, physicalCols, effectiveReservedRows, row, col);\n } catch {}\n try {\n writeStdout(\"\\\\x1b[r\\\\x1b[0m\\\\x1b[?25h\");\n } catch {}\n try {\n monitor.kill();\n } catch {}\n try {\n terminal.close();\n } catch {}\n }\n\n // Keep process alive until child exits.\n await child.exited;\n await cleanup();\n}\n\nmain().catch(() => process.exit(1));\n`;\n}\n\nexport async function createStatuslineFiles(\n outputDir: string,\n execTargetPath: string,\n aliasName: string,\n sessionsScriptPath?: string,\n): Promise<{ wrapperScript: string; monitorScript: string }> {\n if (!existsSync(outputDir)) {\n 
await mkdir(outputDir, { recursive: true });\n }\n\n const monitorScriptPath = join(outputDir, `${aliasName}-statusline.js`);\n const wrapperScriptPath = join(outputDir, aliasName);\n\n await writeFile(monitorScriptPath, generateStatuslineMonitorScript());\n await chmod(monitorScriptPath, 0o755);\n\n const wrapper = generateStatuslineWrapperScript(\n execTargetPath,\n monitorScriptPath,\n sessionsScriptPath,\n );\n\n await writeFile(wrapperScriptPath, wrapper);\n await chmod(wrapperScriptPath, 0o755);\n\n return { wrapperScript: wrapperScriptPath, monitorScript: monitorScriptPath };\n}\n","import { chmod, mkdir, writeFile } from \"node:fs/promises\";\nimport { existsSync } from \"node:fs\";\nimport { join } from \"node:path\";\n\n/**\n * Generate sessions browser script (Node.js)\n */\nfunction generateSessionsBrowserScript(aliasName: string): string {\n const aliasJson = JSON.stringify(aliasName);\n return `#!/usr/bin/env node\n// Droid Sessions Browser - Interactive selector\n// Auto-generated by droid-patch\n\nconst fs = require('fs');\nconst path = require('path');\nconst readline = require('readline');\nconst { execSync, spawn } = require('child_process');\n\nconst FACTORY_HOME = path.join(require('os').homedir(), '.factory');\nconst SESSIONS_ROOT = path.join(FACTORY_HOME, 'sessions');\nconst ALIAS_NAME = ${aliasJson};\n\n// ANSI\nconst CYAN = '\\\\x1b[36m';\nconst GREEN = '\\\\x1b[32m';\nconst YELLOW = '\\\\x1b[33m';\nconst RED = '\\\\x1b[31m';\nconst DIM = '\\\\x1b[2m';\nconst RESET = '\\\\x1b[0m';\nconst BOLD = '\\\\x1b[1m';\nconst CLEAR = '\\\\x1b[2J\\\\x1b[H';\nconst HIDE_CURSOR = '\\\\x1b[?25l';\nconst SHOW_CURSOR = '\\\\x1b[?25h';\n\nfunction sanitizePath(p) {\n return p.replace(/:/g, '').replace(/[\\\\\\\\/]/g, '-');\n}\n\nfunction parseSessionFile(jsonlPath, settingsPath) {\n const sessionId = path.basename(jsonlPath, '.jsonl');\n const stats = fs.statSync(jsonlPath);\n \n const result = {\n id: sessionId,\n title: '',\n mtime: stats.mtimeMs,\n model: '',\n firstUserMsg: '',\n lastUserMsg: '',\n messageCount: 0,\n lastTimestamp: '',\n };\n\n try {\n const content = fs.readFileSync(jsonlPath, 'utf-8');\n const lines = content.split('\\\\n').filter(l => l.trim());\n const userMessages = [];\n \n for (const line of lines) {\n try {\n const obj = JSON.parse(line);\n if (obj.type === 'session_start') {\n result.title = obj.title || '';\n } else if (obj.type === 'message') {\n result.messageCount++;\n if (obj.timestamp) result.lastTimestamp = obj.timestamp;\n \n const msg = obj.message || {};\n if (msg.role === 'user' && Array.isArray(msg.content)) {\n for (const c of msg.content) {\n if (c && c.type === 'text' && c.text && !c.text.startsWith('<system-reminder>')) {\n userMessages.push(c.text.slice(0, 150).replace(/\\\\n/g, ' ').trim());\n break;\n }\n }\n }\n }\n } catch {}\n }\n \n if (userMessages.length > 0) {\n result.firstUserMsg = userMessages[0];\n result.lastUserMsg = userMessages.length > 1 ? 
userMessages[userMessages.length - 1] : '';\n }\n } catch {}\n\n if (fs.existsSync(settingsPath)) {\n try {\n const settings = JSON.parse(fs.readFileSync(settingsPath, 'utf-8'));\n result.model = settings.model || '';\n } catch {}\n }\n\n return result;\n}\n\nfunction collectSessions() {\n const cwd = process.cwd();\n const cwdSanitized = sanitizePath(cwd);\n const sessions = [];\n\n if (!fs.existsSync(SESSIONS_ROOT)) return sessions;\n\n for (const wsDir of fs.readdirSync(SESSIONS_ROOT)) {\n if (wsDir !== cwdSanitized) continue;\n \n const wsPath = path.join(SESSIONS_ROOT, wsDir);\n if (!fs.statSync(wsPath).isDirectory()) continue;\n\n for (const file of fs.readdirSync(wsPath)) {\n if (!file.endsWith('.jsonl')) continue;\n \n const sessionId = file.slice(0, -6);\n const jsonlPath = path.join(wsPath, file);\n const settingsPath = path.join(wsPath, sessionId + '.settings.json');\n\n try {\n const session = parseSessionFile(jsonlPath, settingsPath);\n if (session.messageCount === 0 || !session.firstUserMsg) continue;\n sessions.push(session);\n } catch {}\n }\n }\n\n sessions.sort((a, b) => b.mtime - a.mtime);\n return sessions.slice(0, 50);\n}\n\nfunction formatTime(ts) {\n if (!ts) return '';\n try {\n const d = new Date(ts);\n return d.toLocaleString('zh-CN', { month: '2-digit', day: '2-digit', hour: '2-digit', minute: '2-digit' });\n } catch {\n return ts.slice(0, 16);\n }\n}\n\nfunction truncate(s, len) {\n if (!s) return '';\n s = s.replace(/\\\\n/g, ' ');\n return s.length > len ? s.slice(0, len - 3) + '...' : s;\n}\n\nfunction render(sessions, selected, offset, rows) {\n const cwd = process.cwd();\n const pageSize = rows - 6;\n const visible = sessions.slice(offset, offset + pageSize);\n \n let out = CLEAR;\n out += BOLD + 'Sessions: ' + RESET + DIM + cwd + RESET + '\\\\n';\n out += DIM + '[↑/↓] Select [Enter] Resume [q] Quit' + RESET + '\\\\n\\\\n';\n\n for (let i = 0; i < visible.length; i++) {\n const s = visible[i];\n const idx = offset + i;\n const isSelected = idx === selected;\n const prefix = isSelected ? 
GREEN + '▶ ' + RESET : ' ';\n \n const title = truncate(s.title || '(no title)', 35);\n const time = formatTime(s.lastTimestamp);\n const model = truncate(s.model, 20);\n \n if (isSelected) {\n out += prefix + YELLOW + title + RESET + '\\\\n';\n out += ' ' + DIM + 'ID: ' + RESET + CYAN + s.id + RESET + '\\\\n';\n out += ' ' + DIM + 'Last: ' + time + ' | Model: ' + model + ' | ' + s.messageCount + ' msgs' + RESET + '\\\\n';\n out += ' ' + DIM + 'First input: ' + RESET + truncate(s.firstUserMsg, 60) + '\\\\n';\n if (s.lastUserMsg && s.lastUserMsg !== s.firstUserMsg) {\n out += ' ' + DIM + 'Last input: ' + RESET + truncate(s.lastUserMsg, 60) + '\\\\n';\n }\n } else {\n out += prefix + title + DIM + ' (' + time + ')' + RESET + '\\\\n';\n }\n }\n\n out += '\\\\n' + DIM + 'Page ' + (Math.floor(offset / pageSize) + 1) + '/' + Math.ceil(sessions.length / pageSize) + ' (' + sessions.length + ' sessions)' + RESET;\n \n process.stdout.write(out);\n}\n\nasync function main() {\n const sessions = collectSessions();\n \n if (sessions.length === 0) {\n console.log(RED + 'No sessions with interactions found in current directory' + RESET);\n process.exit(0);\n }\n\n if (!process.stdin.isTTY) {\n for (const s of sessions) {\n console.log(s.id + ' ' + (s.title || '') + ' ' + formatTime(s.lastTimestamp));\n }\n process.exit(0);\n }\n\n const rows = process.stdout.rows || 24;\n const pageSize = rows - 6;\n let selected = 0;\n let offset = 0;\n\n function restoreTerminal() {\n try { process.stdout.write(SHOW_CURSOR); } catch {}\n try { process.stdin.setRawMode(false); } catch {}\n try { process.stdin.pause(); } catch {}\n }\n\n function clearScreen() {\n try { process.stdout.write(CLEAR); } catch {}\n }\n\n process.stdin.setRawMode(true);\n process.stdin.resume();\n process.stdout.write(HIDE_CURSOR);\n \n render(sessions, selected, offset, rows);\n\n const onKey = (key) => {\n const k = key.toString();\n \n if (k === 'q' || k === '\\\\x03') { // q or Ctrl+C\n restoreTerminal();\n clearScreen();\n process.exit(0);\n }\n \n if (k === '\\\\r' || k === '\\\\n') { // Enter\n // Stop reading input / stop reacting to arrow keys before handing off to droid.\n process.stdin.off('data', onKey);\n restoreTerminal();\n clearScreen();\n const session = sessions[selected];\n console.log(GREEN + 'Resuming session: ' + session.id + RESET);\n console.log(DIM + 'Using: ' + ALIAS_NAME + ' --resume ' + session.id + RESET + '\\\\n');\n\n // Avoid the sessions browser reacting to signals while droid is running.\n try { process.removeAllListeners('SIGINT'); } catch {}\n try { process.removeAllListeners('SIGTERM'); } catch {}\n try { process.on('SIGINT', () => {}); } catch {}\n try { process.on('SIGTERM', () => {}); } catch {}\n\n const child = spawn(ALIAS_NAME, ['--resume', session.id], { stdio: 'inherit' });\n child.on('exit', (code) => process.exit(code || 0));\n child.on('error', () => process.exit(1));\n return;\n }\n \n if (k === '\\\\x1b[A' || k === 'k') { // Up\n if (selected > 0) {\n selected--;\n if (selected < offset) offset = Math.max(0, offset - 1);\n }\n } else if (k === '\\\\x1b[B' || k === 'j') { // Down\n if (selected < sessions.length - 1) {\n selected++;\n if (selected >= offset + pageSize) offset++;\n }\n } else if (k === '\\\\x1b[5~') { // Page Up\n selected = Math.max(0, selected - pageSize);\n offset = Math.max(0, offset - pageSize);\n } else if (k === '\\\\x1b[6~') { // Page Down\n selected = Math.min(sessions.length - 1, selected + pageSize);\n offset = Math.min(Math.max(0, sessions.length - pageSize), offset 
+ pageSize);\n }\n \n render(sessions, selected, offset, rows);\n };\n\n process.stdin.on('data', onKey);\n\n process.on('SIGINT', () => {\n restoreTerminal();\n clearScreen();\n process.exit(0);\n });\n}\n\nmain();\n`;\n}\n\n/**\n * Create sessions browser script file\n */\nexport async function createSessionsScript(\n outputDir: string,\n aliasName: string,\n): Promise<{ sessionsScript: string }> {\n if (!existsSync(outputDir)) {\n await mkdir(outputDir, { recursive: true });\n }\n\n const sessionsScriptPath = join(outputDir, `${aliasName}-sessions.js`);\n\n await writeFile(sessionsScriptPath, generateSessionsBrowserScript(aliasName));\n await chmod(sessionsScriptPath, 0o755);\n\n return { sessionsScript: sessionsScriptPath };\n}\n","import bin from \"tiny-bin\";\nimport { styleText } from \"node:util\";\nimport { existsSync, readFileSync } from \"node:fs\";\nimport { join, dirname } from \"node:path\";\nimport { homedir } from \"node:os\";\nimport { fileURLToPath } from \"node:url\";\nimport { execSync } from \"node:child_process\";\nimport { patchDroid, type Patch } from \"./patcher.ts\";\nimport {\n createAlias,\n removeAlias,\n listAliases,\n createAliasForWrapper,\n clearAllAliases,\n removeAliasesByFilter,\n type FilterFlag,\n} from \"./alias.ts\";\nimport { createWebSearchUnifiedFiles } from \"./websearch-patch.ts\";\nimport { createStatuslineFiles } from \"./statusline-patch.ts\";\nimport { createSessionsScript } from \"./sessions-patch.ts\";\nimport {\n saveAliasMetadata,\n createMetadata,\n loadAliasMetadata,\n listAllMetadata,\n formatPatches,\n} from \"./metadata.ts\";\n\nconst __dirname = dirname(fileURLToPath(import.meta.url));\n\nfunction getVersion(): string {\n try {\n const pkgPath = join(__dirname, \"..\", \"package.json\");\n const pkg = JSON.parse(readFileSync(pkgPath, \"utf-8\"));\n return pkg.version || \"0.0.0\";\n } catch {\n return \"0.0.0\";\n }\n}\n\nconst version = getVersion();\n\nfunction getDroidVersion(droidPath: string): string | undefined {\n try {\n const result = execSync(`\"${droidPath}\" --version`, {\n encoding: \"utf-8\",\n stdio: [\"pipe\", \"pipe\", \"pipe\"],\n timeout: 5000,\n }).trim();\n // Parse version from output like \"droid 1.2.3\" or just \"1.2.3\"\n const match = result.match(/(\\d+\\.\\d+\\.\\d+)/);\n return match ? 
match[1] : result || undefined;\n } catch {\n return undefined;\n }\n}\n\nfunction findDefaultDroidPath(): string {\n const home = homedir();\n\n // Try `which droid` first to find droid in PATH\n try {\n const result = execSync(\"which droid\", {\n encoding: \"utf-8\",\n stdio: [\"pipe\", \"pipe\", \"pipe\"],\n }).trim();\n if (result && existsSync(result)) {\n return result;\n }\n } catch {\n // which command failed, continue with fallback paths\n }\n\n // Common installation paths\n const paths = [\n // Default sh install location\n join(home, \".droid\", \"bin\", \"droid\"),\n // Homebrew on Apple Silicon\n \"/opt/homebrew/bin/droid\",\n // Homebrew on Intel Mac / Linux\n \"/usr/local/bin/droid\",\n // Linux system-wide\n \"/usr/bin/droid\",\n // Current directory\n \"./droid\",\n ];\n\n for (const p of paths) {\n if (existsSync(p)) return p;\n }\n\n // Return default path even if not found (will error later with helpful message)\n return join(home, \".droid\", \"bin\", \"droid\");\n}\n\nbin(\"droid-patch\", \"CLI tool to patch droid binary with various modifications\")\n .package(\"droid-patch\", version)\n .option(\n \"--is-custom\",\n \"Patch isCustom:!0 to isCustom:!1 (enable context compression for custom models)\",\n )\n .option(\n \"--skip-login\",\n \"Inject a fake FACTORY_API_KEY to bypass login requirement (no real key needed)\",\n )\n .option(\n \"--api-base <url>\",\n \"Replace API URL (standalone: binary patch, max 22 chars; with --websearch: proxy forward target, no limit)\",\n )\n .option(\n \"--websearch\",\n \"Enable local WebSearch proxy (each instance runs own proxy, auto-cleanup on exit)\",\n )\n .option(\"--statusline\", \"Enable a Claude-style statusline (terminal UI)\")\n .option(\"--sessions\", \"Enable sessions browser (--sessions flag in alias)\")\n .option(\"--standalone\", \"Standalone mode: mock non-LLM Factory APIs (use with --websearch)\")\n .option(\n \"--reasoning-effort\",\n \"Enable reasoning effort for custom models (set to high, enable UI selector)\",\n )\n .option(\n \"--disable-telemetry\",\n \"Disable telemetry and Sentry error reporting (block data uploads)\",\n )\n .option(\"--dry-run\", \"Verify patches without actually modifying the binary\")\n .option(\"-p, --path <path>\", \"Path to the droid binary\")\n .option(\"-o, --output <dir>\", \"Output directory for patched binary\")\n .option(\"--no-backup\", \"Do not create backup of original binary\")\n .option(\"-v, --verbose\", \"Enable verbose output\")\n .argument(\"[alias]\", \"Alias name for the patched binary\")\n .action(async (options, args) => {\n const alias = args?.[0] as string | undefined;\n const isCustom = options[\"is-custom\"] as boolean;\n const skipLogin = options[\"skip-login\"] as boolean;\n const apiBase = options[\"api-base\"] as string | undefined;\n const websearch = options[\"websearch\"] as boolean;\n const statusline = options[\"statusline\"] as boolean;\n const sessions = options[\"sessions\"] as boolean;\n const standalone = options[\"standalone\"] as boolean;\n // When --websearch is used with --api-base, forward to custom URL\n // Otherwise forward to official Factory API\n const websearchTarget = websearch ? 
apiBase || \"https://api.factory.ai\" : undefined;\n const reasoningEffort = options[\"reasoning-effort\"] as boolean;\n const noTelemetry = options[\"disable-telemetry\"] as boolean;\n const dryRun = options[\"dry-run\"] as boolean;\n const path = (options.path as string) || findDefaultDroidPath();\n const outputDir = options.output as string | undefined;\n const backup = options.backup !== false;\n const verbose = options.verbose as boolean;\n\n // If -o is specified with alias, output to that directory with alias name\n const outputPath = outputDir && alias ? join(outputDir, alias) : undefined;\n\n const needsBinaryPatch =\n !!isCustom || !!skipLogin || !!reasoningEffort || !!noTelemetry || (!!apiBase && !websearch);\n\n const statuslineEnabled = statusline;\n\n // Wrapper-only mode (no binary patching needed):\n // - --websearch (optional --standalone)\n // - --statusline\n // - both combined (statusline wraps websearch)\n if (!needsBinaryPatch && (websearch || statuslineEnabled)) {\n if (!alias) {\n console.log(styleText(\"red\", \"Error: Alias name required for --websearch/--statusline\"));\n console.log(styleText(\"gray\", \"Usage: npx droid-patch --websearch <alias>\"));\n console.log(styleText(\"gray\", \"Usage: npx droid-patch --statusline <alias>\"));\n process.exit(1);\n }\n\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n console.log(styleText([\"cyan\", \"bold\"], \" Droid Wrapper Setup\"));\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n console.log();\n if (websearch) {\n console.log(styleText(\"white\", `WebSearch: enabled`));\n console.log(styleText(\"white\", `Forward target: ${websearchTarget}`));\n if (standalone) {\n console.log(styleText(\"white\", `Standalone mode: enabled`));\n }\n }\n if (statuslineEnabled) {\n console.log(styleText(\"white\", `Statusline: enabled`));\n }\n console.log();\n\n let execTargetPath = path;\n if (websearch) {\n // Create websearch proxy files (proxy script + wrapper)\n const proxyDir = join(homedir(), \".droid-patch\", \"proxy\");\n const { wrapperScript } = await createWebSearchUnifiedFiles(\n proxyDir,\n execTargetPath,\n alias,\n websearchTarget,\n standalone,\n );\n execTargetPath = wrapperScript;\n }\n\n if (statuslineEnabled) {\n const statuslineDir = join(homedir(), \".droid-patch\", \"statusline\");\n // Create sessions script only if --sessions is enabled\n let sessionsScript: string | undefined;\n if (sessions) {\n const result = await createSessionsScript(statuslineDir, alias);\n sessionsScript = result.sessionsScript;\n }\n const { wrapperScript } = await createStatuslineFiles(\n statuslineDir,\n execTargetPath,\n alias,\n sessionsScript,\n );\n execTargetPath = wrapperScript;\n }\n\n // Create alias pointing to outer wrapper\n const aliasResult = await createAliasForWrapper(execTargetPath, alias, verbose);\n\n // Save metadata for update command\n const droidVersion = getDroidVersion(path);\n const metadata = createMetadata(\n alias,\n path,\n {\n isCustom: false,\n skipLogin: false,\n apiBase: apiBase || null,\n websearch: !!websearch,\n statusline: !!statuslineEnabled,\n sessions: !!sessions,\n reasoningEffort: false,\n noTelemetry: false,\n standalone: standalone,\n },\n {\n droidPatchVersion: version,\n droidVersion,\n aliasPath: aliasResult.aliasPath,\n },\n );\n await saveAliasMetadata(metadata);\n\n console.log();\n console.log(styleText(\"green\", \"═\".repeat(60)));\n console.log(styleText([\"green\", \"bold\"], \" Wrapper Ready!\"));\n console.log(styleText(\"green\", \"═\".repeat(60)));\n 
console.log();\n console.log(\"Run directly:\");\n console.log(styleText(\"yellow\", ` ${alias}`));\n console.log();\n if (websearch) {\n console.log(styleText(\"cyan\", \"Auto-shutdown:\"));\n console.log(\n styleText(\"gray\", \" Proxy auto-shuts down after 5 min idle (no manual cleanup needed)\"),\n );\n console.log(styleText(\"gray\", \" To disable: export DROID_PROXY_IDLE_TIMEOUT=0\"));\n console.log();\n console.log(\"Search providers (in priority order):\");\n console.log(styleText(\"yellow\", \" 1. Smithery Exa (best quality):\"));\n console.log(styleText(\"gray\", \" export SMITHERY_API_KEY=your_api_key\"));\n console.log(styleText(\"gray\", \" export SMITHERY_PROFILE=your_profile\"));\n console.log(styleText(\"gray\", \" 2. Google PSE:\"));\n console.log(styleText(\"gray\", \" export GOOGLE_PSE_API_KEY=your_api_key\"));\n console.log(styleText(\"gray\", \" export GOOGLE_PSE_CX=your_search_engine_id\"));\n console.log(styleText(\"gray\", \" 3-6. Serper, Brave, SearXNG, DuckDuckGo (fallbacks)\"));\n console.log();\n console.log(\"Debug mode:\");\n console.log(styleText(\"gray\", \" export DROID_SEARCH_DEBUG=1\"));\n }\n return;\n }\n\n if (\n !isCustom &&\n !skipLogin &&\n !apiBase &&\n !websearch &&\n !statuslineEnabled &&\n !reasoningEffort &&\n !noTelemetry\n ) {\n console.log(styleText(\"yellow\", \"No patch flags specified. Available patches:\"));\n console.log(styleText(\"gray\", \" --is-custom Patch isCustom for custom models\"));\n console.log(\n styleText(\"gray\", \" --skip-login Bypass login by injecting a fake API key\"),\n );\n console.log(\n styleText(\n \"gray\",\n \" --api-base Replace API URL (standalone: max 22 chars; with --websearch: no limit)\",\n ),\n );\n console.log(styleText(\"gray\", \" --websearch Enable local WebSearch proxy\"));\n console.log(styleText(\"gray\", \" --statusline Enable Claude-style statusline\"));\n console.log(\n styleText(\"gray\", \" --reasoning-effort Set reasoning effort level for custom models\"),\n );\n console.log(\n styleText(\"gray\", \" --disable-telemetry Disable telemetry and Sentry error reporting\"),\n );\n console.log(\n styleText(\"gray\", \" --standalone Standalone mode: mock non-LLM Factory APIs\"),\n );\n console.log();\n console.log(\"Usage examples:\");\n console.log(styleText(\"cyan\", \" npx droid-patch --is-custom droid-custom\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --skip-login droid-nologin\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --is-custom --skip-login droid-patched\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --websearch droid-search\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --websearch --standalone droid-local\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --statusline droid-status\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --websearch --statusline droid-search-ui\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --disable-telemetry droid-private\"));\n console.log(\n styleText(\n \"cyan\",\n \" npx droid-patch --websearch --api-base=http://127.0.0.1:20002 my-droid\",\n ),\n );\n process.exit(1);\n }\n\n if (!alias && !dryRun) {\n console.log(styleText(\"red\", \"Error: alias name is required\"));\n console.log(\n styleText(\n \"gray\",\n \"Usage: droid-patch [--is-custom] [--skip-login] [-o <dir>] <alias-name>\",\n ),\n );\n process.exit(1);\n }\n\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n console.log(styleText([\"cyan\", \"bold\"], \" Droid Binary Patcher\"));\n console.log(styleText(\"cyan\", 
\"═\".repeat(60)));\n console.log();\n\n const patches: Patch[] = [];\n if (isCustom) {\n patches.push({\n name: \"isCustom\",\n description: \"Change isCustom:!0 to isCustom:!1\",\n pattern: Buffer.from(\"isCustom:!0\"),\n replacement: Buffer.from(\"isCustom:!1\"),\n });\n }\n\n // Add skip-login patch: replace process.env.FACTORY_API_KEY with a fixed fake key\n // \"process.env.FACTORY_API_KEY\" is 27 chars, we replace with \"fk-droid-patch-skip-00000\" (25 chars + quotes = 27)\n if (skipLogin) {\n patches.push({\n name: \"skipLogin\",\n description: 'Replace process.env.FACTORY_API_KEY with \"fk-droid-patch-skip-00000\"',\n pattern: Buffer.from(\"process.env.FACTORY_API_KEY\"),\n replacement: Buffer.from('\"fk-droid-patch-skip-00000\"'),\n });\n }\n\n // Add api-base patch: replace the Factory API base URL\n // Original: \"https://api.factory.ai\" (22 chars)\n // We need to pad the replacement URL to be exactly 22 chars\n // Note: When --websearch is used, --api-base sets the forward target instead of binary patching\n if (apiBase && !websearch) {\n const originalUrl = \"https://api.factory.ai\";\n const originalLength = originalUrl.length; // 22 chars\n\n // Validate and normalize the URL\n let normalizedUrl = apiBase.replace(/\\/+$/, \"\"); // Remove trailing slashes\n\n if (normalizedUrl.length > originalLength) {\n console.log(\n styleText(\"red\", `Error: API base URL must be ${originalLength} characters or less`),\n );\n console.log(\n styleText(\"gray\", ` Your URL: \"${normalizedUrl}\" (${normalizedUrl.length} chars)`),\n );\n console.log(styleText(\"gray\", ` Maximum: ${originalLength} characters`));\n console.log();\n console.log(styleText(\"yellow\", \"Tip: Use a shorter URL or set up a local redirect.\"));\n console.log(styleText(\"gray\", \" Examples:\"));\n console.log(styleText(\"gray\", \" http://127.0.0.1:3000 (19 chars)\"));\n console.log(styleText(\"gray\", \" http://localhost:80 (19 chars)\"));\n process.exit(1);\n }\n\n // Pad the URL with spaces at the end to match original length\n // Note: trailing spaces in URL are generally ignored\n const paddedUrl = normalizedUrl.padEnd(originalLength, \" \");\n\n patches.push({\n name: \"apiBase\",\n description: `Replace Factory API URL with \"${normalizedUrl}\"`,\n pattern: Buffer.from(originalUrl),\n replacement: Buffer.from(paddedUrl),\n });\n }\n\n // Add reasoning-effort patch: set custom models to use \"high\" reasoning\n // Also modify UI conditions to show reasoning selector for custom models\n if (reasoningEffort) {\n // [\"none\"] is 8 chars, [\"high\"] is 8 chars - perfect match!\n patches.push({\n name: \"reasoningEffortSupported\",\n description: 'Change supportedReasoningEfforts:[\"none\"] to [\"high\"]',\n pattern: Buffer.from('supportedReasoningEfforts:[\"none\"]'),\n replacement: Buffer.from('supportedReasoningEfforts:[\"high\"]'),\n });\n\n // \"none\" is 4 chars, \"high\" is 4 chars - perfect match!\n patches.push({\n name: \"reasoningEffortDefault\",\n description: 'Change defaultReasoningEffort:\"none\" to \"high\"',\n pattern: Buffer.from('defaultReasoningEffort:\"none\"'),\n replacement: Buffer.from('defaultReasoningEffort:\"high\"'),\n });\n\n // Change UI condition from length>1 to length>0\n // This allows custom models with single reasoning option to show the selector\n patches.push({\n name: \"reasoningEffortUIShow\",\n description: \"Change supportedReasoningEfforts.length>1 to length>0\",\n pattern: Buffer.from(\"supportedReasoningEfforts.length>1\"),\n replacement: 
Buffer.from(\"supportedReasoningEfforts.length>0\"),\n });\n\n // Change UI condition from length<=1 to length<=0\n // This enables the reasoning setting in /settings menu for custom models\n patches.push({\n name: \"reasoningEffortUIEnable\",\n description: \"Change supportedReasoningEfforts.length<=1 to length<=0\",\n pattern: Buffer.from(\"supportedReasoningEfforts.length<=1\"),\n replacement: Buffer.from(\"supportedReasoningEfforts.length<=0\"),\n });\n\n // Bypass reasoning effort validation to allow settings.json override\n // This allows \"xhigh\" in settings.json to work even though default is \"high\"\n // Original: if(R&&!B.supportedReasoningEfforts.includes(R)) throw error\n // Changed: if(0&&...) - never throws, any value is accepted\n patches.push({\n name: \"reasoningEffortValidationBypass\",\n description: \"Bypass reasoning effort validation (allows xhigh in settings.json)\",\n pattern: Buffer.from(\"if(R&&!B.supportedReasoningEfforts.includes(R))\"),\n replacement: Buffer.from(\"if(0&&!B.supportedReasoningEfforts.includes(R))\"),\n });\n }\n\n // Add no-telemetry patches: disable telemetry uploads and Sentry error reporting\n // Strategy:\n // 1. Break environment variable names so Sentry is never initialized (Q1() returns false)\n // 2. Invert flushToWeb condition so it returns early without making any fetch request\n if (noTelemetry) {\n // Patch 1: Break Sentry environment variable checks\n // Q1() function checks: VITE_VERCEL_ENV, ENABLE_SENTRY, NEXT_PUBLIC_ENABLE_SENTRY, FACTORY_ENABLE_SENTRY\n // By changing first letter to X, the env vars will never match, so Q1() returns false\n // and Sentry is never initialized\n patches.push({\n name: \"noTelemetrySentryEnv1\",\n description: \"Break ENABLE_SENTRY env var check (E->X)\",\n pattern: Buffer.from(\"ENABLE_SENTRY\"),\n replacement: Buffer.from(\"XNABLE_SENTRY\"),\n });\n\n patches.push({\n name: \"noTelemetrySentryEnv2\",\n description: \"Break VITE_VERCEL_ENV env var check (V->X)\",\n pattern: Buffer.from(\"VITE_VERCEL_ENV\"),\n replacement: Buffer.from(\"XITE_VERCEL_ENV\"),\n });\n\n // Patch 2: Make flushToWeb always return early to prevent ANY fetch request\n // Original: if(this.webEvents.length===0)return; // returns only when empty\n // Changed: if(!0||this.webEvents.length)return; // !0=true, ALWAYS returns\n // Result: Function always exits immediately, no telemetry is ever sent\n patches.push({\n name: \"noTelemetryFlushBlock\",\n description: \"Make flushToWeb always return (!0|| = always true)\",\n pattern: Buffer.from(\"this.webEvents.length===0\"),\n replacement: Buffer.from(\"!0||this.webEvents.length\"),\n });\n }\n\n try {\n const result = await patchDroid({\n inputPath: path,\n outputPath: outputPath,\n patches,\n dryRun,\n backup,\n verbose,\n });\n\n if (dryRun) {\n console.log();\n console.log(styleText(\"blue\", \"═\".repeat(60)));\n console.log(styleText([\"blue\", \"bold\"], \" DRY RUN COMPLETE\"));\n console.log(styleText(\"blue\", \"═\".repeat(60)));\n console.log();\n console.log(styleText(\"gray\", \"To apply the patches, run without --dry-run:\"));\n console.log(styleText(\"cyan\", ` npx droid-patch --is-custom ${alias || \"<alias-name>\"}`));\n process.exit(0);\n }\n\n // If -o is specified, just output the file without creating alias\n if (outputDir && result.success && result.outputPath) {\n console.log();\n console.log(styleText(\"green\", \"═\".repeat(60)));\n console.log(styleText([\"green\", \"bold\"], \" PATCH SUCCESSFUL\"));\n console.log(styleText(\"green\", 
\"═\".repeat(60)));\n console.log();\n console.log(styleText(\"white\", `Patched binary saved to: ${result.outputPath}`));\n process.exit(0);\n }\n\n if (result.success && result.outputPath && alias) {\n console.log();\n\n let execTargetPath = result.outputPath;\n\n if (websearch) {\n const proxyDir = join(homedir(), \".droid-patch\", \"proxy\");\n const { wrapperScript } = await createWebSearchUnifiedFiles(\n proxyDir,\n execTargetPath,\n alias,\n websearchTarget,\n standalone,\n );\n execTargetPath = wrapperScript;\n\n console.log();\n console.log(styleText(\"cyan\", \"WebSearch enabled\"));\n console.log(styleText(\"white\", ` Forward target: ${websearchTarget}`));\n if (standalone) {\n console.log(styleText(\"white\", ` Standalone mode: enabled`));\n }\n }\n\n if (statuslineEnabled) {\n const statuslineDir = join(homedir(), \".droid-patch\", \"statusline\");\n let sessionsScript: string | undefined;\n if (sessions) {\n const result = await createSessionsScript(statuslineDir, alias);\n sessionsScript = result.sessionsScript;\n }\n const { wrapperScript } = await createStatuslineFiles(\n statuslineDir,\n execTargetPath,\n alias,\n sessionsScript,\n );\n execTargetPath = wrapperScript;\n console.log();\n console.log(styleText(\"cyan\", \"Statusline enabled\"));\n }\n\n let aliasResult;\n if (websearch || statuslineEnabled) {\n aliasResult = await createAliasForWrapper(execTargetPath, alias, verbose);\n } else {\n aliasResult = await createAlias(result.outputPath, alias, verbose);\n }\n\n // Save metadata for update command\n const droidVersion = getDroidVersion(path);\n const metadata = createMetadata(\n alias,\n path,\n {\n isCustom: !!isCustom,\n skipLogin: !!skipLogin,\n apiBase: apiBase || null,\n websearch: !!websearch,\n statusline: !!statuslineEnabled,\n sessions: !!sessions,\n reasoningEffort: !!reasoningEffort,\n noTelemetry: !!noTelemetry,\n standalone: !!standalone,\n },\n {\n droidPatchVersion: version,\n droidVersion,\n aliasPath: aliasResult.aliasPath,\n },\n );\n await saveAliasMetadata(metadata);\n }\n\n if (result.success) {\n console.log();\n console.log(styleText(\"green\", \"═\".repeat(60)));\n console.log(styleText([\"green\", \"bold\"], \" PATCH SUCCESSFUL\"));\n console.log(styleText(\"green\", \"═\".repeat(60)));\n }\n\n process.exit(result.success ? 
0 : 1);\n } catch (error) {\n console.error(styleText(\"red\", `Error: ${(error as Error).message}`));\n if (verbose) console.error((error as Error).stack);\n process.exit(1);\n }\n })\n .command(\"list\", \"List all droid-patch aliases\")\n .action(async () => {\n await listAliases();\n })\n .command(\"remove\", \"Remove alias(es) by name or filter\")\n .argument(\"[alias-or-path]\", \"Alias name or file path to remove\")\n .option(\"--patch-version <version>\", \"Remove aliases created by this droid-patch version\")\n .option(\"--droid-version <version>\", \"Remove aliases for this droid version\")\n .option(\n \"--flag <flag>\",\n \"Remove aliases with this flag (is-custom, skip-login, websearch, statusline, api-base, reasoning-effort, disable-telemetry, standalone)\",\n )\n .action(async (options, args) => {\n const target = args?.[0] as string | undefined;\n const patchVersion = options[\"patch-version\"] as string | undefined;\n const droidVersion = options[\"droid-version\"] as string | undefined;\n const flag = options.flag as FilterFlag | undefined;\n\n // If filter options are provided, use filter mode\n if (patchVersion || droidVersion || flag) {\n await removeAliasesByFilter({\n patchVersion,\n droidVersion,\n flags: flag ? [flag] : undefined,\n });\n return;\n }\n\n // If no target and no filter, show error\n if (!target) {\n console.error(\n styleText(\n \"red\",\n \"Error: Provide an alias name or use filter options (--patch-version, --droid-version, --flag)\",\n ),\n );\n process.exit(1);\n }\n\n // Check if it's a file path (contains / or .)\n if (target.includes(\"/\") || existsSync(target)) {\n // It's a file path, delete directly\n const { unlink } = await import(\"node:fs/promises\");\n try {\n await unlink(target);\n console.log(styleText(\"green\", `[*] Removed: ${target}`));\n } catch (error) {\n console.error(styleText(\"red\", `Error: ${(error as Error).message}`));\n process.exit(1);\n }\n } else {\n // It's an alias name\n await removeAlias(target);\n }\n })\n .command(\"version\", \"Print droid-patch version\")\n .action(() => {\n console.log(`droid-patch v${version}`);\n })\n .command(\"clear\", \"Remove all droid-patch aliases and related files\")\n .action(async () => {\n await clearAllAliases();\n })\n .command(\"update\", \"Update aliases with latest droid binary\")\n .argument(\"[alias]\", \"Specific alias to update (optional, updates all if not specified)\")\n .option(\"--dry-run\", \"Preview without making changes\")\n .option(\"-p, --path <path>\", \"Path to new droid binary\")\n .option(\"-v, --verbose\", \"Enable verbose output\")\n .action(async (options, args) => {\n const aliasName = args?.[0] as string | undefined;\n const dryRun = options[\"dry-run\"] as boolean;\n const newBinaryPath = (options.path as string) || findDefaultDroidPath();\n const verbose = options.verbose as boolean;\n\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n console.log(styleText([\"cyan\", \"bold\"], \" Droid-Patch Update\"));\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n console.log();\n\n // Verify the new binary exists\n if (!existsSync(newBinaryPath)) {\n console.log(styleText(\"red\", `Error: Droid binary not found at ${newBinaryPath}`));\n console.log(styleText(\"gray\", \"Use -p to specify a different path\"));\n process.exit(1);\n }\n\n // Get aliases to update\n let metaList: Awaited<ReturnType<typeof loadAliasMetadata>>[];\n if (aliasName) {\n const meta = await loadAliasMetadata(aliasName);\n if (!meta) {\n console.log(styleText(\"red\", 
`Error: No metadata found for alias \"${aliasName}\"`));\n console.log(\n styleText(\"gray\", \"This alias may have been created before update tracking was added.\"),\n );\n console.log(styleText(\"gray\", \"Remove and recreate the alias to enable update support.\"));\n process.exit(1);\n }\n metaList = [meta];\n } else {\n metaList = await listAllMetadata();\n if (metaList.length === 0) {\n console.log(styleText(\"yellow\", \"No aliases with metadata found.\"));\n console.log(styleText(\"gray\", \"Create aliases with droid-patch to enable update support.\"));\n process.exit(0);\n }\n }\n\n console.log(styleText(\"white\", `Using droid binary: ${newBinaryPath}`));\n console.log(styleText(\"white\", `Found ${metaList.length} alias(es) to update`));\n if (dryRun) {\n console.log(styleText(\"blue\", \"(DRY RUN - no changes will be made)\"));\n }\n console.log();\n\n let successCount = 0;\n let failCount = 0;\n\n for (const meta of metaList) {\n if (!meta) continue;\n\n console.log(styleText(\"cyan\", `─`.repeat(40)));\n console.log(styleText(\"white\", `Updating: ${styleText([\"cyan\", \"bold\"], meta.name)}`));\n console.log(styleText(\"gray\", ` Patches: ${formatPatches(meta.patches)}`));\n\n if (dryRun) {\n console.log(styleText(\"blue\", ` [DRY RUN] Would re-apply patches`));\n successCount++;\n continue;\n }\n\n try {\n // Build patch list based on metadata\n const patches: Patch[] = [];\n\n if (meta.patches.isCustom) {\n patches.push({\n name: \"isCustom\",\n description: \"Change isCustom:!0 to isCustom:!1\",\n pattern: Buffer.from(\"isCustom:!0\"),\n replacement: Buffer.from(\"isCustom:!1\"),\n });\n }\n\n if (meta.patches.skipLogin) {\n patches.push({\n name: \"skipLogin\",\n description: \"Replace process.env.FACTORY_API_KEY with fake key\",\n pattern: Buffer.from(\"process.env.FACTORY_API_KEY\"),\n replacement: Buffer.from('\"fk-droid-patch-skip-00000\"'),\n });\n }\n\n // Only apply apiBase binary patch when NOT using websearch\n // When websearch is enabled, apiBase is used as forward target, not binary patch\n if (meta.patches.apiBase && !meta.patches.websearch) {\n const originalUrl = \"https://api.factory.ai\";\n const paddedUrl = meta.patches.apiBase.padEnd(originalUrl.length, \" \");\n patches.push({\n name: \"apiBase\",\n description: `Replace Factory API URL with \"${meta.patches.apiBase}\"`,\n pattern: Buffer.from(originalUrl),\n replacement: Buffer.from(paddedUrl),\n });\n }\n\n if (meta.patches.reasoningEffort) {\n patches.push({\n name: \"reasoningEffortSupported\",\n description: 'Change supportedReasoningEfforts:[\"none\"] to [\"high\"]',\n pattern: Buffer.from('supportedReasoningEfforts:[\"none\"]'),\n replacement: Buffer.from('supportedReasoningEfforts:[\"high\"]'),\n });\n patches.push({\n name: \"reasoningEffortDefault\",\n description: 'Change defaultReasoningEffort:\"none\" to \"high\"',\n pattern: Buffer.from('defaultReasoningEffort:\"none\"'),\n replacement: Buffer.from('defaultReasoningEffort:\"high\"'),\n });\n patches.push({\n name: \"reasoningEffortUIShow\",\n description: \"Change supportedReasoningEfforts.length>1 to length>0\",\n pattern: Buffer.from(\"supportedReasoningEfforts.length>1\"),\n replacement: Buffer.from(\"supportedReasoningEfforts.length>0\"),\n });\n patches.push({\n name: \"reasoningEffortUIEnable\",\n description: \"Change supportedReasoningEfforts.length<=1 to length<=0\",\n pattern: Buffer.from(\"supportedReasoningEfforts.length<=1\"),\n replacement: Buffer.from(\"supportedReasoningEfforts.length<=0\"),\n });\n patches.push({\n name: 
\"reasoningEffortValidationBypass\",\n description: \"Bypass reasoning effort validation (allows xhigh in settings.json)\",\n pattern: Buffer.from(\"if(R&&!B.supportedReasoningEfforts.includes(R))\"),\n replacement: Buffer.from(\"if(0&&!B.supportedReasoningEfforts.includes(R))\"),\n });\n }\n\n if (meta.patches.noTelemetry) {\n patches.push({\n name: \"noTelemetrySentryEnv1\",\n description: \"Break ENABLE_SENTRY env var check (E->X)\",\n pattern: Buffer.from(\"ENABLE_SENTRY\"),\n replacement: Buffer.from(\"XNABLE_SENTRY\"),\n });\n patches.push({\n name: \"noTelemetrySentryEnv2\",\n description: \"Break VITE_VERCEL_ENV env var check (V->X)\",\n pattern: Buffer.from(\"VITE_VERCEL_ENV\"),\n replacement: Buffer.from(\"XITE_VERCEL_ENV\"),\n });\n patches.push({\n name: \"noTelemetryFlushBlock\",\n description: \"Make flushToWeb always return (!0|| = always true)\",\n pattern: Buffer.from(\"this.webEvents.length===0\"),\n replacement: Buffer.from(\"!0||this.webEvents.length\"),\n });\n }\n\n // Determine output path based on whether this is a websearch alias\n const binsDir = join(homedir(), \".droid-patch\", \"bins\");\n const outputPath = join(binsDir, `${meta.name}-patched`);\n\n // Apply patches (only if there are binary patches to apply)\n if (patches.length > 0) {\n const result = await patchDroid({\n inputPath: newBinaryPath,\n outputPath,\n patches,\n dryRun: false,\n backup: false,\n verbose,\n });\n\n if (!result.success) {\n console.log(styleText(\"red\", ` ✗ Failed to apply patches`));\n failCount++;\n continue;\n }\n\n // Re-sign on macOS\n if (process.platform === \"darwin\") {\n try {\n const { execSync } = await import(\"node:child_process\");\n execSync(`codesign --force --deep --sign - \"${outputPath}\"`, {\n stdio: \"pipe\",\n });\n if (verbose) {\n console.log(styleText(\"gray\", ` Re-signed binary`));\n }\n } catch {\n console.log(styleText(\"yellow\", ` [!] Could not re-sign binary`));\n }\n }\n }\n\n let execTargetPath = patches.length > 0 ? 
outputPath : newBinaryPath;\n\n // If websearch is enabled, regenerate wrapper files\n // Support both new 'websearch' field and old 'proxy' field for backward compatibility\n const hasWebsearch = meta.patches.websearch || !!meta.patches.proxy;\n if (hasWebsearch) {\n // Determine forward target: apiBase > proxy (legacy) > default\n const forwardTarget =\n meta.patches.apiBase || meta.patches.proxy || \"https://api.factory.ai\";\n const proxyDir = join(homedir(), \".droid-patch\", \"proxy\");\n const { wrapperScript } = await createWebSearchUnifiedFiles(\n proxyDir,\n execTargetPath,\n meta.name,\n forwardTarget,\n meta.patches.standalone || false,\n );\n execTargetPath = wrapperScript;\n if (verbose) {\n console.log(styleText(\"gray\", ` Regenerated websearch wrapper`));\n if (meta.patches.standalone) {\n console.log(styleText(\"gray\", ` Standalone mode: enabled`));\n }\n }\n // Migrate old proxy field to new websearch field\n if (meta.patches.proxy && !meta.patches.websearch) {\n meta.patches.websearch = true;\n meta.patches.apiBase = meta.patches.proxy;\n delete meta.patches.proxy;\n }\n }\n\n if (meta.patches.statusline) {\n const statuslineDir = join(homedir(), \".droid-patch\", \"statusline\");\n let sessionsScript: string | undefined;\n if (meta.patches.sessions) {\n const result = await createSessionsScript(statuslineDir, meta.name);\n sessionsScript = result.sessionsScript;\n }\n const { wrapperScript } = await createStatuslineFiles(\n statuslineDir,\n execTargetPath,\n meta.name,\n sessionsScript,\n );\n execTargetPath = wrapperScript;\n if (verbose) {\n console.log(styleText(\"gray\", ` Regenerated statusline wrapper`));\n }\n }\n\n // Update symlink - find existing or use stored aliasPath\n const { symlink, unlink, readlink, lstat } = await import(\"node:fs/promises\");\n let aliasPath = meta.aliasPath;\n\n // If aliasPath not stored (old version), try to find existing symlink\n if (!aliasPath) {\n const commonPathDirs = [\n join(homedir(), \".local/bin\"),\n join(homedir(), \"bin\"),\n join(homedir(), \".bin\"),\n \"/opt/homebrew/bin\",\n \"/usr/local/bin\",\n join(homedir(), \".droid-patch\", \"aliases\"),\n ];\n\n for (const dir of commonPathDirs) {\n const possiblePath = join(dir, meta.name);\n if (existsSync(possiblePath)) {\n try {\n const stats = await lstat(possiblePath);\n if (stats.isSymbolicLink()) {\n const target = await readlink(possiblePath);\n if (\n target.includes(\".droid-patch/bins\") ||\n target.includes(\".droid-patch/proxy\") ||\n target.includes(\".droid-patch/statusline\")\n ) {\n aliasPath = possiblePath;\n if (verbose) {\n console.log(styleText(\"gray\", ` Found existing symlink: ${aliasPath}`));\n }\n break;\n }\n }\n } catch {\n // Ignore errors, continue searching\n }\n }\n }\n }\n\n // Update symlink if we have a path\n if (aliasPath) {\n try {\n if (existsSync(aliasPath)) {\n const currentTarget = await readlink(aliasPath);\n if (currentTarget !== execTargetPath) {\n await unlink(aliasPath);\n await symlink(execTargetPath, aliasPath);\n if (verbose) {\n console.log(styleText(\"gray\", ` Updated symlink: ${aliasPath}`));\n }\n }\n } else {\n // Symlink doesn't exist, recreate it\n await symlink(execTargetPath, aliasPath);\n if (verbose) {\n console.log(styleText(\"gray\", ` Recreated symlink: ${aliasPath}`));\n }\n }\n // Store aliasPath in metadata for future updates\n meta.aliasPath = aliasPath;\n } catch (symlinkError) {\n console.log(\n styleText(\n \"yellow\",\n ` [!] 
Could not update symlink: ${(symlinkError as Error).message}`,\n ),\n );\n }\n }\n\n // Update metadata\n meta.updatedAt = new Date().toISOString();\n meta.originalBinaryPath = newBinaryPath;\n meta.droidVersion = getDroidVersion(newBinaryPath);\n meta.droidPatchVersion = version;\n await saveAliasMetadata(meta);\n\n console.log(styleText(\"green\", ` ✓ Updated successfully`));\n successCount++;\n } catch (error) {\n console.log(styleText(\"red\", ` ✗ Error: ${(error as Error).message}`));\n if (verbose) {\n console.error((error as Error).stack);\n }\n failCount++;\n }\n }\n\n console.log();\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n if (dryRun) {\n console.log(styleText([\"blue\", \"bold\"], \" DRY RUN COMPLETE\"));\n console.log(styleText(\"gray\", ` Would update ${successCount} alias(es)`));\n } else if (failCount === 0) {\n console.log(styleText([\"green\", \"bold\"], \" UPDATE COMPLETE\"));\n console.log(styleText(\"gray\", ` Updated ${successCount} alias(es)`));\n } else {\n console.log(styleText([\"yellow\", \"bold\"], \" UPDATE FINISHED WITH ERRORS\"));\n console.log(styleText(\"gray\", ` Success: ${successCount}, Failed: ${failCount}`));\n }\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n })\n .run()\n .catch((err: Error) => {\n console.error(err);\n process.exit(1);\n });\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAm9BA,SAAS,0BAA0B,gBAAwB,0BAAkC;AAC3F,QAAO;;;;;;;;;;;;uBAYc,cAAc;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAwerC,SAAS,uBACP,WACA,iBACA,aAAsB,OACd;CACR,MAAM,gBAAgB,aAAa,uBAAuB;AAC1D,QAAO;;;;;gBAKO,gBAAgB;aACnB,UAAU;;;cAGT,aAAa,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IA4DjC,cAAc;;IAEd,cAAc;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAyDlB,eAAsB,4BACpB,WACA,WACA,WACA,SACA,aAAsB,OACqC;AAC3D,KAAI,CAAC,WAAW,UAAU,CACxB,OAAM,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;CAG7C,MAAM,kBAAkB,KAAK,WAAW,GAAG,UAAU,WAAW;CAChE,MAAM,oBAAoB,KAAK,WAAW,UAAU;AAIpD,OAAM,UAAU,iBAAiB,0BADX,WAAW,yBACwC,CAAC;AAC1E,SAAQ,IAAI,6BAA6B,kBAAkB;AAG3D,OAAM,UACJ,mBACA,uBAAuB,WAAW,iBAAiB,WAAW,CAC/D;AACD,OAAM,MAAM,mBAAmB,IAAM;AACrC,SAAQ,IAAI,wBAAwB,oBAAoB;AAExD,KAAI,WACF,SAAQ,IAAI,8BAA8B;AAG5C,QAAO;EACL,eAAe;EACf,eAAe;EAChB;;;;;AC5mDH,SAAS,kCAA0C;AAEjD,QAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAwiCT,SAAS,gCACP,gBACA,mBACA,oBACQ;AACR,QAAO,mCAAmC,gBAAgB,mBAAmB,mBAAmB;;AAGlG,SAAS,mCACP,gBACA,mBACA,oBACQ;AAQR,QAAO;;;;sBAPgB,KAAK,UAAU,eAAe,CAWlB;6BAVT,KAAK,UAAU,kBAAkB,CAWd;0BAVlB,qBAAqB,KAAK,UAAU,mBAAmB,GAAG,OAW1C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAo6B7C,eAAsB,sBACpB,WACA,gBACA,WACA,oBAC2D;AAC3D,KAAI,CAAC,WAAW,UAAU,CACxB,OAAM,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;CAG7C,MAAM,oBAAoB,KAAK,WAAW,GAAG,UAAU,gBAAgB;CACvE,MAAM,oBAAoB,KAAK,WAAW,UAAU;AAEpD,OAAM,UAAU,mBAAmB,iCAAiC,CAAC;AACrE,OAAM,MAAM,mBAAmB,IAAM;AAQrC,OAAM,UAAU,mBANA,gCACd,gBACA,mBACA,mBACD,CAE0C;AAC3C,OAAM,MAAM,mBAAmB,IAAM;AAErC,QAAO;EAAE,eAAe;EAAmB,eAAe;EAAmB;;;;;;;;AC9/D/E,SAAS,8BAA8B,WAA2B;AAEhE,QAAO;;;;;;;;;;;qBADW,KAAK,UAAU,UAAU,CAYd;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAwQ/B,eAAsB,qBACpB,WACA,WACqC;AACrC,KAAI,CAAC,WAAW,UAAU,CACxB,OAAM,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;CAG7C,MAAM,qBAAqB,KAAK,WAAW,GAAG,UAAU,cAAc;AAEtE,OAAM,UAAU,oBAAoB,8BAA8B,UAAU,CAAC;AAC7E,OAAM,MAAM,oBAAoB,IAAM;AAEtC,QAAO,EAAE,gBAAgB,oBAAoB;;;;;AC7Q/C,MAAM,YAAY,QAAQ,cAAc,OAAO,KAAK,IAAI,CAAC;AAEzD,SAAS,aAAqB;AAC5B,KAAI;EACF,MAAM,UAAU,KAAK,WAAW,MAAM,eAAe;AAErD,SADY,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC,CAC3C,WAAW;SAChB;AACN,SAAO;;;AAIX,MAAM,UAAU,YAAY;AAE5B,SAAS,gBAAgB,WAAuC;AAC9D,KAAI;EACF,MAAM,SAAS,SAAS,IAAI,UAAU,cAAc;GAClD,UAAU;GACV,OAAO;IAAC;IAAQ;IAAQ;IAAO;GAC/B,SAAS;GACV,CAAC,CAAC,MAAM;EAET,MAAM,QAAQ,OAAO,MAAM,kBAAkB;AAC7C,SAAO,QAAQ,MAAM,KAAK,UAAU;SAC9B;AACN;;;AAIJ,SAAS,uBAA+B;CACtC,MAAM,OAAO,SAAS;AAGtB,KAAI;EACF,MAAM,SAAS,SAAS,eAAe;GACrC,UAAU;GACV,OAAO;IAAC;IAAQ;IAAQ;IAAO;GAChC,CAAC,CAAC,MAAM;AACT,MAAI,UAAU,WAAW,OAAO,CAC9B,QAAO;SAEH;CAKR,MAAM,QAAQ;EAEZ,KAAK,MAAM,UAAU,OAAO,QAAQ;EAEpC;EAEA;EAEA;EAEA;EACD;AAED,MAAK,MAAM,KAAK,MACd,KAAI,WAAW,EAAE,CAAE,QAAO;AAI5B,QAAO,KAAK,MAAM,UAAU,OAAO,QAAQ;;AAG7C,IAAI,eAAe,4DAA4D,CAC5E,QAAQ,eAAe,QAAQ,CAC/B,OACC,eACA,kFACD,CACA,OACC,gBACA,iFACD,CACA,OACC,oBACA,6GACD,CACA,OACC,eACA,oFACD,CACA,OAAO,gBAAgB,iDAAiD,CACxE,OAAO,cAAc,qDAAqD,CAC1E,OAAO,gBAAgB,oEAAoE,CAC3F,OACC,sBACA,8EACD,CACA,OACC,uBACA,oEACD,CACA,OAAO,aAAa,uDAAuD,CAC3E,OAAO,qBAAqB,2BAA2B,CACvD,OAAO,sBAAsB,sCAAsC,CACnE,OAAO,eAAe,0CAA0C,CAChE,OAAO,iBAAiB,wBAAwB,CAChD,SAAS,WAAW,oCAAoC,CACxD,OAAO,OAAO,SAAS,SAAS;CAC/B,MAAM,QAAQ,OAAO;CACrB,MAAM,WAAW,QAAQ;CACzB,MAAM,YAAY,QAAQ;CAC1B,MAAM,UAAU,QAAQ;CACxB,MAAM,YAAY,QAAQ;CAC1B,
MAAM,aAAa,QAAQ;CAC3B,MAAM,WAAW,QAAQ;CACzB,MAAM,aAAa,QAAQ;CAG3B,MAAM,kBAAkB,YAAY,WAAW,2BAA2B;CAC1E,MAAM,kBAAkB,QAAQ;CAChC,MAAM,cAAc,QAAQ;CAC5B,MAAM,SAAS,QAAQ;CACvB,MAAM,OAAQ,QAAQ,QAAmB,sBAAsB;CAC/D,MAAM,YAAY,QAAQ;CAC1B,MAAM,SAAS,QAAQ,WAAW;CAClC,MAAM,UAAU,QAAQ;CAGxB,MAAM,aAAa,aAAa,QAAQ,KAAK,WAAW,MAAM,GAAG;CAEjE,MAAM,mBACJ,CAAC,CAAC,YAAY,CAAC,CAAC,aAAa,CAAC,CAAC,mBAAmB,CAAC,CAAC,eAAgB,CAAC,CAAC,WAAW,CAAC;CAEpF,MAAM,oBAAoB;AAM1B,KAAI,CAAC,qBAAqB,aAAa,oBAAoB;AACzD,MAAI,CAAC,OAAO;AACV,WAAQ,IAAI,UAAU,OAAO,0DAA0D,CAAC;AACxF,WAAQ,IAAI,UAAU,QAAQ,6CAA6C,CAAC;AAC5E,WAAQ,IAAI,UAAU,QAAQ,8CAA8C,CAAC;AAC7E,WAAQ,KAAK,EAAE;;AAGjB,UAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,UAAQ,IAAI,UAAU,CAAC,QAAQ,OAAO,EAAE,wBAAwB,CAAC;AACjE,UAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,UAAQ,KAAK;AACb,MAAI,WAAW;AACb,WAAQ,IAAI,UAAU,SAAS,qBAAqB,CAAC;AACrD,WAAQ,IAAI,UAAU,SAAS,mBAAmB,kBAAkB,CAAC;AACrE,OAAI,WACF,SAAQ,IAAI,UAAU,SAAS,2BAA2B,CAAC;;AAG/D,MAAI,kBACF,SAAQ,IAAI,UAAU,SAAS,sBAAsB,CAAC;AAExD,UAAQ,KAAK;EAEb,IAAI,iBAAiB;AACrB,MAAI,WAAW;GAGb,MAAM,EAAE,kBAAkB,MAAM,4BADf,KAAK,SAAS,EAAE,gBAAgB,QAAQ,EAGvD,gBACA,OACA,iBACA,WACD;AACD,oBAAiB;;AAGnB,MAAI,mBAAmB;GACrB,MAAM,gBAAgB,KAAK,SAAS,EAAE,gBAAgB,aAAa;GAEnE,IAAIA;AACJ,OAAI,SAEF,mBADe,MAAM,qBAAqB,eAAe,MAAM,EACvC;GAE1B,MAAM,EAAE,kBAAkB,MAAM,sBAC9B,eACA,gBACA,OACA,eACD;AACD,oBAAiB;;EAInB,MAAM,cAAc,MAAM,sBAAsB,gBAAgB,OAAO,QAAQ;EAG/E,MAAM,eAAe,gBAAgB,KAAK;AAqB1C,QAAM,kBApBW,eACf,OACA,MACA;GACE,UAAU;GACV,WAAW;GACX,SAAS,WAAW;GACpB,WAAW,CAAC,CAAC;GACb,YAAY,CAAC,CAAC;GACd,UAAU,CAAC,CAAC;GACZ,iBAAiB;GACjB,aAAa;GACD;GACb,EACD;GACE,mBAAmB;GACnB;GACA,WAAW,YAAY;GACxB,CACF,CACgC;AAEjC,UAAQ,KAAK;AACb,UAAQ,IAAI,UAAU,SAAS,IAAI,OAAO,GAAG,CAAC,CAAC;AAC/C,UAAQ,IAAI,UAAU,CAAC,SAAS,OAAO,EAAE,mBAAmB,CAAC;AAC7D,UAAQ,IAAI,UAAU,SAAS,IAAI,OAAO,GAAG,CAAC,CAAC;AAC/C,UAAQ,KAAK;AACb,UAAQ,IAAI,gBAAgB;AAC5B,UAAQ,IAAI,UAAU,UAAU,KAAK,QAAQ,CAAC;AAC9C,UAAQ,KAAK;AACb,MAAI,WAAW;AACb,WAAQ,IAAI,UAAU,QAAQ,iBAAiB,CAAC;AAChD,WAAQ,IACN,UAAU,QAAQ,sEAAsE,CACzF;AACD,WAAQ,IAAI,UAAU,QAAQ,kDAAkD,CAAC;AACjF,WAAQ,KAAK;AACb,WAAQ,IAAI,wCAAwC;AACpD,WAAQ,IAAI,UAAU,UAAU,oCAAoC,CAAC;AACrE,WAAQ,IAAI,UAAU,QAAQ,4CAA4C,CAAC;AAC3E,WAAQ,IAAI,UAAU,QAAQ,4CAA4C,CAAC;AAC3E,WAAQ,IAAI,UAAU,QAAQ,mBAAmB,CAAC;AAClD,WAAQ,IAAI,UAAU,QAAQ,8CAA8C,CAAC;AAC7E,WAAQ,IAAI,UAAU,QAAQ,kDAAkD,CAAC;AACjF,WAAQ,IAAI,UAAU,QAAQ,wDAAwD,CAAC;AACvF,WAAQ,KAAK;AACb,WAAQ,IAAI,cAAc;AAC1B,WAAQ,IAAI,UAAU,QAAQ,gCAAgC,CAAC;;AAEjE;;AAGF,KACE,CAAC,YACD,CAAC,aACD,CAAC,WACD,CAAC,aACD,CAAC,qBACD,CAAC,mBACD,CAAC,aACD;AACA,UAAQ,IAAI,UAAU,UAAU,+CAA+C,CAAC;AAChF,UAAQ,IAAI,UAAU,QAAQ,yDAAyD,CAAC;AACxF,UAAQ,IACN,UAAU,QAAQ,iEAAiE,CACpF;AACD,UAAQ,IACN,UACE,QACA,+FACD,CACF;AACD,UAAQ,IAAI,UAAU,QAAQ,qDAAqD,CAAC;AACpF,UAAQ,IAAI,UAAU,QAAQ,uDAAuD,CAAC;AACtF,UAAQ,IACN,UAAU,QAAQ,qEAAqE,CACxF;AACD,UAAQ,IACN,UAAU,QAAQ,qEAAqE,CACxF;AACD,UAAQ,IACN,UAAU,QAAQ,mEAAmE,CACtF;AACD,UAAQ,KAAK;AACb,UAAQ,IAAI,kBAAkB;AAC9B,UAAQ,IAAI,UAAU,QAAQ,6CAA6C,CAAC;AAC5E,UAAQ,IAAI,UAAU,QAAQ,+CAA+C,CAAC;AAC9E,UAAQ,IAAI,UAAU,QAAQ,2DAA2D,CAAC;AAC1F,UAAQ,IAAI,UAAU,QAAQ,6CAA6C,CAAC;AAC5E,UAAQ,IAAI,UAAU,QAAQ,yDAAyD,CAAC;AACxF,UAAQ,IAAI,UAAU,QAAQ,8CAA8C,CAAC;AAC7E,UAAQ,IAAI,UAAU,QAAQ,6DAA6D,CAAC;AAC5F,UAAQ,IAAI,UAAU,QAAQ,sDAAsD,CAAC;AACrF,UAAQ,IACN,UACE,QACA,2EACD,CACF;AACD,UAAQ,KAAK,EAAE;;AAGjB,KAAI,CAAC,SAAS,CAAC,QAAQ;AACrB,UAAQ,IAAI,UAAU,OAAO,gCAAgC,CAAC;AAC9D,UAAQ,IACN,UACE,QACA,0EACD,CACF;AACD,UAAQ,KAAK,EAAE;;AAGjB,SAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,SAAQ,IAAI,UAAU,CAAC,QAAQ,OAAO,EAAE,yBAAyB,CAAC;AAClE,SAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,SAAQ,KAAK;CAEb,MAAMC,UAAmB,EAAE;AAC3B,KAAI,SACF,SAAQ,
KAAK;EACX,MAAM;EACN,aAAa;EACb,SAAS,OAAO,KAAK,cAAc;EACnC,aAAa,OAAO,KAAK,cAAc;EACxC,CAAC;AAKJ,KAAI,UACF,SAAQ,KAAK;EACX,MAAM;EACN,aAAa;EACb,SAAS,OAAO,KAAK,8BAA8B;EACnD,aAAa,OAAO,KAAK,gCAA8B;EACxD,CAAC;AAOJ,KAAI,WAAW,CAAC,WAAW;EACzB,MAAM,cAAc;EACpB,MAAM,iBAAiB;EAGvB,IAAI,gBAAgB,QAAQ,QAAQ,QAAQ,GAAG;AAE/C,MAAI,cAAc,SAAS,gBAAgB;AACzC,WAAQ,IACN,UAAU,OAAO,+BAA+B,eAAe,qBAAqB,CACrF;AACD,WAAQ,IACN,UAAU,QAAQ,gBAAgB,cAAc,KAAK,cAAc,OAAO,SAAS,CACpF;AACD,WAAQ,IAAI,UAAU,QAAQ,eAAe,eAAe,aAAa,CAAC;AAC1E,WAAQ,KAAK;AACb,WAAQ,IAAI,UAAU,UAAU,qDAAqD,CAAC;AACtF,WAAQ,IAAI,UAAU,QAAQ,cAAc,CAAC;AAC7C,WAAQ,IAAI,UAAU,QAAQ,uCAAuC,CAAC;AACtE,WAAQ,IAAI,UAAU,QAAQ,sCAAsC,CAAC;AACrE,WAAQ,KAAK,EAAE;;EAKjB,MAAM,YAAY,cAAc,OAAO,gBAAgB,IAAI;AAE3D,UAAQ,KAAK;GACX,MAAM;GACN,aAAa,iCAAiC,cAAc;GAC5D,SAAS,OAAO,KAAK,YAAY;GACjC,aAAa,OAAO,KAAK,UAAU;GACpC,CAAC;;AAKJ,KAAI,iBAAiB;AAEnB,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,uCAAqC;GAC1D,aAAa,OAAO,KAAK,uCAAqC;GAC/D,CAAC;AAGF,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,kCAAgC;GACrD,aAAa,OAAO,KAAK,kCAAgC;GAC1D,CAAC;AAIF,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,qCAAqC;GAC1D,aAAa,OAAO,KAAK,qCAAqC;GAC/D,CAAC;AAIF,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,sCAAsC;GAC3D,aAAa,OAAO,KAAK,sCAAsC;GAChE,CAAC;AAMF,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,kDAAkD;GACvE,aAAa,OAAO,KAAK,kDAAkD;GAC5E,CAAC;;AAOJ,KAAI,aAAa;AAKf,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,gBAAgB;GACrC,aAAa,OAAO,KAAK,gBAAgB;GAC1C,CAAC;AAEF,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,kBAAkB;GACvC,aAAa,OAAO,KAAK,kBAAkB;GAC5C,CAAC;AAMF,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,4BAA4B;GACjD,aAAa,OAAO,KAAK,4BAA4B;GACtD,CAAC;;AAGJ,KAAI;EACF,MAAM,SAAS,MAAM,WAAW;GAC9B,WAAW;GACC;GACZ;GACA;GACA;GACA;GACD,CAAC;AAEF,MAAI,QAAQ;AACV,WAAQ,KAAK;AACb,WAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,WAAQ,IAAI,UAAU,CAAC,QAAQ,OAAO,EAAE,qBAAqB,CAAC;AAC9D,WAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,WAAQ,KAAK;AACb,WAAQ,IAAI,UAAU,QAAQ,+CAA+C,CAAC;AAC9E,WAAQ,IAAI,UAAU,QAAQ,iCAAiC,SAAS,iBAAiB,CAAC;AAC1F,WAAQ,KAAK,EAAE;;AAIjB,MAAI,aAAa,OAAO,WAAW,OAAO,YAAY;AACpD,WAAQ,KAAK;AACb,WAAQ,IAAI,UAAU,SAAS,IAAI,OAAO,GAAG,CAAC,CAAC;AAC/C,WAAQ,IAAI,UAAU,CAAC,SAAS,OAAO,EAAE,qBAAqB,CAAC;AAC/D,WAAQ,IAAI,UAAU,SAAS,IAAI,OAAO,GAAG,CAAC,CAAC;AAC/C,WAAQ,KAAK;AACb,WAAQ,IAAI,UAAU,SAAS,4BAA4B,OAAO,aAAa,CAAC;AAChF,WAAQ,KAAK,EAAE;;AAGjB,MAAI,OAAO,WAAW,OAAO,cAAc,OAAO;AAChD,WAAQ,KAAK;GAEb,IAAI,iBAAiB,OAAO;AAE5B,OAAI,WAAW;IAEb,MAAM,EAAE,kBAAkB,MAAM,4BADf,KAAK,SAAS,EAAE,gBAAgB,QAAQ,EAGvD,gBACA,OACA,iBACA,WACD;AACD,qBAAiB;AAEjB,YAAQ,KAAK;AACb,YAAQ,IAAI,UAAU,QAAQ,oBAAoB,CAAC;AACnD,YAAQ,IAAI,UAAU,SAAS,qBAAqB,kBAAkB,CAAC;AACvE,QAAI,WACF,SAAQ,IAAI,UAAU,SAAS,6BAA6B,CAAC;;AAIjE,OAAI,mBAAmB;IACrB,MAAM,gBAAgB,KAAK,SAAS,EAAE,gBAAgB,aAAa;IACnE,IAAID;AACJ,QAAI,SAEF,mBADe,MAAM,qBAAqB,eAAe,MAAM,EACvC;IAE1B,MAAM,EAAE,kBAAkB,MAAM,sBAC9B,eACA,gBACA,OACA,eACD;AACD,qBAAiB;AACjB,YAAQ,KAAK;AACb,YAAQ,IAAI,UAAU,QAAQ,qBAAqB,CAAC;;GAGtD,IAAI;AACJ,OAAI,aAAa,kBACf,eAAc,MAAM,sBAAsB,gBAAgB,OAAO,QAAQ;OAEzE,eAAc,MAAM,YAAY,OAAO,YAAY,OAAO,QAAQ;GAIpE,MAAM,eAAe,gBAAgB,KAAK;AAqB1C,SAAM,kBApBW,eACf,OACA,MACA;IACE,UAAU,CAAC,CAAC;IACZ,WAAW,CAAC,CAAC;IACb,SAAS,WAAW;IACpB,WAAW,CAAC,CAAC;IACb,YAAY,CAAC,CAAC;IACd,UAAU,CAAC,CAAC;IACZ,iBAAiB,CAAC,CAAC;IACnB,aAAa,CAAC,CAAC;IACf,YAAY,CAAC,CAAC;IACf,EACD;IACE,mBAAmB;IACnB;IACA,WAAW,YAAY;IACxB,CACF,CACgC;;AAGnC,MAAI,OAAO,SAAS;AAClB,WAAQ,KAAK;AACb,WAAQ,IAAI,UAAU,SAAS,IAAI,OAAO,GAAG,CAAC,CAAC;AAC/C,WAAQ,IAAI,UAAU,CAAC,SAAS,OAAO,EAAE,qBAAqB,CAAC;AAC/D,WAAQ,IAAI,UAAU,SAAS,IAAI,OAAO,GAAG,CAAC,CAAC;;AAGjD,UAAQ,KAAK,OAAO,UAAU,IA
AI,EAAE;UAC7B,OAAO;AACd,UAAQ,MAAM,UAAU,OAAO,UAAW,MAAgB,UAAU,CAAC;AACrE,MAAI,QAAS,SAAQ,MAAO,MAAgB,MAAM;AAClD,UAAQ,KAAK,EAAE;;EAEjB,CACD,QAAQ,QAAQ,+BAA+B,CAC/C,OAAO,YAAY;AAClB,OAAM,aAAa;EACnB,CACD,QAAQ,UAAU,qCAAqC,CACvD,SAAS,mBAAmB,oCAAoC,CAChE,OAAO,6BAA6B,qDAAqD,CACzF,OAAO,6BAA6B,wCAAwC,CAC5E,OACC,iBACA,0IACD,CACA,OAAO,OAAO,SAAS,SAAS;CAC/B,MAAM,SAAS,OAAO;CACtB,MAAM,eAAe,QAAQ;CAC7B,MAAM,eAAe,QAAQ;CAC7B,MAAM,OAAO,QAAQ;AAGrB,KAAI,gBAAgB,gBAAgB,MAAM;AACxC,QAAM,sBAAsB;GAC1B;GACA;GACA,OAAO,OAAO,CAAC,KAAK,GAAG;GACxB,CAAC;AACF;;AAIF,KAAI,CAAC,QAAQ;AACX,UAAQ,MACN,UACE,OACA,gGACD,CACF;AACD,UAAQ,KAAK,EAAE;;AAIjB,KAAI,OAAO,SAAS,IAAI,IAAI,WAAW,OAAO,EAAE;EAE9C,MAAM,EAAE,qBAAW,MAAM,OAAO;AAChC,MAAI;AACF,SAAME,SAAO,OAAO;AACpB,WAAQ,IAAI,UAAU,SAAS,gBAAgB,SAAS,CAAC;WAClD,OAAO;AACd,WAAQ,MAAM,UAAU,OAAO,UAAW,MAAgB,UAAU,CAAC;AACrE,WAAQ,KAAK,EAAE;;OAIjB,OAAM,YAAY,OAAO;EAE3B,CACD,QAAQ,WAAW,4BAA4B,CAC/C,aAAa;AACZ,SAAQ,IAAI,gBAAgB,UAAU;EACtC,CACD,QAAQ,SAAS,mDAAmD,CACpE,OAAO,YAAY;AAClB,OAAM,iBAAiB;EACvB,CACD,QAAQ,UAAU,0CAA0C,CAC5D,SAAS,WAAW,oEAAoE,CACxF,OAAO,aAAa,iCAAiC,CACrD,OAAO,qBAAqB,2BAA2B,CACvD,OAAO,iBAAiB,wBAAwB,CAChD,OAAO,OAAO,SAAS,SAAS;CAC/B,MAAM,YAAY,OAAO;CACzB,MAAM,SAAS,QAAQ;CACvB,MAAM,gBAAiB,QAAQ,QAAmB,sBAAsB;CACxE,MAAM,UAAU,QAAQ;AAExB,SAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,SAAQ,IAAI,UAAU,CAAC,QAAQ,OAAO,EAAE,uBAAuB,CAAC;AAChE,SAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,SAAQ,KAAK;AAGb,KAAI,CAAC,WAAW,cAAc,EAAE;AAC9B,UAAQ,IAAI,UAAU,OAAO,oCAAoC,gBAAgB,CAAC;AAClF,UAAQ,IAAI,UAAU,QAAQ,qCAAqC,CAAC;AACpE,UAAQ,KAAK,EAAE;;CAIjB,IAAIC;AACJ,KAAI,WAAW;EACb,MAAM,OAAO,MAAM,kBAAkB,UAAU;AAC/C,MAAI,CAAC,MAAM;AACT,WAAQ,IAAI,UAAU,OAAO,uCAAuC,UAAU,GAAG,CAAC;AAClF,WAAQ,IACN,UAAU,QAAQ,qEAAqE,CACxF;AACD,WAAQ,IAAI,UAAU,QAAQ,0DAA0D,CAAC;AACzF,WAAQ,KAAK,EAAE;;AAEjB,aAAW,CAAC,KAAK;QACZ;AACL,aAAW,MAAM,iBAAiB;AAClC,MAAI,SAAS,WAAW,GAAG;AACzB,WAAQ,IAAI,UAAU,UAAU,kCAAkC,CAAC;AACnE,WAAQ,IAAI,UAAU,QAAQ,4DAA4D,CAAC;AAC3F,WAAQ,KAAK,EAAE;;;AAInB,SAAQ,IAAI,UAAU,SAAS,uBAAuB,gBAAgB,CAAC;AACvE,SAAQ,IAAI,UAAU,SAAS,SAAS,SAAS,OAAO,sBAAsB,CAAC;AAC/E,KAAI,OACF,SAAQ,IAAI,UAAU,QAAQ,sCAAsC,CAAC;AAEvE,SAAQ,KAAK;CAEb,IAAI,eAAe;CACnB,IAAI,YAAY;AAEhB,MAAK,MAAM,QAAQ,UAAU;AAC3B,MAAI,CAAC,KAAM;AAEX,UAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,UAAQ,IAAI,UAAU,SAAS,aAAa,UAAU,CAAC,QAAQ,OAAO,EAAE,KAAK,KAAK,GAAG,CAAC;AACtF,UAAQ,IAAI,UAAU,QAAQ,cAAc,cAAc,KAAK,QAAQ,GAAG,CAAC;AAE3E,MAAI,QAAQ;AACV,WAAQ,IAAI,UAAU,QAAQ,qCAAqC,CAAC;AACpE;AACA;;AAGF,MAAI;GAEF,MAAMF,UAAmB,EAAE;AAE3B,OAAI,KAAK,QAAQ,SACf,SAAQ,KAAK;IACX,MAAM;IACN,aAAa;IACb,SAAS,OAAO,KAAK,cAAc;IACnC,aAAa,OAAO,KAAK,cAAc;IACxC,CAAC;AAGJ,OAAI,KAAK,QAAQ,UACf,SAAQ,KAAK;IACX,MAAM;IACN,aAAa;IACb,SAAS,OAAO,KAAK,8BAA8B;IACnD,aAAa,OAAO,KAAK,gCAA8B;IACxD,CAAC;AAKJ,OAAI,KAAK,QAAQ,WAAW,CAAC,KAAK,QAAQ,WAAW;IACnD,MAAM,cAAc;IACpB,MAAM,YAAY,KAAK,QAAQ,QAAQ,OAAO,IAAoB,IAAI;AACtE,YAAQ,KAAK;KACX,MAAM;KACN,aAAa,iCAAiC,KAAK,QAAQ,QAAQ;KACnE,SAAS,OAAO,KAAK,YAAY;KACjC,aAAa,OAAO,KAAK,UAAU;KACpC,CAAC;;AAGJ,OAAI,KAAK,QAAQ,iBAAiB;AAChC,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,uCAAqC;KAC1D,aAAa,OAAO,KAAK,uCAAqC;KAC/D,CAAC;AACF,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,kCAAgC;KACrD,aAAa,OAAO,KAAK,kCAAgC;KAC1D,CAAC;AACF,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,qCAAqC;KAC1D,aAAa,OAAO,KAAK,qCAAqC;KAC/D,CAAC;AACF,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,sCAAsC;KAC3D,aAAa,OAAO,KAAK,sCAAsC;KAChE,CAAC;AACF,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,kDAAkD;KACvE,aAAa,OAAO,KAAK,kDAAkD;KAC5E,CAAC;;AAGJ,OAAI,KAAK,QAAQ,aAAa;AAC5B,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,gBAAgB;KACrC,aAA
a,OAAO,KAAK,gBAAgB;KAC1C,CAAC;AACF,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,kBAAkB;KACvC,aAAa,OAAO,KAAK,kBAAkB;KAC5C,CAAC;AACF,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,4BAA4B;KACjD,aAAa,OAAO,KAAK,4BAA4B;KACtD,CAAC;;GAKJ,MAAM,aAAa,KADH,KAAK,SAAS,EAAE,gBAAgB,OAAO,EACtB,GAAG,KAAK,KAAK,UAAU;AAGxD,OAAI,QAAQ,SAAS,GAAG;AAUtB,QAAI,EATW,MAAM,WAAW;KAC9B,WAAW;KACX;KACA;KACA,QAAQ;KACR,QAAQ;KACR;KACD,CAAC,EAEU,SAAS;AACnB,aAAQ,IAAI,UAAU,OAAO,8BAA8B,CAAC;AAC5D;AACA;;AAIF,QAAI,QAAQ,aAAa,SACvB,KAAI;KACF,MAAM,EAAE,yBAAa,MAAM,OAAO;AAClC,gBAAS,qCAAqC,WAAW,IAAI,EAC3D,OAAO,QACR,CAAC;AACF,SAAI,QACF,SAAQ,IAAI,UAAU,QAAQ,qBAAqB,CAAC;YAEhD;AACN,aAAQ,IAAI,UAAU,UAAU,iCAAiC,CAAC;;;GAKxE,IAAI,iBAAiB,QAAQ,SAAS,IAAI,aAAa;AAKvD,OADqB,KAAK,QAAQ,aAAa,CAAC,CAAC,KAAK,QAAQ,OAC5C;IAEhB,MAAM,gBACJ,KAAK,QAAQ,WAAW,KAAK,QAAQ,SAAS;IAEhD,MAAM,EAAE,kBAAkB,MAAM,4BADf,KAAK,SAAS,EAAE,gBAAgB,QAAQ,EAGvD,gBACA,KAAK,MACL,eACA,KAAK,QAAQ,cAAc,MAC5B;AACD,qBAAiB;AACjB,QAAI,SAAS;AACX,aAAQ,IAAI,UAAU,QAAQ,kCAAkC,CAAC;AACjE,SAAI,KAAK,QAAQ,WACf,SAAQ,IAAI,UAAU,QAAQ,6BAA6B,CAAC;;AAIhE,QAAI,KAAK,QAAQ,SAAS,CAAC,KAAK,QAAQ,WAAW;AACjD,UAAK,QAAQ,YAAY;AACzB,UAAK,QAAQ,UAAU,KAAK,QAAQ;AACpC,YAAO,KAAK,QAAQ;;;AAIxB,OAAI,KAAK,QAAQ,YAAY;IAC3B,MAAM,gBAAgB,KAAK,SAAS,EAAE,gBAAgB,aAAa;IACnE,IAAID;AACJ,QAAI,KAAK,QAAQ,SAEf,mBADe,MAAM,qBAAqB,eAAe,KAAK,KAAK,EAC3C;IAE1B,MAAM,EAAE,kBAAkB,MAAM,sBAC9B,eACA,gBACA,KAAK,MACL,eACD;AACD,qBAAiB;AACjB,QAAI,QACF,SAAQ,IAAI,UAAU,QAAQ,mCAAmC,CAAC;;GAKtE,MAAM,EAAE,oBAAS,kBAAQ,sBAAU,UAAU,MAAM,OAAO;GAC1D,IAAI,YAAY,KAAK;AAGrB,OAAI,CAAC,WAAW;IACd,MAAM,iBAAiB;KACrB,KAAK,SAAS,EAAE,aAAa;KAC7B,KAAK,SAAS,EAAE,MAAM;KACtB,KAAK,SAAS,EAAE,OAAO;KACvB;KACA;KACA,KAAK,SAAS,EAAE,gBAAgB,UAAU;KAC3C;AAED,SAAK,MAAM,OAAO,gBAAgB;KAChC,MAAM,eAAe,KAAK,KAAK,KAAK,KAAK;AACzC,SAAI,WAAW,aAAa,CAC1B,KAAI;AAEF,WADc,MAAM,MAAM,aAAa,EAC7B,gBAAgB,EAAE;OAC1B,MAAM,SAAS,MAAMI,WAAS,aAAa;AAC3C,WACE,OAAO,SAAS,oBAAoB,IACpC,OAAO,SAAS,qBAAqB,IACrC,OAAO,SAAS,0BAA0B,EAC1C;AACA,oBAAY;AACZ,YAAI,QACF,SAAQ,IAAI,UAAU,QAAQ,6BAA6B,YAAY,CAAC;AAE1E;;;aAGE;;;AAQd,OAAI,UACF,KAAI;AACF,QAAI,WAAW,UAAU,EAEvB;SADsB,MAAMA,WAAS,UAAU,KACzB,gBAAgB;AACpC,YAAMF,SAAO,UAAU;AACvB,YAAMG,UAAQ,gBAAgB,UAAU;AACxC,UAAI,QACF,SAAQ,IAAI,UAAU,QAAQ,sBAAsB,YAAY,CAAC;;WAGhE;AAEL,WAAMA,UAAQ,gBAAgB,UAAU;AACxC,SAAI,QACF,SAAQ,IAAI,UAAU,QAAQ,wBAAwB,YAAY,CAAC;;AAIvE,SAAK,YAAY;YACV,cAAc;AACrB,YAAQ,IACN,UACE,UACA,mCAAoC,aAAuB,UAC5D,CACF;;AAKL,QAAK,6BAAY,IAAI,MAAM,EAAC,aAAa;AACzC,QAAK,qBAAqB;AAC1B,QAAK,eAAe,gBAAgB,cAAc;AAClD,QAAK,oBAAoB;AACzB,SAAM,kBAAkB,KAAK;AAE7B,WAAQ,IAAI,UAAU,SAAS,2BAA2B,CAAC;AAC3D;WACO,OAAO;AACd,WAAQ,IAAI,UAAU,OAAO,cAAe,MAAgB,UAAU,CAAC;AACvE,OAAI,QACF,SAAQ,MAAO,MAAgB,MAAM;AAEvC;;;AAIJ,SAAQ,KAAK;AACb,SAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,KAAI,QAAQ;AACV,UAAQ,IAAI,UAAU,CAAC,QAAQ,OAAO,EAAE,qBAAqB,CAAC;AAC9D,UAAQ,IAAI,UAAU,QAAQ,kBAAkB,aAAa,YAAY,CAAC;YACjE,cAAc,GAAG;AAC1B,UAAQ,IAAI,UAAU,CAAC,SAAS,OAAO,EAAE,oBAAoB,CAAC;AAC9D,UAAQ,IAAI,UAAU,QAAQ,aAAa,aAAa,YAAY,CAAC;QAChE;AACL,UAAQ,IAAI,UAAU,CAAC,UAAU,OAAO,EAAE,gCAAgC,CAAC;AAC3E,UAAQ,IAAI,UAAU,QAAQ,cAAc,aAAa,YAAY,YAAY,CAAC;;AAEpF,SAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;EAC9C,CACD,KAAK,CACL,OAAO,QAAe;AACrB,SAAQ,MAAM,IAAI;AAClB,SAAQ,KAAK,EAAE;EACf"}
|
|
1
|
+
{"version":3,"file":"cli.mjs","names":["sessionsScript: string | undefined","patches: Patch[]","unlink","metaList: Awaited<ReturnType<typeof loadAliasMetadata>>[]","readlink","symlink"],"sources":["../src/websearch-patch.ts","../src/statusline-patch.ts","../src/sessions-patch.ts","../src/cli.ts"],"sourcesContent":["import type { Patch } from \"./patcher.ts\";\nimport { writeFile, chmod, mkdir } from \"node:fs/promises\";\nimport { join } from \"node:path\";\nimport { existsSync } from \"node:fs\";\n\n/**\n * WebSearch Patch Generator\n *\n * Since injecting code directly into binary is complex (requires exact byte length matching),\n * we use a more practical approach:\n *\n * 1. --websearch option will:\n * a) Generate a standalone search proxy server script\n * b) Modify droid's API URL to point to local proxy (using --api-base)\n * c) Create a wrapper script to start both proxy and droid\n *\n * Environment variables:\n * - GOOGLE_PSE_API_KEY: Google Programmable Search Engine API Key\n * - GOOGLE_PSE_CX: Google Custom Search Engine ID\n * - If not set, will fallback to DuckDuckGo\n */\n\n/**\n * Generate search proxy server code\n */\nfunction generateSearchProxyServerCode(): string {\n return `#!/usr/bin/env node\n/**\n * Droid WebSearch Proxy Server\n * Auto-generated by droid-patch --websearch\n * \n * Supports:\n * - Google PSE (requires GOOGLE_PSE_API_KEY and GOOGLE_PSE_CX)\n * - DuckDuckGo (free fallback)\n */\n\nconst http = require('http');\nconst https = require('https');\n\nconst FACTORY_API = 'https://api.factory.ai';\n\n// Auto-find available port\nfunction findAvailablePort(startPort = 23119) {\n return new Promise((resolve, reject) => {\n const net = require('net');\n const server = net.createServer();\n \n server.listen(startPort, '127.0.0.1', () => {\n const port = server.address().port;\n server.close(() => resolve(port));\n });\n \n server.on('error', (err) => {\n if (err.code === 'EADDRINUSE') {\n // Port is in use, try next one\n resolve(findAvailablePort(startPort + 1));\n } else {\n reject(err);\n }\n });\n });\n}\n\nlet PORT = process.env.SEARCH_PROXY_PORT || 23119;\n\n// === Search Implementation ===\n\nasync function searchGooglePSE(query, numResults, apiKey, cx) {\n // Use curl command\n const { execSync } = require('child_process');\n \n const url = 'https://www.googleapis.com/customsearch/v1?key=' + apiKey + '&cx=' + cx + '&q=' + encodeURIComponent(query) + '&num=' + Math.min(numResults, 10);\n const curlCmd = \\`curl -s \"\\${url}\"\\`;\n \n try {\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n \n return (data.items || []).map(item => ({\n title: item.title,\n url: item.link,\n snippet: item.snippet,\n publishedDate: null,\n author: null,\n score: null\n }));\n } catch (e) {\n throw new Error('Google PSE error: ' + e.message);\n }\n}\n\nasync function searchDuckDuckGo(query, numResults) {\n // Use curl command, because Node.js fetch may have issues in some environments\n const { execSync } = require('child_process');\n\n // Method 1: Try using DuckDuckGo HTML lite version (via curl)\n try {\n const curlCmd = \\`curl -s -X POST \"https://lite.duckduckgo.com/lite/\" -H \"Content-Type: application/x-www-form-urlencoded\" -H \"User-Agent: Mozilla/5.0\" -d \"q=\\${encodeURIComponent(query)}\"\\`;\n const html = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n\n if (html && html.length > 1000) {\n const results = parseDDGLiteHTML(html, numResults);\n if (results.length > 0) {\n 
console.error('[search] DDG lite returned ' + results.length + ' results');\n return results;\n }\n }\n } catch (e) {\n console.error('[search] DDG lite (curl) failed:', e.message);\n }\n\n // Method 2: Fallback to Instant Answer API (via curl)\n try {\n const apiUrl = 'https://api.duckduckgo.com/?q=' + encodeURIComponent(query) + '&format=json&no_html=1&skip_disambig=1';\n const curlCmd = \\`curl -s \"\\${apiUrl}\" -H \"User-Agent: Mozilla/5.0\"\\`;\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n \n const results = [];\n\n if (data.Abstract && data.AbstractURL) {\n results.push({\n title: data.Heading || query,\n url: data.AbstractURL,\n snippet: data.Abstract,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n\n for (const topic of (data.RelatedTopics || [])) {\n if (results.length >= numResults) break;\n if (topic.Text && topic.FirstURL) {\n results.push({\n title: topic.Text.substring(0, 100),\n url: topic.FirstURL,\n snippet: topic.Text,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n if (topic.Topics) {\n for (const st of topic.Topics) {\n if (results.length >= numResults) break;\n if (st.Text && st.FirstURL) {\n results.push({\n title: st.Text.substring(0, 100),\n url: st.FirstURL,\n snippet: st.Text,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n }\n }\n }\n\n if (results.length > 0) {\n console.error('[search] DDG API returned ' + results.length + ' results');\n return results;\n }\n } catch (e) {\n console.error('[search] DDG API (curl) failed:', e.message);\n }\n\n return [];\n}\n\n// Parse DuckDuckGo Lite HTML\nfunction parseDDGLiteHTML(html, maxResults) {\n const results = [];\n\n // Match result links - DuckDuckGo Lite format\n // <a rel=\"nofollow\" href=\"URL\">TITLE</a>\n const linkRegex = /<a[^>]+rel=\"nofollow\"[^>]+href=\"([^\"]+)\"[^>]*>([^<]+)<\\\\/a>/gi;\n const snippetRegex = /<td[^>]*class=\"result-snippet\"[^>]*>([^<]*)<\\\\/td>/gi;\n\n const links = [];\n let match;\n\n // Extract all links\n while ((match = linkRegex.exec(html)) !== null && links.length < maxResults) {\n let url = match[1];\n // Skip DuckDuckGo internal links\n if (url.includes('duckduckgo.com') && !url.includes('uddg=')) continue;\n // Decode redirect URL\n if (url.includes('uddg=')) {\n const uddgMatch = url.match(/uddg=([^&]+)/);\n if (uddgMatch) url = decodeURIComponent(uddgMatch[1]);\n }\n links.push({\n url: url,\n title: decodeHTMLEntities(match[2].trim())\n });\n }\n\n // Extract snippets\n const snippets = [];\n while ((match = snippetRegex.exec(html)) !== null && snippets.length < maxResults) {\n snippets.push(decodeHTMLEntities(match[1].trim()));\n }\n\n // Combine results\n for (let i = 0; i < links.length && results.length < maxResults; i++) {\n results.push({\n title: links[i].title,\n url: links[i].url,\n snippet: snippets[i] || '',\n publishedDate: null,\n author: null,\n score: null\n });\n }\n \n return results;\n}\n\nfunction decodeHTMLEntities(str) {\n return str\n .replace(/&/g, '&')\n .replace(/</g, '<')\n .replace(/>/g, '>')\n .replace(/"/g, '\"')\n .replace(/'/g, \"'\")\n .replace(/ /g, ' ');\n}\n\nasync function search(query, numResults = 10) {\n const googleApiKey = process.env.GOOGLE_PSE_API_KEY;\n const googleCx = process.env.GOOGLE_PSE_CX;\n\n // Try Google PSE first\n if (googleApiKey && googleCx) {\n try {\n console.error('[search] Trying Google PSE...');\n const results = await searchGooglePSE(query, numResults, googleApiKey, googleCx);\n if 
(results.length > 0) {\n console.error('[search] Google PSE returned ' + results.length + ' results');\n return { results, source: 'google-pse' };\n }\n } catch (e) {\n console.error('[search] Google PSE failed:', e.message);\n }\n }\n\n // Fallback to DuckDuckGo\n try {\n console.error('[search] Using DuckDuckGo...');\n const results = await searchDuckDuckGo(query, numResults);\n console.error('[search] DuckDuckGo returned ' + results.length + ' results');\n return { results, source: 'duckduckgo' };\n } catch (e) {\n console.error('[search] DuckDuckGo failed:', e.message);\n }\n\n return { results: [], source: 'none' };\n}\n\n// === HTTP Server ===\n\nconst server = http.createServer(async (req, res) => {\n const url = new URL(req.url, 'http://' + req.headers.host);\n\n // Health check\n if (url.pathname === '/health') {\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ \n status: 'ok',\n google: !!(process.env.GOOGLE_PSE_API_KEY && process.env.GOOGLE_PSE_CX),\n duckduckgo: true\n }));\n return;\n }\n\n // Search endpoint\n if (url.pathname === '/api/tools/exa/search' && req.method === 'POST') {\n let body = '';\n req.on('data', chunk => body += chunk);\n req.on('end', async () => {\n try {\n const { query, numResults } = JSON.parse(body);\n console.error('[search] Query: \"' + query + '\"');\n \n const { results, source } = await search(query, numResults || 10);\n console.error('[search] ' + results.length + ' results from ' + source);\n \n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ results }));\n } catch (e) {\n console.error('[search] Error:', e);\n res.writeHead(500, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: String(e), results: [] }));\n }\n });\n return;\n }\n\n // Proxy other requests to Factory API\n console.error('[proxy] ' + req.method + ' ' + url.pathname);\n \n const proxyUrl = new URL(FACTORY_API + url.pathname + url.search);\n \n const proxyReq = https.request(proxyUrl, {\n method: req.method,\n headers: { ...req.headers, host: proxyUrl.host }\n }, proxyRes => {\n res.writeHead(proxyRes.statusCode, proxyRes.headers);\n proxyRes.pipe(res);\n });\n\n proxyReq.on('error', e => {\n console.error('[proxy] Error:', e.message);\n res.writeHead(502, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'Proxy failed' }));\n });\n\n if (req.method !== 'GET' && req.method !== 'HEAD') {\n req.pipe(proxyReq);\n } else {\n proxyReq.end();\n }\n});\n\n// Start server (async, auto-find available port)\n(async () => {\n const fs = require('fs');\n const path = require('path');\n\n // If port not specified, auto-find available port\n if (!process.env.SEARCH_PROXY_PORT) {\n PORT = await findAvailablePort(23119);\n }\n\n server.listen(PORT, '127.0.0.1', () => {\n const hasGoogle = process.env.GOOGLE_PSE_API_KEY && process.env.GOOGLE_PSE_CX;\n\n // Write port number to temp file for wrapper script to read\n const portFile = process.env.SEARCH_PROXY_PORT_FILE || path.join(require('os').tmpdir(), 'droid-search-proxy-' + process.pid + '.port');\n fs.writeFileSync(portFile, PORT.toString());\n\n // Output port number to stdout (for parent process to capture)\n console.log('PORT=' + PORT);\n \n console.error('');\n console.error('╔═══════════════════════════════════════════════════════════════╗');\n console.error('║ Droid WebSearch Proxy ║');\n console.error('╠═══════════════════════════════════════════════════════════════╣');\n console.error('║ 🔍 Google PSE: ' + 
(hasGoogle ? 'Configured ✓' : 'Not set (set GOOGLE_PSE_API_KEY & CX)').padEnd(45) + '║');\n console.error('║ 🦆 DuckDuckGo: Always available ║');\n console.error('║ 🚀 Server: http://127.0.0.1:' + PORT + ' ║'.slice(0, 65) + '║');\n console.error('╚═══════════════════════════════════════════════════════════════╝');\n console.error('');\n });\n})();\n\n// Handle graceful shutdown\nprocess.on('SIGTERM', () => server.close());\nprocess.on('SIGINT', () => server.close());\n`;\n}\n\n/**\n * Generate wrapper script, auto-start proxy and droid\n */\nfunction generateWrapperScript(droidPath: string, proxyScriptPath: string): string {\n return `#!/bin/bash\n# Droid with WebSearch Proxy\n# Auto-generated by droid-patch --websearch\n\nPROXY_SCRIPT=\"${proxyScriptPath}\"\nDROID_BIN=\"${droidPath}\"\nPORT_FILE=\"/tmp/droid-search-proxy-$$.port\"\n\n# Start proxy and get dynamic port\nstart_proxy() {\n # Start proxy, capture output to get port\n SEARCH_PROXY_PORT_FILE=\"$PORT_FILE\" node \"$PROXY_SCRIPT\" &\n PROXY_PID=$!\n\n # Wait for proxy to start and get port\n for i in {1..20}; do\n if [ -f \"$PORT_FILE\" ]; then\n PORT=$(cat \"$PORT_FILE\")\n if curl -s \"http://127.0.0.1:$PORT/health\" > /dev/null 2>&1; then\n echo \"[websearch] Proxy started on port $PORT\"\n return 0\n fi\n fi\n sleep 0.2\n done\n\n echo \"[websearch] Failed to start proxy\"\n kill $PROXY_PID 2>/dev/null\n return 1\n}\n\n# Cleanup function\ncleanup() {\n [ -n \"$PROXY_PID\" ] && kill $PROXY_PID 2>/dev/null\n [ -f \"$PORT_FILE\" ] && rm -f \"$PORT_FILE\"\n}\ntrap cleanup EXIT\n\n# Start proxy\nif ! start_proxy; then\n exit 1\nfi\n\n# Run droid\nexport FACTORY_API_BASE_URL_OVERRIDE=\"http://127.0.0.1:$PORT\"\nexec \"$DROID_BIN\" \"$@\"\n`;\n}\n\n/**\n * Generate WebSearch Patch\n *\n * Since injecting code directly into binary is complex, we use the following strategy:\n * 1. Create proxy server script\n * 2. Modify API URL to point to local\n * 3. 
Return a combined patch\n */\nexport function generateWebSearchPatch(): Patch | null {\n // Return a URL replacement patch\n // Use local proxy port 23119 (idle port)\n const originalUrl = \"https://api.factory.ai\";\n const localUrl = \"http://127.0.0.1:23119\";\n\n // Need to pad to same length\n if (localUrl.length > originalUrl.length) {\n console.error(`[websearch] Local URL too long: ${localUrl.length} > ${originalUrl.length}`);\n return null;\n }\n\n const paddedUrl = localUrl.padEnd(originalUrl.length, \" \");\n\n return {\n name: \"webSearch\",\n description: `Replace API URL with local proxy (${localUrl})`,\n pattern: Buffer.from(originalUrl),\n replacement: Buffer.from(paddedUrl),\n };\n}\n\n/**\n * Create WebSearch proxy files\n */\nexport async function createWebSearchProxyFiles(\n outputDir: string,\n droidPath: string,\n aliasName: string,\n): Promise<{ proxyScript: string; wrapperScript: string }> {\n // Ensure directory exists\n if (!existsSync(outputDir)) {\n await mkdir(outputDir, { recursive: true });\n }\n\n const proxyScriptPath = join(outputDir, `${aliasName}-search-proxy.js`);\n const wrapperScriptPath = join(outputDir, `${aliasName}-with-search`);\n\n // Write proxy server script\n await writeFile(proxyScriptPath, generateSearchProxyServerCode());\n console.log(`[*] Created search proxy: ${proxyScriptPath}`);\n\n // Write wrapper script\n await writeFile(wrapperScriptPath, generateWrapperScript(droidPath, proxyScriptPath));\n await chmod(wrapperScriptPath, 0o755);\n console.log(`[*] Created wrapper script: ${wrapperScriptPath}`);\n\n return {\n proxyScript: proxyScriptPath,\n wrapperScript: wrapperScriptPath,\n };\n}\n\n/**\n * Get proxy server code (for export)\n */\nexport function getSearchProxyCode(): string {\n return generateSearchProxyServerCode();\n}\n\n/**\n * Generate Bun preload script\n * This script executes before droid main program, starts search proxy\n */\nfunction generatePreloadScript(): string {\n return `// Droid WebSearch Preload Script\n// Auto-generated by droid-patch --websearch-preload\n// Start search proxy before droid main program\n\nconst http = require('http');\nconst https = require('https');\nconst { execSync } = require('child_process');\n\nconst PORT = process.env.DROID_SEARCH_PORT || 23119;\nconst FACTORY_API = 'https://api.factory.ai';\n\n// Google PSE search\nasync function searchGooglePSE(query, num) {\n const apiKey = process.env.GOOGLE_PSE_API_KEY;\n const cx = process.env.GOOGLE_PSE_CX;\n if (!apiKey || !cx) return null;\n \n try {\n const url = \\`https://www.googleapis.com/customsearch/v1?key=\\${apiKey}&cx=\\${cx}&q=\\${encodeURIComponent(query)}&num=\\${Math.min(num, 10)}\\`;\n const res = await fetch(url);\n const data = await res.json();\n if (data.error) return null;\n return (data.items || []).map(item => ({\n title: item.title,\n url: item.link,\n content: item.snippet || ''\n }));\n } catch (e) {\n return null;\n }\n}\n\n// DuckDuckGo search (use curl for reliability)\nfunction searchDuckDuckGo(query, num) {\n try {\n const url = \\`https://api.duckduckgo.com/?q=\\${encodeURIComponent(query)}&format=json&no_html=1&skip_disambig=1\\`;\n const output = execSync(\\`curl -s \"\\${url}\"\\`, { encoding: 'utf8', timeout: 10000 });\n const data = JSON.parse(output);\n const results = [];\n\n if (data.AbstractText && data.AbstractURL) {\n results.push({ title: data.Heading || query, url: data.AbstractURL, content: data.AbstractText });\n }\n\n for (const t of (data.RelatedTopics || [])) {\n if (results.length >= num) 
break;\n if (t.Text && t.FirstURL) {\n results.push({ title: t.Text.split(' - ')[0], url: t.FirstURL, content: t.Text });\n }\n // Handle subcategories\n if (t.Topics) {\n for (const sub of t.Topics) {\n if (results.length >= num) break;\n if (sub.Text && sub.FirstURL) {\n results.push({ title: sub.Text.split(' - ')[0], url: sub.FirstURL, content: sub.Text });\n }\n }\n }\n }\n return results;\n } catch (e) {\n return [];\n }\n}\n\n// Search function\nasync function search(query, num) {\n // Try Google PSE first\n const googleResults = await searchGooglePSE(query, num);\n if (googleResults && googleResults.length > 0) {\n console.error('[preload-search] Using Google PSE');\n return googleResults;\n }\n\n // Fallback to DuckDuckGo\n console.error('[preload-search] Using DuckDuckGo');\n return searchDuckDuckGo(query, num);\n}\n\n// Check if port is already in use\nfunction isPortInUse(port) {\n try {\n execSync(\\`curl -s http://127.0.0.1:\\${port}/health\\`, { timeout: 1000 });\n return true;\n } catch {\n return false;\n }\n}\n\n// Skip if proxy already running\nif (isPortInUse(PORT)) {\n console.error(\\`[preload] Search proxy already running on port \\${PORT}\\`);\n} else {\n // Start proxy server\n const server = http.createServer(async (req, res) => {\n const url = new URL(req.url, \\`http://\\${req.headers.host}\\`);\n\n // Health check\n if (url.pathname === '/health') {\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ status: 'ok' }));\n return;\n }\n\n // Search endpoint\n if (url.pathname === '/api/tools/exa/search' && req.method === 'POST') {\n let body = '';\n req.on('data', c => body += c);\n req.on('end', async () => {\n try {\n const { query, numResults } = JSON.parse(body);\n console.error(\\`[preload-search] Query: \"\\${query}\"\\`);\n const results = await search(query, numResults || 10);\n console.error(\\`[preload-search] Found \\${results.length} results\\`);\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ results }));\n } catch (e) {\n console.error('[preload-search] Error:', e.message);\n res.writeHead(500, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: String(e), results: [] }));\n }\n });\n return;\n }\n\n // Proxy other requests to Factory API\n const proxyUrl = new URL(FACTORY_API + url.pathname + url.search);\n const proxyReq = https.request(proxyUrl, {\n method: req.method,\n headers: { ...req.headers, host: proxyUrl.host }\n }, proxyRes => {\n res.writeHead(proxyRes.statusCode, proxyRes.headers);\n proxyRes.pipe(res);\n });\n proxyReq.on('error', (e) => {\n console.error('[preload-proxy] Error:', e.message);\n res.writeHead(502, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'Proxy failed' }));\n });\n if (req.method !== 'GET' && req.method !== 'HEAD') {\n req.pipe(proxyReq);\n } else {\n proxyReq.end();\n }\n });\n\n server.listen(PORT, '127.0.0.1', () => {\n console.error(\\`[preload] Search proxy started on http://127.0.0.1:\\${PORT}\\`);\n });\n}\n`;\n}\n\n/**\n * Generate bunfig.toml content\n */\nfunction generateBunfigToml(preloadScriptPath: string): string {\n return `# Droid WebSearch Configuration\n# Auto-generated by droid-patch --websearch-preload\n\npreload = [\"${preloadScriptPath}\"]\n`;\n}\n\n/**\n * Generate preload wrapper script\n * This script cd's to the bunfig.toml directory, then executes droid\n */\nfunction generatePreloadWrapperScript(droidPath: string, bunfigDir: string): string {\n return 
`#!/bin/bash\n# Droid with WebSearch (Preload)\n# Auto-generated by droid-patch --preload\n\nBUNFIG_DIR=\"${bunfigDir}\"\nDROID_BIN=\"${droidPath}\"\nORIGINAL_DIR=\"$(pwd)\"\n\n# cd to bunfig.toml directory (Bun reads bunfig.toml from cwd)\ncd \"$BUNFIG_DIR\"\n\n# Execute droid, pass all arguments, set working directory to original\nexec \"$DROID_BIN\" --cwd \"$ORIGINAL_DIR\" \"$@\"\n`;\n}\n\n/**\n * Create WebSearch files using Preload method\n *\n * Advantages:\n * - No need to modify binary\n * - Uses Bun's native preload mechanism\n *\n * Files created:\n * - preload script (search proxy)\n * - bunfig.toml (Bun configuration)\n * - wrapper script (directly executable command)\n */\nexport async function createWebSearchPreloadFiles(\n droidDir: string,\n droidPath: string,\n aliasName: string,\n): Promise<{\n preloadScript: string;\n bunfigPath: string;\n wrapperScript: string;\n}> {\n // Ensure directory exists\n if (!existsSync(droidDir)) {\n await mkdir(droidDir, { recursive: true });\n }\n\n const preloadScriptPath = join(droidDir, `${aliasName}-search-preload.js`);\n const bunfigPath = join(droidDir, \"bunfig.toml\");\n const wrapperScriptPath = join(droidDir, aliasName);\n\n // Write preload script\n await writeFile(preloadScriptPath, generatePreloadScript());\n console.log(`[*] Created preload script: ${preloadScriptPath}`);\n\n // Write bunfig.toml\n await writeFile(bunfigPath, generateBunfigToml(preloadScriptPath));\n console.log(`[*] Created bunfig.toml: ${bunfigPath}`);\n\n // Write wrapper script\n await writeFile(wrapperScriptPath, generatePreloadWrapperScript(droidPath, droidDir));\n await chmod(wrapperScriptPath, 0o755);\n console.log(`[*] Created wrapper: ${wrapperScriptPath}`);\n\n return {\n preloadScript: preloadScriptPath,\n bunfigPath: bunfigPath,\n wrapperScript: wrapperScriptPath,\n };\n}\n\n/**\n * Get preload script code (for export)\n */\nexport function getPreloadScriptCode(): string {\n return generatePreloadScript();\n}\n\n/**\n * Generate unified Fetch Hook Preload script\n * Directly hooks globalThis.fetch, no proxy server needed\n * @internal Reserved for future use - alternative to proxy server approach\n */\nfunction _generateFetchHookPreload(): string {\n return `// Droid WebSearch Fetch Hook\n// Auto-generated by droid-patch --websearch\n// Hook globalThis.fetch to intercept search requests\n\nconst DEBUG = process.env.DROID_SEARCH_DEBUG === '1';\n\nfunction log(...args) {\n if (DEBUG) console.error('[websearch]', ...args);\n}\n\n// === Search Implementation ===\n\nasync function searchGooglePSE(query, numResults) {\n const apiKey = process.env.GOOGLE_PSE_API_KEY;\n const cx = process.env.GOOGLE_PSE_CX;\n if (!apiKey || !cx) return null;\n\n try {\n const url = \\`https://www.googleapis.com/customsearch/v1?key=\\${apiKey}&cx=\\${cx}&q=\\${encodeURIComponent(query)}&num=\\${Math.min(numResults, 10)}\\`;\n const res = await fetch(url);\n const data = await res.json();\n if (data.error) {\n log('Google PSE error:', data.error.message);\n return null;\n }\n return (data.items || []).map(item => ({\n title: item.title,\n url: item.link,\n content: item.snippet || '',\n publishedDate: null,\n author: null,\n score: null\n }));\n } catch (e) {\n log('Google PSE failed:', e.message);\n return null;\n }\n}\n\nasync function searchDuckDuckGo(query, numResults) {\n const { execSync } = require('child_process');\n\n // Method 1: Try DuckDuckGo HTML lite\n try {\n const curlCmd = \\`curl -s -X POST \"https://lite.duckduckgo.com/lite/\" -H \"Content-Type: 
application/x-www-form-urlencoded\" -H \"User-Agent: Mozilla/5.0\" -d \"q=\\${encodeURIComponent(query)}\"\\`;\n const html = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n\n if (html && html.length > 1000) {\n const results = parseDDGLiteHTML(html, numResults);\n if (results.length > 0) {\n log('DDG lite:', results.length, 'results');\n return results;\n }\n }\n } catch (e) {\n log('DDG lite failed:', e.message);\n }\n\n // Method 2: Fallback to Instant Answer API\n try {\n const apiUrl = \\`https://api.duckduckgo.com/?q=\\${encodeURIComponent(query)}&format=json&no_html=1&skip_disambig=1\\`;\n const curlCmd = \\`curl -s \"\\${apiUrl}\" -H \"User-Agent: Mozilla/5.0\"\\`;\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n\n const results = [];\n\n if (data.Abstract && data.AbstractURL) {\n results.push({\n title: data.Heading || query,\n url: data.AbstractURL,\n content: data.Abstract,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n\n for (const topic of (data.RelatedTopics || [])) {\n if (results.length >= numResults) break;\n if (topic.Text && topic.FirstURL) {\n results.push({\n title: topic.Text.substring(0, 100),\n url: topic.FirstURL,\n content: topic.Text,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n if (topic.Topics) {\n for (const st of topic.Topics) {\n if (results.length >= numResults) break;\n if (st.Text && st.FirstURL) {\n results.push({\n title: st.Text.substring(0, 100),\n url: st.FirstURL,\n content: st.Text,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n }\n }\n }\n\n if (results.length > 0) {\n log('DDG API:', results.length, 'results');\n return results;\n }\n } catch (e) {\n log('DDG API failed:', e.message);\n }\n\n return [];\n}\n\nfunction parseDDGLiteHTML(html, maxResults) {\n const results = [];\n const linkRegex = /<a[^>]+rel=\"nofollow\"[^>]+href=\"([^\"]+)\"[^>]*>([^<]+)<\\\\/a>/gi;\n const snippetRegex = /<td[^>]*class=\"result-snippet\"[^>]*>([^<]*)<\\\\/td>/gi;\n\n const links = [];\n let match;\n\n while ((match = linkRegex.exec(html)) !== null && links.length < maxResults) {\n let url = match[1];\n if (url.includes('duckduckgo.com') && !url.includes('uddg=')) continue;\n if (url.includes('uddg=')) {\n const uddgMatch = url.match(/uddg=([^&]+)/);\n if (uddgMatch) url = decodeURIComponent(uddgMatch[1]);\n }\n links.push({\n url: url,\n title: decodeHTMLEntities(match[2].trim())\n });\n }\n\n const snippets = [];\n while ((match = snippetRegex.exec(html)) !== null && snippets.length < maxResults) {\n snippets.push(decodeHTMLEntities(match[1].trim()));\n }\n\n for (let i = 0; i < links.length && results.length < maxResults; i++) {\n results.push({\n title: links[i].title,\n url: links[i].url,\n content: snippets[i] || '',\n publishedDate: null,\n author: null,\n score: null\n });\n }\n\n return results;\n}\n\nfunction decodeHTMLEntities(str) {\n return str\n .replace(/&/g, '&')\n .replace(/</g, '<')\n .replace(/>/g, '>')\n .replace(/"/g, '\"')\n .replace(/'/g, \"'\")\n .replace(/ /g, ' ');\n}\n\nasync function search(query, numResults = 10) {\n // Try Google PSE first\n const googleResults = await searchGooglePSE(query, numResults);\n if (googleResults && googleResults.length > 0) {\n log('Using Google PSE');\n return { results: googleResults, source: 'google-pse' };\n }\n\n // Fallback to DuckDuckGo\n log('Using DuckDuckGo');\n const ddgResults = await searchDuckDuckGo(query, numResults);\n return { results: ddgResults, source: 
'duckduckgo' };\n}\n\n// === Fetch Hook ===\n\nconst originalFetch = globalThis.fetch;\n\nglobalThis.fetch = async function(input, init) {\n const url = typeof input === 'string' ? input : (input instanceof URL ? input.href : input.url);\n\n // Intercept search requests\n if (url && url.includes('/api/tools/exa/search')) {\n log('Intercepted search request');\n\n try {\n let body = init?.body;\n if (body && typeof body !== 'string') {\n body = await new Response(body).text();\n }\n\n const { query, numResults } = JSON.parse(body || '{}');\n log('Query:', query);\n\n const { results, source } = await search(query, numResults || 10);\n log('Results:', results.length, 'from', source);\n\n return new Response(JSON.stringify({ results }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' }\n });\n } catch (e) {\n log('Search error:', e.message);\n return new Response(JSON.stringify({ error: String(e), results: [] }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' }\n });\n }\n }\n\n // Pass through all other requests\n return originalFetch.apply(this, arguments);\n};\n\n// Also hook Bun.fetch if available\nif (typeof Bun !== 'undefined' && Bun.fetch) {\n const originalBunFetch = Bun.fetch;\n Bun.fetch = globalThis.fetch;\n}\n\nlog('Fetch hook installed');\n`;\n}\n\n/**\n * Generate search proxy server code (runs in background)\n * Since BUN_CONFIG_PRELOAD doesn't work with compiled binaries,\n * use a local proxy server to intercept search requests instead\n *\n * Each droid instance runs its own proxy server.\n * The proxy is killed automatically when droid exits.\n * @param factoryApiUrl - Custom Factory API URL (default: https://api.factory.ai)\n */\nfunction generateSearchProxyServer(factoryApiUrl: string = \"https://api.factory.ai\"): string {\n return `#!/usr/bin/env node\n// Droid WebSearch Proxy Server\n// Auto-generated by droid-patch --websearch\n// This proxy runs as a child process of droid and is killed when droid exits\n\nconst http = require('http');\nconst https = require('https');\nconst { execSync } = require('child_process');\nconst fs = require('fs');\n\nconst DEBUG = process.env.DROID_SEARCH_DEBUG === '1';\nconst PORT = parseInt(process.env.SEARCH_PROXY_PORT || '0'); // 0 = auto-assign\nconst FACTORY_API = '${factoryApiUrl}';\n\nfunction log(...args) {\n if (DEBUG) console.error('[websearch]', ...args);\n}\n\n// === Search Implementation ===\n\n// Smithery Exa MCP - highest priority, requires SMITHERY_API_KEY and SMITHERY_PROFILE\nasync function searchSmitheryExa(query, numResults) {\n const apiKey = process.env.SMITHERY_API_KEY;\n const profile = process.env.SMITHERY_PROFILE;\n if (!apiKey || !profile) return null;\n\n try {\n // Construct URL with authentication\n const serverUrl = \\`https://server.smithery.ai/exa/mcp?api_key=\\${encodeURIComponent(apiKey)}&profile=\\${encodeURIComponent(profile)}\\`;\n log('Smithery Exa request');\n\n // Use MCP protocol to call the search tool via HTTP POST\n const requestBody = JSON.stringify({\n jsonrpc: '2.0',\n id: 1,\n method: 'tools/call',\n params: {\n name: 'web_search_exa',\n arguments: {\n query: query,\n numResults: numResults\n }\n }\n });\n\n const curlCmd = \\`curl -s -X POST \"\\${serverUrl}\" -H \"Content-Type: application/json\" -d '\\${requestBody.replace(/'/g, \"'\\\\\\\\\\\\\\\\''\")}'\\`;\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 30000 });\n const response = JSON.parse(jsonStr);\n\n // Parse MCP response\n if (response.result && response.result.content) {\n 
// MCP returns content as array of text blocks\n const textContent = response.result.content.find(c => c.type === 'text');\n if (textContent && textContent.text) {\n try {\n const searchResults = JSON.parse(textContent.text);\n if (Array.isArray(searchResults) && searchResults.length > 0) {\n return searchResults.slice(0, numResults).map(item => ({\n title: item.title || '',\n url: item.url || '',\n content: item.text || item.snippet || item.highlights?.join(' ') || '',\n publishedDate: item.publishedDate || null,\n author: item.author || null,\n score: item.score || null\n }));\n }\n } catch (parseErr) {\n log('Smithery response parsing failed');\n }\n }\n }\n\n if (response.error) {\n log('Smithery Exa error:', response.error.message || response.error);\n return null;\n }\n } catch (e) {\n log('Smithery Exa failed:', e.message);\n return null;\n }\n return null;\n}\n\nasync function searchGooglePSE(query, numResults) {\n const apiKey = process.env.GOOGLE_PSE_API_KEY;\n const cx = process.env.GOOGLE_PSE_CX;\n if (!apiKey || !cx) return null;\n\n try {\n const url = \\`https://www.googleapis.com/customsearch/v1?key=\\${apiKey}&cx=\\${cx}&q=\\${encodeURIComponent(query)}&num=\\${Math.min(numResults, 10)}\\`;\n log('Google PSE request:', url.replace(apiKey, '***'));\n\n const curlCmd = \\`curl -s \"\\${url}\"\\`;\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n\n if (data.error) {\n log('Google PSE error:', data.error.message);\n return null;\n }\n return (data.items || []).map(item => ({\n title: item.title,\n url: item.link,\n content: item.snippet || '',\n publishedDate: null,\n author: null,\n score: null\n }));\n } catch (e) {\n log('Google PSE failed:', e.message);\n return null;\n }\n}\n\n// SearXNG - self-hosted meta search engine\nasync function searchSearXNG(query, numResults) {\n const searxngUrl = process.env.SEARXNG_URL;\n if (!searxngUrl) return null;\n\n try {\n const url = \\`\\${searxngUrl}/search?q=\\${encodeURIComponent(query)}&format=json&engines=google,bing,duckduckgo\\`;\n log('SearXNG request:', url);\n\n const curlCmd = \\`curl -s \"\\${url}\" -H \"Accept: application/json\"\\`;\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n\n if (data.results && data.results.length > 0) {\n return data.results.slice(0, numResults).map(item => ({\n title: item.title,\n url: item.url,\n content: item.content || '',\n publishedDate: null,\n author: null,\n score: null\n }));\n }\n } catch (e) {\n log('SearXNG failed:', e.message);\n }\n return null;\n}\n\n// Serper API - free tier available (2500 queries/month)\nasync function searchSerper(query, numResults) {\n const apiKey = process.env.SERPER_API_KEY;\n if (!apiKey) return null;\n\n try {\n const curlCmd = \\`curl -s \"https://google.serper.dev/search\" -H \"X-API-KEY: \\${apiKey}\" -H \"Content-Type: application/json\" -d '{\"q\":\"\\${query.replace(/\"/g, '\\\\\\\\\"')}\",\"num\":\\${numResults}}'\\`;\n log('Serper request');\n\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n\n if (data.organic && data.organic.length > 0) {\n return data.organic.slice(0, numResults).map(item => ({\n title: item.title,\n url: item.link,\n content: item.snippet || '',\n publishedDate: null,\n author: null,\n score: null\n }));\n }\n } catch (e) {\n log('Serper failed:', e.message);\n }\n return null;\n}\n\n// Brave Search API - free tier available\nasync 
function searchBrave(query, numResults) {\n const apiKey = process.env.BRAVE_API_KEY;\n if (!apiKey) return null;\n\n try {\n const url = \\`https://api.search.brave.com/res/v1/web/search?q=\\${encodeURIComponent(query)}&count=\\${numResults}\\`;\n const curlCmd = \\`curl -s \"\\${url}\" -H \"Accept: application/json\" -H \"X-Subscription-Token: \\${apiKey}\"\\`;\n log('Brave request');\n\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n\n if (data.web && data.web.results && data.web.results.length > 0) {\n return data.web.results.slice(0, numResults).map(item => ({\n title: item.title,\n url: item.url,\n content: item.description || '',\n publishedDate: null,\n author: null,\n score: null\n }));\n }\n } catch (e) {\n log('Brave failed:', e.message);\n }\n return null;\n}\n\n// DuckDuckGo - limited reliability due to bot detection\nasync function searchDuckDuckGo(query, numResults) {\n // DuckDuckGo Instant Answer API (limited results but more reliable)\n try {\n const apiUrl = \\`https://api.duckduckgo.com/?q=\\${encodeURIComponent(query)}&format=json&no_html=1&skip_disambig=1\\`;\n const curlCmd = \\`curl -s \"\\${apiUrl}\" -H \"User-Agent: Mozilla/5.0\"\\`;\n const jsonStr = execSync(curlCmd, { encoding: 'utf-8', timeout: 15000 });\n const data = JSON.parse(jsonStr);\n\n const results = [];\n\n if (data.Abstract && data.AbstractURL) {\n results.push({\n title: data.Heading || query,\n url: data.AbstractURL,\n content: data.Abstract,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n\n for (const topic of (data.RelatedTopics || [])) {\n if (results.length >= numResults) break;\n if (topic.Text && topic.FirstURL) {\n results.push({\n title: topic.Text.substring(0, 100),\n url: topic.FirstURL,\n content: topic.Text,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n if (topic.Topics) {\n for (const st of topic.Topics) {\n if (results.length >= numResults) break;\n if (st.Text && st.FirstURL) {\n results.push({\n title: st.Text.substring(0, 100),\n url: st.FirstURL,\n content: st.Text,\n publishedDate: null,\n author: null,\n score: null\n });\n }\n }\n }\n }\n\n if (results.length > 0) {\n log('DDG API:', results.length, 'results');\n return results;\n }\n } catch (e) {\n log('DDG API failed:', e.message);\n }\n\n return [];\n}\n\nfunction parseDDGLiteHTML(html, maxResults) {\n const results = [];\n const linkRegex = /<a[^>]+rel=\"nofollow\"[^>]+href=\"([^\"]+)\"[^>]*>([^<]+)<\\\\/a>/gi;\n const snippetRegex = /<td[^>]*class=\"result-snippet\"[^>]*>([^<]*)<\\\\/td>/gi;\n\n const links = [];\n let match;\n\n while ((match = linkRegex.exec(html)) !== null && links.length < maxResults) {\n let url = match[1];\n if (url.includes('duckduckgo.com') && !url.includes('uddg=')) continue;\n if (url.includes('uddg=')) {\n const uddgMatch = url.match(/uddg=([^&]+)/);\n if (uddgMatch) url = decodeURIComponent(uddgMatch[1]);\n }\n links.push({\n url: url,\n title: decodeHTMLEntities(match[2].trim())\n });\n }\n\n const snippets = [];\n while ((match = snippetRegex.exec(html)) !== null && snippets.length < maxResults) {\n snippets.push(decodeHTMLEntities(match[1].trim()));\n }\n\n for (let i = 0; i < links.length && results.length < maxResults; i++) {\n results.push({\n title: links[i].title,\n url: links[i].url,\n content: snippets[i] || '',\n publishedDate: null,\n author: null,\n score: null\n });\n }\n\n return results;\n}\n\nfunction decodeHTMLEntities(str) {\n return str\n .replace(/&/g, '&')\n 
.replace(/</g, '<')\n .replace(/>/g, '>')\n .replace(/"/g, '\"')\n .replace(/'/g, \"'\")\n .replace(/ /g, ' ');\n}\n\nasync function search(query, numResults = 10) {\n // Priority order:\n // 1. Smithery Exa MCP (best quality if configured)\n // 2. Google PSE (most reliable if configured)\n // 3. Serper (free tier: 2500/month)\n // 4. Brave Search (free tier available)\n // 5. SearXNG (self-hosted)\n // 6. DuckDuckGo (limited due to bot detection)\n\n // 1. Smithery Exa MCP (highest priority)\n const smitheryResults = await searchSmitheryExa(query, numResults);\n if (smitheryResults && smitheryResults.length > 0) {\n log('Using Smithery Exa');\n return { results: smitheryResults, source: 'smithery-exa' };\n }\n\n // 2. Google PSE\n const googleResults = await searchGooglePSE(query, numResults);\n if (googleResults && googleResults.length > 0) {\n log('Using Google PSE');\n return { results: googleResults, source: 'google-pse' };\n }\n\n // 3. Serper\n const serperResults = await searchSerper(query, numResults);\n if (serperResults && serperResults.length > 0) {\n log('Using Serper');\n return { results: serperResults, source: 'serper' };\n }\n\n // 4. Brave Search\n const braveResults = await searchBrave(query, numResults);\n if (braveResults && braveResults.length > 0) {\n log('Using Brave Search');\n return { results: braveResults, source: 'brave' };\n }\n\n // 5. SearXNG\n const searxngResults = await searchSearXNG(query, numResults);\n if (searxngResults && searxngResults.length > 0) {\n log('Using SearXNG');\n return { results: searxngResults, source: 'searxng' };\n }\n\n // 6. DuckDuckGo (last resort, limited results)\n log('Using DuckDuckGo (fallback)');\n const ddgResults = await searchDuckDuckGo(query, numResults);\n return { results: ddgResults, source: 'duckduckgo' };\n}\n\n// === HTTP Proxy Server ===\n\nconst server = http.createServer(async (req, res) => {\n const url = new URL(req.url, \\`http://\\${req.headers.host}\\`);\n\n // Health check\n if (url.pathname === '/health') {\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ status: 'ok', port: server.address()?.port || PORT }));\n return;\n }\n\n // Search endpoint - intercept\n if (url.pathname === '/api/tools/exa/search' && req.method === 'POST') {\n let body = '';\n req.on('data', c => body += c);\n req.on('end', async () => {\n try {\n const { query, numResults } = JSON.parse(body);\n log('Search query:', query);\n const { results, source } = await search(query, numResults || 10);\n log('Results:', results.length, 'from', source);\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ results }));\n } catch (e) {\n log('Search error:', e.message);\n res.writeHead(500, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: String(e), results: [] }));\n }\n });\n return;\n }\n\n // === Standalone mode (controlled by STANDALONE_MODE env) ===\n // Whitelist approach: only allow core LLM APIs, mock everything else\n if (process.env.STANDALONE_MODE === '1') {\n const pathname = url.pathname;\n\n // Whitelist: Core APIs that should be forwarded to upstream\n const isCoreLLMApi = pathname.startsWith('/api/llm/a/') || pathname.startsWith('/api/llm/o/');\n // /api/tools/exa/search is already handled above\n\n if (!isCoreLLMApi) {\n // Special handling for specific routes\n if (pathname === '/api/sessions/create') {\n log('Mock (dynamic):', pathname);\n const sessionId = \\`local-\\${Date.now()}-\\${Math.random().toString(36).slice(2, 
10)}\\`;\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ id: sessionId }));\n return;\n }\n\n if (pathname === '/api/cli/whoami') {\n log('Mock (401):', pathname);\n res.writeHead(401, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'Unauthorized', message: 'Local mode - use token fallback' }));\n return;\n }\n\n if (pathname === '/api/tools/get-url-contents') {\n log('Mock (404):', pathname);\n res.writeHead(404, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'Not available', message: 'Use local URL fetch fallback' }));\n return;\n }\n\n // All other non-core APIs: return empty success\n log('Mock (default):', pathname);\n res.writeHead(200, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({}));\n return;\n }\n }\n\n // Proxy core LLM requests to upstream API\n log('Proxy:', req.method, url.pathname);\n\n const proxyUrl = new URL(FACTORY_API + url.pathname + url.search);\n // Choose http or https based on target protocol\n const proxyModule = proxyUrl.protocol === 'https:' ? https : http;\n const proxyReq = proxyModule.request(proxyUrl, {\n method: req.method,\n headers: { ...req.headers, host: proxyUrl.host }\n }, proxyRes => {\n res.writeHead(proxyRes.statusCode, proxyRes.headers);\n proxyRes.pipe(res);\n });\n\n proxyReq.on('error', e => {\n log('Proxy error:', e.message);\n res.writeHead(502, { 'Content-Type': 'application/json' });\n res.end(JSON.stringify({ error: 'Proxy failed: ' + e.message }));\n });\n\n if (req.method !== 'GET' && req.method !== 'HEAD') {\n req.pipe(proxyReq);\n } else {\n proxyReq.end();\n }\n});\n\n// If port is 0, system will automatically assign an available port\nserver.listen(PORT, '127.0.0.1', () => {\n const actualPort = server.address().port;\n const hasGoogle = process.env.GOOGLE_PSE_API_KEY && process.env.GOOGLE_PSE_CX;\n\n // Write port file for parent process to read\n const portFile = process.env.SEARCH_PROXY_PORT_FILE;\n if (portFile) {\n fs.writeFileSync(portFile, String(actualPort));\n }\n\n // Output PORT= line for wrapper script to parse\n console.log('PORT=' + actualPort);\n\n const hasSmithery = process.env.SMITHERY_API_KEY && process.env.SMITHERY_PROFILE;\n log('Search proxy started on http://127.0.0.1:' + actualPort);\n log('Smithery Exa:', hasSmithery ? 'configured (priority 1)' : 'not set');\n log('Google PSE:', hasGoogle ? 'configured' : 'not set');\n log('Serper:', process.env.SERPER_API_KEY ? 'configured' : 'not set');\n log('Brave:', process.env.BRAVE_API_KEY ? 'configured' : 'not set');\n log('SearXNG:', process.env.SEARXNG_URL || 'not set');\n});\n\nprocess.on('SIGTERM', () => { server.close(); process.exit(0); });\nprocess.on('SIGINT', () => { server.close(); process.exit(0); });\n`;\n}\n\n/**\n * Generate unified Wrapper script\n * Each droid instance runs its own proxy:\n * - Uses port 0 to let system auto-assign available port\n * - Proxy runs as child process\n * - Proxy is killed when droid exits\n * - Supports multiple droid instances running simultaneously\n */\n/* eslint-disable no-useless-escape */\nfunction generateUnifiedWrapper(\n droidPath: string,\n proxyScriptPath: string,\n standalone: boolean = false,\n): string {\n const standaloneEnv = standalone ? 
\"STANDALONE_MODE=1 \" : \"\";\n return `#!/bin/bash\n# Droid with WebSearch\n# Auto-generated by droid-patch --websearch\n# Each instance runs its own proxy on a system-assigned port\n\nPROXY_SCRIPT=\"${proxyScriptPath}\"\nDROID_BIN=\"${droidPath}\"\nPROXY_PID=\"\"\nPORT_FILE=\"/tmp/droid-websearch-\\$\\$.port\"\nSTANDALONE=\"${standalone ? \"1\" : \"0\"}\"\n\n# Passthrough for non-interactive/meta commands (avoid starting a proxy for help/version/etc)\nshould_passthrough() {\n # Any help/version flags before \"--\"\n for arg in \"\\$@\"; do\n if [ \"\\$arg\" = \"--\" ]; then\n break\n fi\n case \"\\$arg\" in\n --help|-h|--version|-V)\n return 0\n ;;\n esac\n done\n\n # Top-level command token\n local end_opts=0\n for arg in \"\\$@\"; do\n if [ \"\\$arg\" = \"--\" ]; then\n end_opts=1\n continue\n fi\n if [ \"\\$end_opts\" -eq 0 ] && [[ \"\\$arg\" == -* ]]; then\n continue\n fi\n case \"\\$arg\" in\n help|version|completion|completions|exec)\n return 0\n ;;\n esac\n break\n done\n\n return 1\n}\n\nif should_passthrough \"\\$@\"; then\n exec \"\\$DROID_BIN\" \"\\$@\"\nfi\n\n# Cleanup function - kill proxy when droid exits\ncleanup() {\n if [ -n \"\\$PROXY_PID\" ] && kill -0 \"\\$PROXY_PID\" 2>/dev/null; then\n [ -n \"\\$DROID_SEARCH_DEBUG\" ] && echo \"[websearch] Stopping proxy (PID: \\$PROXY_PID)\" >&2\n kill \"\\$PROXY_PID\" 2>/dev/null\n wait \"\\$PROXY_PID\" 2>/dev/null\n fi\n rm -f \"\\$PORT_FILE\"\n}\n\n# Set up trap to cleanup on exit\ntrap cleanup EXIT INT TERM\n\n[ -n \"\\$DROID_SEARCH_DEBUG\" ] && echo \"[websearch] Starting proxy...\" >&2\n[ \"\\$STANDALONE\" = \"1\" ] && [ -n \"\\$DROID_SEARCH_DEBUG\" ] && echo \"[websearch] Standalone mode enabled\" >&2\n\n# Start proxy with port 0 (system will assign available port)\n# Proxy writes actual port to PORT_FILE\nif [ -n \"\\$DROID_SEARCH_DEBUG\" ]; then\n ${standaloneEnv}SEARCH_PROXY_PORT=0 SEARCH_PROXY_PORT_FILE=\"\\$PORT_FILE\" node \"\\$PROXY_SCRIPT\" 2>&1 &\nelse\n ${standaloneEnv}SEARCH_PROXY_PORT=0 SEARCH_PROXY_PORT_FILE=\"\\$PORT_FILE\" node \"\\$PROXY_SCRIPT\" >/dev/null 2>&1 &\nfi\nPROXY_PID=\\$!\n\n# Wait for proxy to start and get actual port (max 5 seconds)\nfor i in {1..50}; do\n # Check if proxy process is still running\n if ! kill -0 \"\\$PROXY_PID\" 2>/dev/null; then\n [ -n \"\\$DROID_SEARCH_DEBUG\" ] && echo \"[websearch] Proxy process died\" >&2\n break\n fi\n if [ -f \"\\$PORT_FILE\" ]; then\n ACTUAL_PORT=\\$(cat \"\\$PORT_FILE\" 2>/dev/null)\n if [ -n \"\\$ACTUAL_PORT\" ] && curl -s \"http://127.0.0.1:\\$ACTUAL_PORT/health\" > /dev/null 2>&1; then\n [ -n \"\\$DROID_SEARCH_DEBUG\" ] && echo \"[websearch] Proxy ready on port \\$ACTUAL_PORT (PID: \\$PROXY_PID)\" >&2\n break\n fi\n fi\n sleep 0.1\ndone\n\n# Check if proxy started successfully\nif [ ! 
-f \"\\$PORT_FILE\" ] || [ -z \"\\$(cat \"\\$PORT_FILE\" 2>/dev/null)\" ]; then\n echo \"[websearch] Failed to start proxy, running without websearch\" >&2\n cleanup\n exec \"\\$DROID_BIN\" \"\\$@\"\nfi\n\nACTUAL_PORT=\\$(cat \"\\$PORT_FILE\")\nrm -f \"\\$PORT_FILE\"\n\n# Run droid with proxy\nexport FACTORY_API_BASE_URL_OVERRIDE=\"http://127.0.0.1:\\$ACTUAL_PORT\"\n\"\\$DROID_BIN\" \"\\$@\"\nDROID_EXIT_CODE=\\$?\n\n# Cleanup will be called by trap\nexit \\$DROID_EXIT_CODE\n`;\n}\n/* eslint-enable no-useless-escape */\n\n/**\n * Create unified WebSearch files\n *\n * Approach: Proxy server mode\n * - wrapper script starts local proxy server\n * - proxy server intercepts search requests, passes through other requests\n * - uses FACTORY_API_BASE_URL_OVERRIDE env var to point to proxy\n * - alias works directly, no extra steps needed\n *\n * @param outputDir - Directory to write files to\n * @param droidPath - Path to droid binary\n * @param aliasName - Alias name for the wrapper\n * @param apiBase - Custom API base URL for proxy to forward requests to\n * @param standalone - Standalone mode: mock non-LLM Factory APIs\n */\nexport async function createWebSearchUnifiedFiles(\n outputDir: string,\n droidPath: string,\n aliasName: string,\n apiBase?: string,\n standalone: boolean = false,\n): Promise<{ wrapperScript: string; preloadScript: string }> {\n if (!existsSync(outputDir)) {\n await mkdir(outputDir, { recursive: true });\n }\n\n const proxyScriptPath = join(outputDir, `${aliasName}-proxy.js`);\n const wrapperScriptPath = join(outputDir, aliasName);\n\n // Write proxy server script with custom API base if provided\n const factoryApiUrl = apiBase || \"https://api.factory.ai\";\n await writeFile(proxyScriptPath, generateSearchProxyServer(factoryApiUrl));\n console.log(`[*] Created proxy script: ${proxyScriptPath}`);\n\n // Write unified wrapper\n await writeFile(\n wrapperScriptPath,\n generateUnifiedWrapper(droidPath, proxyScriptPath, standalone),\n );\n await chmod(wrapperScriptPath, 0o755);\n console.log(`[*] Created wrapper: ${wrapperScriptPath}`);\n\n if (standalone) {\n console.log(`[*] Standalone mode enabled`);\n }\n\n return {\n wrapperScript: wrapperScriptPath,\n preloadScript: proxyScriptPath, // Keep interface compatible\n };\n}\n","import { chmod, mkdir, writeFile } from \"node:fs/promises\";\nimport { existsSync } from \"node:fs\";\nimport { join } from \"node:path\";\n\nfunction generateStatuslineMonitorScript(): string {\n // Keep this script dependency-free (Node built-ins only). It runs from the wrapper via `node`.\n return `#!/usr/bin/env node\n/* Auto-generated by droid-patch --statusline */\n\nconst fs = require('fs');\nconst os = require('os');\nconst path = require('path');\nconst { spawn, spawnSync } = require('child_process');\n\n// This monitor does NOT draw directly to the terminal. It emits newline-delimited\n// statusline frames to stdout. A wrapper (PTY proxy) is responsible for rendering\n// the latest frame on a reserved bottom row to avoid flicker.\n\nconst FACTORY_HOME = path.join(os.homedir(), '.factory');\n\nconst SESSIONS_ROOT = path.join(FACTORY_HOME, 'sessions');\nconst LOG_PATH = path.join(FACTORY_HOME, 'logs', 'droid-log-single.log');\nconst CONFIG_PATH = path.join(FACTORY_HOME, 'config.json');\nconst GLOBAL_SETTINGS_PATH = path.join(FACTORY_HOME, 'settings.json');\n\nconst IS_APPLE_TERMINAL = process.env.TERM_PROGRAM === 'Apple_Terminal';\nconst MIN_RENDER_INTERVAL_MS = IS_APPLE_TERMINAL ? 
1000 : 500;\n\nconst START_MS = Date.now();\nconst ARGS = process.argv.slice(2);\nconst PGID = Number(process.env.DROID_STATUSLINE_PGID || '');\nconst SESSION_ID_RE = /\"sessionId\":\"([0-9a-f-]{36})\"/i;\n\nfunction sleep(ms) {\n return new Promise((r) => setTimeout(r, ms));\n}\n\nfunction isPositiveInt(n) {\n return Number.isFinite(n) && n > 0;\n}\n\nfunction extractSessionIdFromLine(line) {\n if (!line) return null;\n const m = String(line).match(SESSION_ID_RE);\n return m ? m[1] : null;\n}\n\nfunction parseLineTimestampMs(line) {\n const s = String(line || '');\n if (!s || s[0] !== '[') return null;\n const end = s.indexOf(']');\n if (end <= 1) return null;\n const raw = s.slice(1, end);\n const ms = Date.parse(raw);\n return Number.isFinite(ms) ? ms : null;\n}\n\nfunction safeStatMtimeMs(p) {\n try {\n const stat = fs.statSync(p);\n const ms = Number(stat?.mtimeMs ?? 0);\n return Number.isFinite(ms) ? ms : 0;\n } catch {\n return 0;\n }\n}\n\nfunction nextCompactionState(line, current) {\n if (!line) return current;\n if (line.includes('[Compaction] Start')) return true;\n const endMarkers = ['End', 'Done', 'Finish', 'Finished', 'Complete', 'Completed'];\n if (endMarkers.some(m => line.includes('[Compaction] ' + m))) return false;\n return current;\n}\n\nfunction firstNonNull(promises) {\n const list = Array.isArray(promises) ? promises : [];\n if (list.length === 0) return Promise.resolve(null);\n return new Promise((resolve) => {\n let pending = list.length;\n let done = false;\n for (const p of list) {\n Promise.resolve(p)\n .then((value) => {\n if (done) return;\n if (value) {\n done = true;\n resolve(value);\n return;\n }\n pending -= 1;\n if (pending <= 0) resolve(null);\n })\n .catch(() => {\n if (done) return;\n pending -= 1;\n if (pending <= 0) resolve(null);\n });\n }\n });\n}\n\nfunction listPidsInProcessGroup(pgid) {\n if (!isPositiveInt(pgid)) return [];\n try {\n const res = spawnSync('ps', ['-ax', '-o', 'pid=,pgid='], {\n encoding: 'utf8',\n stdio: ['ignore', 'pipe', 'ignore'],\n timeout: 800,\n });\n if (!res || res.status !== 0) return [];\n const out = String(res.stdout || '');\n const pids = [];\n for (const line of out.split('\\\\n')) {\n const parts = line.trim().split(/\\\\s+/);\n if (parts.length < 2) continue;\n const pid = Number(parts[0]);\n const g = Number(parts[1]);\n if (Number.isFinite(pid) && g === pgid) pids.push(pid);\n }\n return pids;\n } catch {\n return [];\n }\n}\n\nfunction resolveOpenSessionFromPids(pids) {\n if (!Array.isArray(pids) || pids.length === 0) return null;\n // lsof prints file names as lines prefixed with \"n\" when using -Fn\n try {\n const res = spawnSync('lsof', ['-p', pids.join(','), '-Fn'], {\n encoding: 'utf8',\n stdio: ['ignore', 'pipe', 'ignore'],\n timeout: 1200,\n });\n if (!res || res.status !== 0) return null;\n const out = String(res.stdout || '');\n for (const line of out.split('\\\\n')) {\n if (!line || line[0] !== 'n') continue;\n const name = line.slice(1);\n if (!name.startsWith(SESSIONS_ROOT + path.sep)) continue;\n const m = name.match(/([0-9a-f-]{36})\\\\.(jsonl|settings\\\\.json)$/i);\n if (!m) continue;\n const id = m[1];\n const workspaceDir = path.dirname(name);\n if (path.dirname(workspaceDir) !== SESSIONS_ROOT) continue;\n return { workspaceDir, id };\n }\n } catch {\n return null;\n }\n return null;\n}\n\nasync function resolveSessionFromProcessGroup(shouldAbort, maxTries = 20) {\n if (!isPositiveInt(PGID)) return null;\n // Wait a little for droid to create/open the session files.\n for (let i = 0; i < 
maxTries; i++) {\n if (shouldAbort && shouldAbort()) return null;\n const pids = listPidsInProcessGroup(PGID);\n const found = resolveOpenSessionFromPids(pids);\n if (found) return found;\n await sleep(100);\n }\n return null;\n}\n\nfunction safeReadFile(filePath) {\n try {\n return fs.readFileSync(filePath, 'utf8');\n } catch {\n return null;\n }\n}\n\nfunction safeJsonParse(text) {\n if (!text) return null;\n try {\n // Factory settings/config files can contain comments. Strip them safely without\n // breaking URLs like \"http://...\" which contain \"//\" inside strings.\n const stripComments = (input) => {\n let out = '';\n let inString = false;\n let escape = false;\n for (let i = 0; i < input.length; i++) {\n const ch = input[i];\n const next = input[i + 1];\n\n if (inString) {\n out += ch;\n if (escape) {\n escape = false;\n continue;\n }\n if (ch === '\\\\\\\\') {\n escape = true;\n continue;\n }\n if (ch === '\"') {\n inString = false;\n }\n continue;\n }\n\n if (ch === '\"') {\n inString = true;\n out += ch;\n continue;\n }\n\n // Line comment\n if (ch === '/' && next === '/') {\n while (i < input.length && input[i] !== '\\\\n') i++;\n out += '\\\\n';\n continue;\n }\n\n // Block comment\n if (ch === '/' && next === '*') {\n i += 2;\n while (i < input.length && !(input[i] === '*' && input[i + 1] === '/')) i++;\n i += 1;\n continue;\n }\n\n out += ch;\n }\n return out;\n };\n\n return JSON.parse(stripComments(text));\n } catch {\n return null;\n }\n}\n\nfunction readJsonFile(filePath) {\n return safeJsonParse(safeReadFile(filePath));\n}\n\nfunction isUuid(text) {\n return /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(text);\n}\n\nfunction parseResume(args) {\n for (let i = 0; i < args.length; i++) {\n const a = args[i];\n if (a === '-r' || a === '--resume') {\n const next = args[i + 1];\n if (next && isUuid(next)) return { resumeFlag: true, resumeId: next };\n return { resumeFlag: true, resumeId: null };\n }\n if (a.startsWith('--resume=')) {\n const value = a.slice('--resume='.length);\n return { resumeFlag: true, resumeId: isUuid(value) ? value : null };\n }\n }\n return { resumeFlag: false, resumeId: null };\n}\n\nfunction sanitizeWorkspaceDirName(cwd) {\n return String(cwd)\n .replace(/[:]/g, '')\n .replace(/[\\\\\\\\/]/g, '-')\n .replace(/\\\\s+/g, '-');\n}\n\nfunction listSessionCandidates(workspaceDir) {\n let files = [];\n try {\n files = fs.readdirSync(workspaceDir);\n } catch {\n return [];\n }\n const candidates = [];\n for (const file of files) {\n const m = file.match(/^([0-9a-f-]{36})\\\\.(jsonl|settings\\\\.json)$/i);\n if (!m) continue;\n const id = m[1];\n const fullPath = path.join(workspaceDir, file);\n try {\n const stat = fs.statSync(fullPath);\n candidates.push({ id, fullPath, mtimeMs: stat.mtimeMs });\n } catch {\n // ignore\n }\n }\n return candidates;\n}\n\nfunction findWorkspaceDirForSessionId(workspaceDirs, sessionId) {\n for (const dir of workspaceDirs) {\n try {\n const settingsPath = path.join(dir, sessionId + '.settings.json');\n if (fs.existsSync(settingsPath)) return dir;\n } catch {\n // ignore\n }\n }\n return null;\n}\n\nfunction pickLatestSessionAcross(workspaceDirs) {\n let best = null;\n for (const dir of workspaceDirs) {\n const candidates = listSessionCandidates(dir);\n for (const c of candidates) {\n if (!best || c.mtimeMs > best.mtimeMs) {\n best = { workspaceDir: dir, id: c.id, mtimeMs: c.mtimeMs };\n }\n }\n }\n return best ? 
{ workspaceDir: best.workspaceDir, id: best.id } : null;\n}\n\nasync function waitForNewSessionAcross(workspaceDirs, knownIdsByWorkspace, startMs, shouldAbort) {\n for (let i = 0; i < 80; i++) {\n if (shouldAbort && shouldAbort()) return null;\n let best = null;\n for (const dir of workspaceDirs) {\n const known = knownIdsByWorkspace.get(dir) || new Set();\n const candidates = listSessionCandidates(dir);\n for (const c of candidates) {\n if (!(c.mtimeMs >= startMs - 50 || !known.has(c.id))) continue;\n if (!best || c.mtimeMs > best.mtimeMs) {\n best = { workspaceDir: dir, id: c.id, mtimeMs: c.mtimeMs };\n }\n }\n }\n if (best?.id) return { workspaceDir: best.workspaceDir, id: best.id };\n await sleep(100);\n }\n return null;\n}\n\nfunction safeRealpath(p) {\n try {\n return fs.realpathSync(p);\n } catch {\n return null;\n }\n}\n\nfunction resolveWorkspaceDirs(cwd) {\n const logical = cwd;\n const real = safeRealpath(cwd);\n const dirs = [];\n for (const value of [logical, real]) {\n if (!value || typeof value !== 'string') continue;\n dirs.push(path.join(SESSIONS_ROOT, sanitizeWorkspaceDirName(value)));\n }\n return Array.from(new Set(dirs));\n}\n\nfunction resolveSessionSettings(workspaceDir, sessionId) {\n const settingsPath = path.join(workspaceDir, sessionId + '.settings.json');\n const settings = readJsonFile(settingsPath) || {};\n return { settingsPath, settings };\n}\n\nfunction resolveGlobalSettingsModel() {\n const global = readJsonFile(GLOBAL_SETTINGS_PATH);\n return global && typeof global.model === 'string' ? global.model : null;\n}\n\nfunction resolveCustomModelIndex(modelId) {\n if (typeof modelId !== 'string') return null;\n if (!modelId.startsWith('custom:')) return null;\n const m = modelId.match(/-(\\\\d+)$/);\n if (!m) return null;\n const idx = Number(m[1]);\n return Number.isFinite(idx) ? idx : null;\n}\n\nfunction resolveUnderlyingModelId(modelId, factoryConfig) {\n const idx = resolveCustomModelIndex(modelId);\n if (idx == null) return modelId;\n const entry = factoryConfig?.custom_models?.[idx];\n if (entry && typeof entry.model === 'string') return entry.model;\n return modelId;\n}\n\nfunction resolveProvider(modelId, factoryConfig) {\n const idx = resolveCustomModelIndex(modelId);\n if (idx != null) {\n const entry = factoryConfig?.custom_models?.[idx];\n if (entry && typeof entry.provider === 'string') return entry.provider;\n }\n if (typeof modelId === 'string' && modelId.startsWith('claude-')) return 'anthropic';\n return '';\n}\n\nfunction formatInt(n) {\n if (!Number.isFinite(n)) return '0';\n return Math.round(n).toString();\n}\n\nfunction formatTokens(n) {\n if (!Number.isFinite(n)) return '0';\n const sign = n < 0 ? '-' : '';\n const abs = Math.abs(n);\n if (abs >= 1_000_000) {\n const v = abs / 1_000_000;\n const s = v >= 10 ? v.toFixed(0) : v.toFixed(1);\n return sign + s.replace(/\\\\.0$/, '') + 'M';\n }\n if (abs >= 10_000) {\n const v = abs / 1_000;\n const s = v >= 100 ? 
v.toFixed(0) : v.toFixed(1);\n return sign + s.replace(/\\\\.0$/, '') + 'k';\n }\n return sign + Math.round(abs).toString();\n}\n\nfunction emitFrame(line) {\n try {\n process.stdout.write(String(line || '') + '\\\\n');\n } catch {\n // ignore\n }\n}\n\nfunction seg(bg, fg, text) {\n if (!text) return '';\n return '\\\\x1b[48;5;' + bg + 'm' + '\\\\x1b[38;5;' + fg + 'm' + ' ' + text + ' ' + '\\\\x1b[0m';\n}\n\nfunction resolveGitBranch(cwd) {\n try {\n const res = spawnSync('git', ['rev-parse', '--abbrev-ref', 'HEAD'], {\n cwd,\n encoding: 'utf8',\n stdio: ['ignore', 'pipe', 'ignore'],\n timeout: 800,\n });\n if (res && res.status === 0) {\n const branch = String(res.stdout || '').trim();\n if (branch && branch !== 'HEAD') return branch;\n }\n } catch {}\n try {\n const headPath = path.join(cwd, '.git', 'HEAD');\n const head = safeReadFile(headPath);\n if (head && head.startsWith('ref: ')) {\n const ref = head.slice('ref: '.length).trim();\n const m = ref.match(/refs\\\\/heads\\\\/(.+)$/);\n if (m) return m[1];\n }\n } catch {}\n return '';\n}\n\nfunction resolveGitDiffSummary(cwd) {\n try {\n const res = spawnSync('git', ['diff', '--shortstat'], {\n cwd,\n encoding: 'utf8',\n stdio: ['ignore', 'pipe', 'ignore'],\n timeout: 800,\n });\n if (!res || res.status !== 0) return '';\n const text = String(res.stdout || '').trim();\n if (!text) return '';\n const ins = (text.match(/(\\\\d+)\\\\sinsertions?\\\\(\\\\+\\\\)/) || [])[1];\n const del = (text.match(/(\\\\d+)\\\\sdeletions?\\\\(-\\\\)/) || [])[1];\n const i = ins ? Number(ins) : 0;\n const d = del ? Number(del) : 0;\n if (!Number.isFinite(i) && !Number.isFinite(d)) return '';\n if (i === 0 && d === 0) return '';\n return '(+' + formatInt(i) + ',-' + formatInt(d) + ')';\n } catch {\n return '';\n }\n}\n\nfunction buildLine(params) {\n const {\n provider,\n model,\n cwdBase,\n gitBranch,\n gitDiff,\n usedTokens,\n cacheRead,\n deltaInput,\n lastOutputTokens,\n sessionUsage,\n compacting,\n } = params;\n\n let ctxPart = 'Ctx: ' + formatTokens(usedTokens);\n\n const cachePart =\n cacheRead > 0 || deltaInput > 0\n ? ' c' + formatTokens(cacheRead) + '+n' + formatTokens(deltaInput)\n : '';\n\n const compactPart = compacting ? ' COMPACT' : '';\n\n const usagePart = (() => {\n const u = sessionUsage || {};\n const input = Number(u.inputTokens ?? 0);\n const output = Number(u.outputTokens ?? 0);\n const cacheCreation = Number(u.cacheCreationTokens ?? 0);\n const cacheReadTotal = Number(u.cacheReadTokens ?? 0);\n const thinking = Number(u.thinkingTokens ?? 0);\n if (!(input || output || cacheCreation || cacheReadTotal || thinking)) return '';\n const parts = [];\n if (input) parts.push('In:' + formatTokens(input));\n if (output) parts.push('Out:' + formatTokens(output));\n if (cacheCreation) parts.push('Cre:' + formatTokens(cacheCreation));\n if (cacheReadTotal) parts.push('Read:' + formatTokens(cacheReadTotal));\n if (thinking) parts.push('Think:' + formatTokens(thinking));\n if (lastOutputTokens > 0) parts.push('LastOut:' + formatTokens(lastOutputTokens));\n return parts.join(' ');\n })();\n\n const modelPart = model ? 'Model: ' + model : '';\n const providerPart = provider ? 'Prov: ' + provider : '';\n const cwdPart = cwdBase ? 'cwd: ' + cwdBase : '';\n const branchPart = gitBranch ? 
'\\\\uE0A0 ' + gitBranch : '';\n const diffPart = gitDiff || '';\n\n // Background segments (powerline-like blocks)\n const sModel = seg(88, 15, modelPart); // dark red\n const sProvider = seg(160, 15, providerPart); // red\n const sCtx = seg(220, 0, ctxPart + (cachePart ? ' (' + cachePart.trim() + ')' : '')); // yellow\n const sUsage = seg(173, 0, usagePart); // orange-ish\n const sBranch = seg(24, 15, branchPart); // blue\n const sDiff = seg(34, 0, diffPart); // green\n const sCwd = seg(238, 15, cwdPart); // gray\n const sExtra = seg(99, 15, compactPart.trim()); // purple-ish\n\n return [sModel, sProvider, sCtx, sUsage, sBranch, sDiff, sCwd, sExtra].filter(Boolean).join('');\n}\n\nasync function main() {\n let factoryConfig = readJsonFile(CONFIG_PATH) || {};\n\n const cwd = process.cwd();\n const cwdBase = path.basename(cwd) || cwd;\n const workspaceDirs = resolveWorkspaceDirs(cwd);\n const knownIdsByWorkspace = new Map();\n for (const dir of workspaceDirs) {\n const set = new Set();\n for (const c of listSessionCandidates(dir)) set.add(c.id);\n knownIdsByWorkspace.set(dir, set);\n }\n\n const { resumeFlag, resumeId } = parseResume(ARGS);\n\n let sessionId = null;\n let workspaceDir = null;\n if (resumeId) {\n sessionId = resumeId;\n workspaceDir = findWorkspaceDirForSessionId(workspaceDirs, sessionId) || workspaceDirs[0] || null;\n } else {\n let abortResolve = false;\n const shouldAbort = () => abortResolve;\n\n const byProcPromise = resolveSessionFromProcessGroup(shouldAbort, 20);\n\n let picked = null;\n if (resumeFlag) {\n // For --resume without an explicit id, don't block startup too long on ps/lsof.\n // Prefer process-group resolution when it is fast; otherwise fall back to latest.\n picked = await Promise.race([\n byProcPromise,\n sleep(400).then(() => null),\n ]);\n if (!picked) picked = pickLatestSessionAcross(workspaceDirs);\n } else {\n const freshPromise = waitForNewSessionAcross(workspaceDirs, knownIdsByWorkspace, START_MS, shouldAbort);\n picked = await firstNonNull([byProcPromise, freshPromise]);\n if (!picked) picked = pickLatestSessionAcross(workspaceDirs);\n }\n\n abortResolve = true;\n\n sessionId = picked?.id || null;\n workspaceDir = picked?.workspaceDir || workspaceDirs[0] || null;\n }\n\n if (!sessionId || !workspaceDir) return;\n let sessionIdLower = String(sessionId).toLowerCase();\n\n let settingsPath = '';\n let sessionSettings = {};\n ({ settingsPath, settings: sessionSettings } = resolveSessionSettings(workspaceDir, sessionId));\n\n let configMtimeMs = safeStatMtimeMs(CONFIG_PATH);\n let globalSettingsMtimeMs = safeStatMtimeMs(GLOBAL_SETTINGS_PATH);\n let globalSettingsModel = resolveGlobalSettingsModel();\n\n let modelId =\n (sessionSettings && typeof sessionSettings.model === 'string' ? sessionSettings.model : null) ||\n globalSettingsModel ||\n null;\n\n let provider =\n sessionSettings && typeof sessionSettings.providerLock === 'string'\n ? sessionSettings.providerLock\n : resolveProvider(modelId, factoryConfig);\n let underlyingModel = resolveUnderlyingModelId(modelId, factoryConfig) || modelId || 'unknown';\n\n function refreshModel() {\n const nextModelId =\n (sessionSettings && typeof sessionSettings.model === 'string' ? sessionSettings.model : null) ||\n globalSettingsModel ||\n null;\n\n // Use providerLock if set, otherwise resolve from model/config (same logic as initialization)\n const nextProvider =\n sessionSettings && typeof sessionSettings.providerLock === 'string'\n ? 
sessionSettings.providerLock\n : resolveProvider(nextModelId, factoryConfig);\n const nextUnderlying = resolveUnderlyingModelId(nextModelId, factoryConfig) || nextModelId || 'unknown';\n\n let changed = false;\n if (nextModelId !== modelId) {\n modelId = nextModelId;\n changed = true;\n }\n if (nextProvider !== provider) {\n provider = nextProvider;\n changed = true;\n }\n if (nextUnderlying !== underlyingModel) {\n underlyingModel = nextUnderlying;\n changed = true;\n }\n\n if (changed) renderNow();\n }\n\n let last = { cacheReadInputTokens: 0, contextCount: 0, outputTokens: 0 };\n let sessionUsage =\n sessionSettings && typeof sessionSettings.tokenUsage === 'object' && sessionSettings.tokenUsage\n ? sessionSettings.tokenUsage\n : {};\n let compacting = false;\n let lastRenderAt = 0;\n let lastRenderedLine = '';\n let gitBranch = '';\n let gitDiff = '';\n let lastContextMs = 0;\n\n function renderNow() {\n const usedTokens = (last.cacheReadInputTokens || 0) + (last.contextCount || 0);\n const line = buildLine({\n provider,\n model: underlyingModel,\n cwdBase,\n gitBranch,\n gitDiff,\n usedTokens,\n cacheRead: last.cacheReadInputTokens || 0,\n deltaInput: last.contextCount || 0,\n lastOutputTokens: last.outputTokens || 0,\n sessionUsage,\n compacting,\n });\n if (line !== lastRenderedLine) {\n lastRenderedLine = line;\n emitFrame(line);\n }\n }\n\n // Initial render.\n renderNow();\n\n // Resolve git info asynchronously so startup isn't blocked on large repos.\n setTimeout(() => {\n try {\n gitBranch = resolveGitBranch(cwd);\n gitDiff = resolveGitDiffSummary(cwd);\n renderNow();\n } catch {}\n }, 0).unref();\n\n let reseedInProgress = false;\n let reseedQueued = false;\n\n function updateLastFromContext(ctx, updateOutputTokens, tsMs) {\n const ts = Number.isFinite(tsMs) ? tsMs : null;\n if (ts != null && lastContextMs && ts < lastContextMs) return false;\n const cacheRead = Number(ctx?.cacheReadInputTokens);\n const contextCount = Number(ctx?.contextCount);\n const out = Number(ctx?.outputTokens);\n if (Number.isFinite(cacheRead)) last.cacheReadInputTokens = cacheRead;\n if (Number.isFinite(contextCount)) last.contextCount = contextCount;\n if (updateOutputTokens && Number.isFinite(out)) last.outputTokens = out;\n if (ts != null) lastContextMs = ts;\n return true;\n }\n\n function seedLastContextFromLog(options) {\n const opts = options || {};\n const maxScanBytes = Number.isFinite(opts.maxScanBytes) ? opts.maxScanBytes : 64 * 1024 * 1024;\n const preferStreaming = !!opts.preferStreaming;\n const minTimestampMs = Number.isFinite(lastContextMs) && lastContextMs > 0 ? lastContextMs : 0;\n const earlyStopAfterBestBytes = Math.min(2 * 1024 * 1024, Math.max(256 * 1024, maxScanBytes));\n\n if (reseedInProgress) {\n reseedQueued = true;\n return;\n }\n reseedInProgress = true;\n\n setTimeout(() => {\n try {\n // Backward scan to find the most recent context entry for this session.\n // Prefer streaming context if requested; otherwise accept any context line\n // that includes cacheReadInputTokens/contextCount fields.\n const CHUNK_BYTES = 1024 * 1024; // 1 MiB\n\n const fd = fs.openSync(LOG_PATH, 'r');\n try {\n const stat = fs.fstatSync(fd);\n const size = Number(stat?.size ?? 
0);\n let pos = size;\n let scanned = 0;\n let remainder = '';\n let bestCtx = null;\n let bestIsStreaming = false;\n let bestTs = null;\n let bestHasTs = false;\n let bytesSinceBest = 0;\n\n while (pos > 0 && scanned < maxScanBytes && (!bestHasTs || bytesSinceBest < earlyStopAfterBestBytes)) {\n const readSize = Math.min(CHUNK_BYTES, pos);\n const start = pos - readSize;\n const buf = Buffer.alloc(readSize);\n fs.readSync(fd, buf, 0, readSize, start);\n pos = start;\n scanned += readSize;\n bytesSinceBest += readSize;\n\n let text = buf.toString('utf8') + remainder;\n let lines = String(text).split('\\\\n');\n remainder = lines.shift() || '';\n if (pos === 0 && remainder) {\n lines.unshift(remainder);\n remainder = '';\n }\n\n for (let i = lines.length - 1; i >= 0; i--) {\n const line = String(lines[i] || '').trimEnd();\n if (!line) continue;\n if (!line.includes('Context:')) continue;\n const sid = extractSessionIdFromLine(line);\n if (!sid || String(sid).toLowerCase() !== sessionIdLower) continue;\n\n const isStreaming = line.includes('[Agent] Streaming result');\n if (preferStreaming && !isStreaming) continue;\n\n const ctxIndex = line.indexOf('Context: ');\n if (ctxIndex === -1) continue;\n const jsonStr = line.slice(ctxIndex + 'Context: '.length).trim();\n let ctx;\n try {\n ctx = JSON.parse(jsonStr);\n } catch {\n continue;\n }\n\n const cacheRead = Number(ctx?.cacheReadInputTokens);\n const contextCount = Number(ctx?.contextCount);\n const hasUsage = Number.isFinite(cacheRead) || Number.isFinite(contextCount);\n if (!hasUsage) continue;\n\n const ts = parseLineTimestampMs(line);\n if (ts != null && minTimestampMs && ts < minTimestampMs) {\n continue;\n }\n\n if (ts != null) {\n if (!bestHasTs || ts > bestTs) {\n bestCtx = ctx;\n bestIsStreaming = isStreaming;\n bestTs = ts;\n bestHasTs = true;\n bytesSinceBest = 0;\n }\n } else if (!bestHasTs && !bestCtx) {\n // No timestamps available yet: the first match when scanning backward\n // is the most recent in file order.\n bestCtx = ctx;\n bestIsStreaming = isStreaming;\n bestTs = null;\n }\n }\n\n if (remainder.length > 8192) remainder = remainder.slice(-8192);\n }\n\n if (bestCtx) {\n updateLastFromContext(bestCtx, bestIsStreaming, bestTs);\n }\n } finally {\n try {\n fs.closeSync(fd);\n } catch {}\n }\n } catch {\n // ignore\n } finally {\n reseedInProgress = false;\n if (reseedQueued) {\n reseedQueued = false;\n seedLastContextFromLog({ maxScanBytes, preferStreaming });\n return;\n }\n renderNow();\n }\n }, 0).unref();\n }\n\n // Seed prompt-context usage from existing logs (important for resumed sessions).\n // Do this asynchronously to avoid delaying the first statusline frame.\n let initialSeedDone = false;\n if (resumeFlag || resumeId) {\n initialSeedDone = true;\n seedLastContextFromLog({ maxScanBytes: 64 * 1024 * 1024, preferStreaming: true });\n }\n\n // Watch session settings for autonomy/reasoning changes (cheap polling with mtime).\n let settingsMtimeMs = 0;\n let lastCtxPollMs = 0;\n setInterval(() => {\n // Refresh config/global settings if they changed (model display depends on these).\n const configMtime = safeStatMtimeMs(CONFIG_PATH);\n if (configMtime && configMtime !== configMtimeMs) {\n configMtimeMs = configMtime;\n factoryConfig = readJsonFile(CONFIG_PATH) || {};\n refreshModel();\n }\n\n const globalMtime = safeStatMtimeMs(GLOBAL_SETTINGS_PATH);\n if (globalMtime && globalMtime !== globalSettingsMtimeMs) {\n globalSettingsMtimeMs = globalMtime;\n globalSettingsModel = resolveGlobalSettingsModel();\n 
refreshModel();\n }\n\n try {\n const stat = fs.statSync(settingsPath);\n if (stat.mtimeMs === settingsMtimeMs) return;\n settingsMtimeMs = stat.mtimeMs;\n const next = readJsonFile(settingsPath) || {};\n sessionSettings = next;\n\n // Keep session token usage in sync (used by /status).\n if (next && typeof next.tokenUsage === 'object' && next.tokenUsage) {\n sessionUsage = next.tokenUsage;\n }\n\n // Keep model/provider in sync (model can change during a running session).\n refreshModel();\n\n const now = Date.now();\n if (now - lastRenderAt >= MIN_RENDER_INTERVAL_MS) {\n lastRenderAt = now;\n renderNow();\n }\n } catch {\n // ignore\n }\n }, 750).unref();\n\n // Fallback: periodically rescan log if context is still zero after startup.\n // This handles cases where tail misses early log entries.\n setInterval(() => {\n const now = Date.now();\n if (now - START_MS < 3000) return; // wait 3s after startup\n if (last.contextCount > 0 || last.cacheReadInputTokens > 0) return; // already have data\n if (now - lastCtxPollMs < 5000) return; // throttle to every 5s\n lastCtxPollMs = now;\n seedLastContextFromLog({ maxScanBytes: 4 * 1024 * 1024, preferStreaming: false });\n }, 2000).unref();\n\n function switchToSession(nextSessionId) {\n if (!nextSessionId || !isUuid(nextSessionId)) return;\n const nextLower = String(nextSessionId).toLowerCase();\n if (nextLower === sessionIdLower) return;\n\n sessionId = nextSessionId;\n sessionIdLower = nextLower;\n\n const resolved = resolveSessionSettings(workspaceDir, nextSessionId);\n settingsPath = resolved.settingsPath;\n sessionSettings = resolved.settings || {};\n\n sessionUsage =\n sessionSettings && typeof sessionSettings.tokenUsage === 'object' && sessionSettings.tokenUsage\n ? sessionSettings.tokenUsage\n : {};\n\n // Reset cached state for the new session.\n last = { cacheReadInputTokens: 0, contextCount: 0, outputTokens: 0 };\n lastContextMs = 0;\n compacting = false;\n settingsMtimeMs = 0;\n lastCtxPollMs = 0;\n\n refreshModel();\n renderNow();\n\n // Best-effort: if the new session already has Context lines in the log, seed quickly.\n seedLastContextFromLog({ maxScanBytes: 8 * 1024 * 1024, preferStreaming: false });\n }\n\n // Follow the Factory log and update based on session-scoped events.\n const tail = spawn('tail', ['-n', '0', '-F', LOG_PATH], {\n stdio: ['ignore', 'pipe', 'ignore'],\n });\n\n let buffer = '';\n tail.stdout.on('data', (chunk) => {\n buffer += String(chunk);\n while (true) {\n const idx = buffer.indexOf('\\\\n');\n if (idx === -1) break;\n const line = buffer.slice(0, idx).trimEnd();\n buffer = buffer.slice(idx + 1);\n\n const tsMs = parseLineTimestampMs(line);\n const lineSessionId = extractSessionIdFromLine(line);\n const isSessionLine =\n lineSessionId && String(lineSessionId).toLowerCase() === sessionIdLower;\n\n // /compress (aka /compact) can create a new session ID. 
Follow it so ctx/model keep updating.\n if (line.includes('oldSessionId') && line.includes('newSessionId') && line.includes('Context:')) {\n const ctxIndex = line.indexOf('Context: ');\n if (ctxIndex !== -1) {\n const jsonStr = line.slice(ctxIndex + 'Context: '.length).trim();\n try {\n const meta = JSON.parse(jsonStr);\n const oldId = meta?.oldSessionId;\n const newId = meta?.newSessionId;\n if (\n isUuid(oldId) &&\n isUuid(newId) &&\n String(oldId).toLowerCase() === sessionIdLower &&\n String(newId).toLowerCase() !== sessionIdLower\n ) {\n switchToSession(String(newId));\n continue;\n }\n } catch {\n // ignore\n }\n }\n }\n\n let compactionChanged = false;\n let compactionEnded = false;\n if (line.includes('[Compaction]')) {\n // Accept session-scoped compaction lines; allow end markers to clear even\n // if the line lacks a session id (some builds omit Context on end lines).\n if (isSessionLine || (compacting && !lineSessionId)) {\n const next = nextCompactionState(line, compacting);\n if (next !== compacting) {\n compacting = next;\n compactionChanged = true;\n if (!compacting) compactionEnded = true;\n }\n }\n }\n\n if (compactionChanged && compacting) {\n // Compaction can start after a context-limit error. Ensure we display the latest\n // pre-compaction ctx by reseeding from log (tail can miss bursts).\n seedLastContextFromLog({ maxScanBytes: 8 * 1024 * 1024, preferStreaming: true });\n }\n\n if (compactionEnded) {\n // ctx usage changes dramatically after compaction, but the next Context line\n // can be delayed. Clear displayed ctx immediately to avoid showing stale numbers.\n last.cacheReadInputTokens = 0;\n last.contextCount = 0;\n if (tsMs != null) lastContextMs = tsMs;\n }\n\n if (!line.includes('Context:')) {\n if (compactionChanged) {\n lastRenderAt = Date.now();\n renderNow();\n }\n if (compactionEnded) {\n // Compaction often completes between turns. 
Refresh ctx numbers promptly\n // by rescanning the most recent Context entry for this session.\n setTimeout(() => {\n seedLastContextFromLog({ maxScanBytes: 8 * 1024 * 1024, preferStreaming: false });\n }, 250).unref();\n }\n continue;\n }\n if (!isSessionLine) {\n if (compactionChanged) {\n lastRenderAt = Date.now();\n renderNow();\n }\n if (compactionEnded) {\n setTimeout(() => {\n seedLastContextFromLog({ maxScanBytes: 8 * 1024 * 1024, preferStreaming: false });\n }, 250).unref();\n }\n continue;\n }\n\n const ctxIndex = line.indexOf('Context: ');\n if (ctxIndex === -1) continue;\n const jsonStr = line.slice(ctxIndex + 'Context: '.length).trim();\n let ctx;\n try {\n ctx = JSON.parse(jsonStr);\n } catch {\n if (compactionChanged) {\n lastRenderAt = Date.now();\n renderNow();\n }\n continue;\n }\n\n // Context usage can appear on multiple session-scoped log lines; update whenever present.\n // (Streaming is still the best source for outputTokens / LastOut.)\n updateLastFromContext(ctx, false, tsMs);\n\n // For new sessions: if this is the first valid Context line and ctx is still 0,\n // trigger a reseed to catch any earlier log entries we might have missed.\n if (!initialSeedDone && last.contextCount === 0) {\n initialSeedDone = true;\n setTimeout(() => {\n seedLastContextFromLog({ maxScanBytes: 8 * 1024 * 1024, preferStreaming: false });\n }, 100).unref();\n }\n\n if (line.includes('[Agent] Streaming result')) {\n updateLastFromContext(ctx, true, tsMs);\n }\n\n const now = Date.now();\n if (compactionChanged || now - lastRenderAt >= MIN_RENDER_INTERVAL_MS) {\n lastRenderAt = now;\n renderNow();\n }\n\n if (compactionEnded) {\n setTimeout(() => {\n seedLastContextFromLog({ maxScanBytes: 8 * 1024 * 1024, preferStreaming: false });\n }, 250).unref();\n }\n }\n });\n\n const stop = () => {\n try { tail.kill('SIGTERM'); } catch {}\n process.exit(0);\n };\n\n process.on('SIGTERM', stop);\n process.on('SIGINT', stop);\n process.on('SIGHUP', stop);\n}\n\nmain().catch(() => {});\n`;\n}\n\nfunction generateStatuslineWrapperScript(\n execTargetPath: string,\n monitorScriptPath: string,\n sessionsScriptPath?: string,\n): string {\n return generateStatuslineWrapperScriptBun(execTargetPath, monitorScriptPath, sessionsScriptPath);\n}\n\nfunction generateStatuslineWrapperScriptBun(\n execTargetPath: string,\n monitorScriptPath: string,\n sessionsScriptPath?: string,\n): string {\n const execTargetJson = JSON.stringify(execTargetPath);\n const monitorScriptJson = JSON.stringify(monitorScriptPath);\n const sessionsScriptJson = sessionsScriptPath ? JSON.stringify(sessionsScriptPath) : \"null\";\n\n // Notes:\n // - Requires Bun >= 1.3.5 (Bun.Terminal via Bun.spawn({ terminal }))\n // - Keep dependencies zero; this file is written as a standalone executable.\n return `#!/usr/bin/env bun\n// Droid with Statusline (Bun PTY proxy)\n// Auto-generated by droid-patch --statusline\n\nconst EXEC_TARGET = ${execTargetJson};\nconst STATUSLINE_MONITOR = ${monitorScriptJson};\nconst SESSIONS_SCRIPT = ${sessionsScriptJson};\n\nconst IS_APPLE_TERMINAL = process.env.TERM_PROGRAM === \"Apple_Terminal\";\nconst MIN_RENDER_INTERVAL_MS = IS_APPLE_TERMINAL ? 
800 : 400;\nconst QUIET_MS = 50;\nconst FORCE_REPAINT_INTERVAL_MS = 2000;\nconst RESERVED_ROWS = 1;\n\nconst BYPASS_FLAGS = new Set([\"--help\", \"-h\", \"--version\", \"-V\"]);\nconst BYPASS_COMMANDS = new Set([\"help\", \"version\", \"completion\", \"completions\", \"exec\"]);\n\nfunction shouldPassthrough(argv) {\n for (const a of argv) {\n if (a === \"--\") break;\n if (BYPASS_FLAGS.has(a)) return true;\n }\n let endOpts = false;\n let cmd = null;\n for (const a of argv) {\n if (a === \"--\") {\n endOpts = true;\n continue;\n }\n if (!endOpts && a.startsWith(\"-\")) continue;\n cmd = a;\n break;\n }\n return cmd && BYPASS_COMMANDS.has(cmd);\n}\n\nfunction isSessionsCommand(argv) {\n for (const a of argv) {\n if (a === \"--\") return false;\n if (a === \"--sessions\") return true;\n }\n return false;\n}\n\nasync function execPassthrough(argv) {\n const proc = Bun.spawn([EXEC_TARGET, ...argv], {\n stdin: \"inherit\",\n stdout: \"inherit\",\n stderr: \"inherit\",\n });\n const code = await proc.exited;\n process.exit(code ?? 0);\n}\n\nasync function runSessions() {\n if (SESSIONS_SCRIPT) {\n const proc = Bun.spawn([\"node\", String(SESSIONS_SCRIPT)], {\n stdin: \"inherit\",\n stdout: \"inherit\",\n stderr: \"inherit\",\n });\n const code = await proc.exited;\n process.exit(code ?? 0);\n }\n process.stderr.write(\"[statusline] sessions script not found\\\\n\");\n process.exit(1);\n}\n\nfunction writeStdout(s) {\n try {\n process.stdout.write(s);\n } catch {\n // ignore\n }\n}\n\nfunction termSize() {\n const rows = Number(process.stdout.rows || 24);\n const cols = Number(process.stdout.columns || 80);\n return { rows: Number.isFinite(rows) ? rows : 24, cols: Number.isFinite(cols) ? cols : 80 };\n}\n\nconst ANSI_RE = /\\\\x1b\\\\[[0-9;]*m/g;\nconst RESET_SGR = \"\\\\x1b[0m\";\n\nfunction visibleWidth(text) {\n return String(text || \"\").replace(ANSI_RE, \"\").length;\n}\n\nfunction clampAnsi(text, cols) {\n if (!cols || cols <= 0) return String(text || \"\");\n cols = cols > 1 ? 
cols - 1 : cols; // avoid last-column wrap\n if (cols < 10) return String(text || \"\");\n const s = String(text || \"\");\n let visible = 0;\n let i = 0;\n const out = [];\n while (i < s.length) {\n const ch = s[i];\n if (ch === \"\\\\x1b\") {\n const m = s.indexOf(\"m\", i);\n if (m !== -1) {\n out.push(s.slice(i, m + 1));\n i = m + 1;\n continue;\n }\n out.push(ch);\n i += 1;\n continue;\n }\n if (visible >= cols) break;\n out.push(ch);\n i += 1;\n visible += 1;\n }\n if (i < s.length && cols >= 1) {\n if (visible >= cols) {\n if (out.length) out[out.length - 1] = \"…\";\n else out.push(\"…\");\n } else {\n out.push(\"…\");\n }\n out.push(RESET_SGR);\n }\n return out.join(\"\");\n}\n\nfunction splitSegments(text) {\n if (!text) return [];\n const s = String(text);\n const segments = [];\n let start = 0;\n while (true) {\n const idx = s.indexOf(RESET_SGR, start);\n if (idx === -1) {\n const tail = s.slice(start);\n if (tail) segments.push(tail);\n break;\n }\n const seg = s.slice(start, idx + RESET_SGR.length);\n if (seg) segments.push(seg);\n start = idx + RESET_SGR.length;\n }\n return segments;\n}\n\nfunction wrapSegments(segments, cols) {\n if (!segments || segments.length === 0) return [\"\"];\n if (!cols || cols <= 0) return [segments.join(\"\")];\n\n const lines = [];\n let cur = [];\n let curW = 0;\n\n for (let seg of segments) {\n let segW = visibleWidth(seg);\n if (segW <= 0) continue;\n\n if (cur.length === 0) {\n if (segW > cols) {\n seg = clampAnsi(seg, cols);\n segW = visibleWidth(seg);\n }\n cur = [seg];\n curW = segW;\n continue;\n }\n\n if (curW + segW <= cols) {\n cur.push(seg);\n curW += segW;\n } else {\n lines.push(cur.join(\"\"));\n if (segW > cols) {\n seg = clampAnsi(seg, cols);\n segW = visibleWidth(seg);\n }\n cur = [seg];\n curW = segW;\n }\n }\n\n if (cur.length) lines.push(cur.join(\"\"));\n return lines.length ? lines : [\"\"];\n}\n\nclass StatusRenderer {\n constructor() {\n this.raw = \"\";\n this.segments = [];\n this.lines = [\"\"];\n this.activeReservedRows = RESERVED_ROWS;\n this.force = false;\n this.urgent = false;\n this.lastRenderMs = 0;\n this.lastChildOutMs = 0;\n this.cursorVisible = true;\n }\n noteChildOutput() {\n this.lastChildOutMs = Date.now();\n }\n setCursorVisible(v) {\n this.cursorVisible = !!v;\n }\n forceRepaint(urgent = false) {\n this.force = true;\n if (urgent) this.urgent = true;\n }\n setActiveReservedRows(n) {\n const v = Number(n || 1);\n this.activeReservedRows = Number.isFinite(v) ? Math.max(1, Math.trunc(v)) : 1;\n }\n setLine(line) {\n const next = String(line || \"\");\n if (next !== this.raw) {\n this.raw = next;\n this.segments = splitSegments(next);\n this.force = true;\n }\n }\n desiredReservedRows(physicalRows, cols, minReserved) {\n let rows = Number(physicalRows || 24);\n rows = Number.isFinite(rows) ? rows : 24;\n cols = Number(cols || 80);\n cols = Number.isFinite(cols) ? cols : 80;\n\n const maxReserved = Math.max(1, rows - 4);\n const segs = this.segments.length ? this.segments : (this.raw ? [this.raw] : []);\n let lines = segs.length ? 
wrapSegments(segs, cols) : [\"\"];\n\n const needed = Math.min(lines.length, maxReserved);\n let desired = Math.max(Number(minReserved || 1), needed);\n desired = Math.min(desired, maxReserved);\n\n if (lines.length < desired) lines = new Array(desired - lines.length).fill(\"\").concat(lines);\n if (lines.length > desired) lines = lines.slice(-desired);\n\n this.lines = lines;\n return desired;\n }\n clearReservedArea(physicalRows, cols, reservedRows, restoreRow = 1, restoreCol = 1) {\n let rows = Number(physicalRows || 24);\n rows = Number.isFinite(rows) ? rows : 24;\n cols = Number(cols || 80);\n cols = Number.isFinite(cols) ? cols : 80;\n let reserved = Number(reservedRows || 1);\n reserved = Number.isFinite(reserved) ? Math.max(1, Math.trunc(reserved)) : 1;\n\n reserved = Math.min(reserved, rows);\n const startRow = rows - reserved + 1;\n const parts = [\"\\\\x1b[?2026h\", \"\\\\x1b[?25l\", RESET_SGR];\n for (let i = 0; i < reserved; i++) parts.push(\"\\\\x1b[\" + (startRow + i) + \";1H\\\\x1b[2K\");\n parts.push(\"\\\\x1b[\" + restoreRow + \";\" + restoreCol + \"H\");\n parts.push(this.cursorVisible ? \"\\\\x1b[?25h\" : \"\\\\x1b[?25l\");\n parts.push(\"\\\\x1b[?2026l\");\n writeStdout(parts.join(\"\"));\n }\n render(physicalRows, cols, restoreRow = 1, restoreCol = 1) {\n if (!this.force) return;\n if (!this.raw) {\n this.force = false;\n this.urgent = false;\n return;\n }\n const now = Date.now();\n if (!this.urgent && now - this.lastRenderMs < MIN_RENDER_INTERVAL_MS) return;\n if (!this.urgent && QUIET_MS > 0 && now - this.lastChildOutMs < QUIET_MS) return;\n\n let rows = Number(physicalRows || 24);\n rows = Number.isFinite(rows) ? rows : 24;\n cols = Number(cols || 80);\n cols = Number.isFinite(cols) ? cols : 80;\n if (cols <= 0) cols = 80;\n\n const reserved = Math.max(1, Math.min(this.activeReservedRows, Math.max(1, rows - 4)));\n const startRow = rows - reserved + 1;\n const childRows = rows - reserved;\n\n let lines = this.lines.length ? this.lines.slice() : [\"\"];\n if (lines.length < reserved) lines = new Array(reserved - lines.length).fill(\"\").concat(lines);\n if (lines.length > reserved) lines = lines.slice(-reserved);\n\n const parts = [\"\\\\x1b[?2026h\", \"\\\\x1b[?25l\"];\n parts.push(\"\\\\x1b[1;\" + childRows + \"r\");\n for (let i = 0; i < reserved; i++) {\n const row = startRow + i;\n const text = clampAnsi(lines[i], cols);\n parts.push(\"\\\\x1b[\" + row + \";1H\" + RESET_SGR + \"\\\\x1b[2K\");\n parts.push(\"\\\\x1b[\" + row + \";1H\" + text + RESET_SGR);\n }\n parts.push(\"\\\\x1b[\" + restoreRow + \";\" + restoreCol + \"H\");\n parts.push(this.cursorVisible ? 
\"\\\\x1b[?25h\" : \"\\\\x1b[?25l\");\n parts.push(\"\\\\x1b[?2026l\");\n writeStdout(parts.join(\"\"));\n\n this.lastRenderMs = now;\n this.force = false;\n this.urgent = false;\n }\n clear() {\n const { rows, cols } = termSize();\n this.clearReservedArea(rows, cols, Math.max(this.activeReservedRows, RESERVED_ROWS));\n }\n}\n\nclass OutputRewriter {\n constructor() {\n this.buf = new Uint8Array(0);\n }\n feed(chunk, maxRow) {\n if (!chunk || chunk.length === 0) return chunk;\n const merged = new Uint8Array(this.buf.length + chunk.length);\n merged.set(this.buf, 0);\n merged.set(chunk, this.buf.length);\n this.buf = new Uint8Array(0);\n\n const out = [];\n let i = 0;\n\n const isFinal = (v) => v >= 0x40 && v <= 0x7e;\n\n while (i < merged.length) {\n const b = merged[i];\n if (b !== 0x1b) {\n out.push(b);\n i += 1;\n continue;\n }\n if (i + 1 >= merged.length) {\n this.buf = merged.slice(i);\n break;\n }\n const nxt = merged[i + 1];\n if (nxt !== 0x5b) {\n out.push(b);\n i += 1;\n continue;\n }\n\n let j = i + 2;\n while (j < merged.length && !isFinal(merged[j])) j += 1;\n if (j >= merged.length) {\n this.buf = merged.slice(i);\n break;\n }\n const final = merged[j];\n let seq = merged.slice(i, j + 1);\n\n if ((final === 0x48 || final === 0x66) && maxRow > 0) {\n const params = merged.slice(i + 2, j);\n const s = new TextDecoder().decode(params);\n if (!s || /^[0-9;]/.test(s)) {\n const parts = s ? s.split(\";\") : [];\n const row = Number(parts[0] || 1);\n const col = Number(parts[1] || 1);\n let r = Number.isFinite(row) ? row : 1;\n let c = Number.isFinite(col) ? col : 1;\n if (r === 999 || r > maxRow) r = maxRow;\n if (r < 1) r = 1;\n if (c < 1) c = 1;\n const newParams = new TextEncoder().encode(String(r) + \";\" + String(c));\n const ns = new Uint8Array(2 + newParams.length + 1);\n ns[0] = 0x1b;\n ns[1] = 0x5b;\n ns.set(newParams, 2);\n ns[ns.length - 1] = final;\n seq = ns;\n }\n } else if (final === 0x72 && maxRow > 0) {\n const params = merged.slice(i + 2, j);\n const s = new TextDecoder().decode(params);\n if (!s || /^[0-9;]/.test(s)) {\n const parts = s ? s.split(\";\") : [];\n const top = Number(parts[0] || 1);\n const bottom = Number(parts[1] || maxRow);\n let t = Number.isFinite(top) ? top : 1;\n let btm = Number.isFinite(bottom) ? bottom : maxRow;\n if (t <= 0) t = 1;\n if (btm <= 0 || btm === 999 || btm > maxRow) btm = maxRow;\n if (t > btm) t = 1;\n const str = \"\\\\x1b[\" + String(t) + \";\" + String(btm) + \"r\";\n seq = new TextEncoder().encode(str);\n }\n }\n\n for (const bb of seq) out.push(bb);\n i = j + 1;\n }\n\n return new Uint8Array(out);\n }\n}\n\nclass CursorTracker {\n constructor() {\n this.row = 1;\n this.col = 1;\n this.savedRow = 1;\n this.savedCol = 1;\n this.buf = new Uint8Array(0);\n this.inOsc = false;\n this.utf8Cont = 0;\n this.wrapPending = false;\n }\n position() {\n return { row: this.row, col: this.col };\n }\n feed(chunk, maxRow, maxCol) {\n if (!chunk || chunk.length === 0) return;\n maxRow = Math.max(1, Number(maxRow || 1));\n maxCol = Math.max(1, Number(maxCol || 1));\n\n const merged = new Uint8Array(this.buf.length + chunk.length);\n merged.set(this.buf, 0);\n merged.set(chunk, this.buf.length);\n this.buf = new Uint8Array(0);\n\n const clamp = () => {\n if (this.row < 1) this.row = 1;\n else if (this.row > maxRow) this.row = maxRow;\n if (this.col < 1) this.col = 1;\n else if (this.col > maxCol) this.col = maxCol;\n };\n\n const parseIntDefault = (v, d) => {\n const n = Number(v);\n return Number.isFinite(n) && n > 0 ? 
Math.trunc(n) : d;\n };\n\n let i = 0;\n const isFinal = (v) => v >= 0x40 && v <= 0x7e;\n\n while (i < merged.length) {\n const b = merged[i];\n\n if (this.inOsc) {\n if (b === 0x07) {\n this.inOsc = false;\n i += 1;\n continue;\n }\n if (b === 0x1b) {\n if (i + 1 >= merged.length) {\n this.buf = merged.slice(i);\n break;\n }\n if (merged[i + 1] === 0x5c) {\n this.inOsc = false;\n i += 2;\n continue;\n }\n }\n i += 1;\n continue;\n }\n\n if (this.utf8Cont > 0) {\n if (b >= 0x80 && b <= 0xbf) {\n this.utf8Cont -= 1;\n i += 1;\n continue;\n }\n this.utf8Cont = 0;\n }\n\n if (b === 0x1b) {\n this.wrapPending = false;\n if (i + 1 >= merged.length) {\n this.buf = merged.slice(i);\n break;\n }\n const nxt = merged[i + 1];\n\n if (nxt === 0x5b) {\n let j = i + 2;\n while (j < merged.length && !isFinal(merged[j])) j += 1;\n if (j >= merged.length) {\n this.buf = merged.slice(i);\n break;\n }\n const final = merged[j];\n const params = merged.slice(i + 2, j);\n const s = new TextDecoder().decode(params);\n if (s && !/^[0-9;]/.test(s)) {\n i = j + 1;\n continue;\n }\n const parts = s ? s.split(\";\") : [];\n const p0 = parseIntDefault(parts[0] || \"\", 1);\n const p1 = parseIntDefault(parts[1] || \"\", 1);\n\n if (final === 0x48 || final === 0x66) {\n this.row = p0;\n this.col = p1;\n clamp();\n } else if (final === 0x41) {\n this.row = Math.max(1, this.row - p0);\n } else if (final === 0x42) {\n this.row = Math.min(maxRow, this.row + p0);\n } else if (final === 0x43) {\n this.col = Math.min(maxCol, this.col + p0);\n } else if (final === 0x44) {\n this.col = Math.max(1, this.col - p0);\n } else if (final === 0x45) {\n this.row = Math.min(maxRow, this.row + p0);\n this.col = 1;\n } else if (final === 0x46) {\n this.row = Math.max(1, this.row - p0);\n this.col = 1;\n } else if (final === 0x47) {\n this.col = p0;\n clamp();\n } else if (final === 0x64) {\n this.row = p0;\n clamp();\n } else if (final === 0x72) {\n this.row = 1;\n this.col = 1;\n } else if (final === 0x73) {\n this.savedRow = this.row;\n this.savedCol = this.col;\n } else if (final === 0x75) {\n this.row = this.savedRow;\n this.col = this.savedCol;\n clamp();\n }\n\n i = j + 1;\n continue;\n }\n\n if (nxt === 0x5d || nxt === 0x50 || nxt === 0x5e || nxt === 0x5f || nxt === 0x58) {\n this.inOsc = true;\n i += 2;\n continue;\n }\n\n if (nxt === 0x37) {\n this.savedRow = this.row;\n this.savedCol = this.col;\n i += 2;\n continue;\n }\n if (nxt === 0x38) {\n this.row = this.savedRow;\n this.col = this.savedCol;\n clamp();\n i += 2;\n continue;\n }\n\n i += 2;\n continue;\n }\n\n if (b === 0x0d) {\n this.col = 1;\n this.wrapPending = false;\n i += 1;\n continue;\n }\n if (b === 0x0a || b === 0x0b || b === 0x0c) {\n this.row = Math.min(maxRow, this.row + 1);\n this.wrapPending = false;\n i += 1;\n continue;\n }\n if (b === 0x08) {\n this.col = Math.max(1, this.col - 1);\n this.wrapPending = false;\n i += 1;\n continue;\n }\n if (b === 0x09) {\n const nextStop = Math.floor((this.col - 1) / 8 + 1) * 8 + 1;\n this.col = Math.min(maxCol, nextStop);\n this.wrapPending = false;\n i += 1;\n continue;\n }\n if (b < 0x20 || b === 0x7f) {\n i += 1;\n continue;\n }\n\n if (this.wrapPending) {\n this.row = Math.min(maxRow, this.row + 1);\n this.col = 1;\n this.wrapPending = false;\n }\n\n if (b >= 0x80) {\n if ((b & 0xe0) === 0xc0) this.utf8Cont = 1;\n else if ((b & 0xf0) === 0xe0) this.utf8Cont = 2;\n else if ((b & 0xf8) === 0xf0) this.utf8Cont = 3;\n else this.utf8Cont = 0;\n }\n\n if (this.col < maxCol) this.col += 1;\n else {\n this.col = maxCol;\n 
this.wrapPending = true;\n }\n i += 1;\n }\n }\n}\n\nasync function main() {\n const argv = process.argv.slice(2);\n\n if (isSessionsCommand(argv)) await runSessions();\n\n if (!process.stdin.isTTY || !process.stdout.isTTY || shouldPassthrough(argv)) {\n await execPassthrough(argv);\n return;\n }\n\n // Clean viewport.\n writeStdout(\"\\\\x1b[?2026h\\\\x1b[0m\\\\x1b[r\\\\x1b[2J\\\\x1b[H\\\\x1b[?2026l\");\n\n const renderer = new StatusRenderer();\n renderer.setLine(\"\\\\x1b[48;5;238m\\\\x1b[38;5;15m Statusline: starting… \\\\x1b[0m\");\n renderer.forceRepaint(true);\n\n let { rows: physicalRows, cols: physicalCols } = termSize();\n let effectiveReservedRows = renderer.desiredReservedRows(physicalRows, physicalCols, RESERVED_ROWS);\n renderer.setActiveReservedRows(effectiveReservedRows);\n let childRows = Math.max(4, physicalRows - effectiveReservedRows);\n let childCols = Math.max(10, physicalCols);\n\n // Reserve the bottom rows early, before the child starts writing.\n writeStdout(\n \"\\\\x1b[?2026h\\\\x1b[?25l\\\\x1b[1;\" + childRows + \"r\\\\x1b[1;1H\\\\x1b[?25h\\\\x1b[?2026l\",\n );\n renderer.forceRepaint(true);\n renderer.render(physicalRows, physicalCols, 1, 1);\n\n // Spawn child with terminal support.\n let child;\n try {\n child = Bun.spawn([EXEC_TARGET, ...argv], {\n cwd: process.cwd(),\n env: process.env,\n detached: true,\n terminal: {\n cols: childCols,\n rows: childRows,\n data(_terminal, data) {\n onChildData(data);\n },\n },\n onExit(_proc, exitCode, signal, _error) {\n onChildExit(exitCode, signal);\n },\n });\n } catch (e) {\n process.stderr.write(\"[statusline] failed to spawn child: \" + String(e?.message || e) + \"\\\\n\");\n process.exit(1);\n }\n\n const terminal = child.terminal;\n\n // Best-effort PGID resolution (matches Python wrapper behavior).\n // This improves session resolution (ps/lsof scanning) and signal forwarding.\n let pgid = child.pid;\n try {\n const res = Bun.spawnSync([\"ps\", \"-o\", \"pgid=\", \"-p\", String(child.pid)], {\n stdin: \"ignore\",\n stdout: \"pipe\",\n stderr: \"ignore\",\n });\n if (res && res.exitCode === 0 && res.stdout) {\n const text = new TextDecoder().decode(res.stdout).trim();\n const n = Number(text);\n if (Number.isFinite(n) && n > 0) pgid = Math.trunc(n);\n }\n } catch {}\n\n // Spawn monitor (Node).\n const monitorEnv = { ...process.env, DROID_STATUSLINE_PGID: String(pgid) };\n const monitor = Bun.spawn([\"node\", STATUSLINE_MONITOR, ...argv], {\n stdin: \"ignore\",\n stdout: \"pipe\",\n stderr: \"ignore\",\n env: monitorEnv,\n });\n\n let shouldStop = false;\n const rewriter = new OutputRewriter();\n const cursor = new CursorTracker();\n\n let detectBuf = new Uint8Array(0);\n let detectStr = \"\";\n let cursorVisible = true;\n let scrollRegionDirty = true;\n let lastForceRepaintMs = Date.now();\n let lastPhysicalRows = 0;\n let lastPhysicalCols = 0;\n\n function appendDetect(chunk) {\n const max = 128;\n const merged = new Uint8Array(Math.min(max, detectBuf.length + chunk.length));\n const takePrev = Math.max(0, merged.length - chunk.length);\n if (takePrev > 0) merged.set(detectBuf.slice(Math.max(0, detectBuf.length - takePrev)), 0);\n merged.set(chunk.slice(Math.max(0, chunk.length - (merged.length - takePrev))), takePrev);\n detectBuf = merged;\n try {\n detectStr = Buffer.from(detectBuf).toString(\"latin1\");\n } catch {\n detectStr = \"\";\n }\n }\n\n function includesBytes(needle) {\n return detectStr.includes(needle);\n }\n\n function lastIndexOfBytes(needle) {\n return detectStr.lastIndexOf(needle);\n }\n\n 
function includesScrollRegionCSI() {\n return /\\\\x1b\\\\[[0-9]*;?[0-9]*r/.test(detectStr);\n }\n\n function updateCursorVisibility() {\n const show = includesBytes(\"\\\\x1b[?25h\");\n const hide = includesBytes(\"\\\\x1b[?25l\");\n if (show || hide) {\n // best-effort: if both present, whichever appears later \"wins\"\n const h = lastIndexOfBytes(\"\\\\x1b[?25h\");\n const l = lastIndexOfBytes(\"\\\\x1b[?25l\");\n cursorVisible = h > l;\n renderer.setCursorVisible(cursorVisible);\n }\n }\n\n function needsScrollRegionReset() {\n return (\n includesBytes(\"\\\\x1b[?1049\") ||\n includesBytes(\"\\\\x1b[?1047\") ||\n includesBytes(\"\\\\x1b[?47\") ||\n includesBytes(\"\\\\x1b[J\") ||\n includesBytes(\"\\\\x1b[0J\") ||\n includesBytes(\"\\\\x1b[1J\") ||\n includesBytes(\"\\\\x1b[2J\") ||\n includesBytes(\"\\\\x1b[3J\") ||\n includesBytes(\"\\\\x1b[r\") ||\n includesScrollRegionCSI()\n );\n }\n\n function onChildData(data) {\n if (shouldStop) return;\n const chunk = data instanceof Uint8Array ? data : new Uint8Array(data);\n appendDetect(chunk);\n if (needsScrollRegionReset()) scrollRegionDirty = true;\n updateCursorVisibility();\n\n renderer.noteChildOutput();\n const rewritten = rewriter.feed(chunk, childRows);\n cursor.feed(rewritten, childRows, childCols);\n writeStdout(Buffer.from(rewritten));\n }\n\n function onChildExit(exitCode, signal) {\n if (shouldStop) return;\n shouldStop = true;\n const code = exitCode ?? (signal != null ? 128 + signal : 0);\n cleanup().finally(() => process.exit(code));\n }\n\n async function readMonitor() {\n if (!monitor.stdout) return;\n const reader = monitor.stdout.getReader();\n let buf = \"\";\n while (!shouldStop) {\n const { value, done } = await reader.read();\n if (done || !value) break;\n buf += new TextDecoder().decode(value);\n while (true) {\n const idx = buf.indexOf(\"\\\\n\");\n if (idx === -1) break;\n const line = buf.slice(0, idx).replace(/\\\\r$/, \"\");\n buf = buf.slice(idx + 1);\n if (!line) continue;\n renderer.setLine(line);\n renderer.forceRepaint(false);\n }\n }\n }\n readMonitor().catch(() => {});\n\n function repaintStatusline(forceUrgent = false) {\n const { row, col } = cursor.position();\n let r = Math.max(1, Math.min(childRows, row));\n let c = Math.max(1, Math.min(childCols, col));\n\n if (scrollRegionDirty) {\n const seq =\n \"\\\\x1b[?2026h\\\\x1b[?25l\\\\x1b[1;\" +\n childRows +\n \"r\\\\x1b[\" +\n r +\n \";\" +\n c +\n \"H\" +\n (cursorVisible ? 
\"\\\\x1b[?25h\" : \"\\\\x1b[?25l\") +\n \"\\\\x1b[?2026l\";\n writeStdout(seq);\n scrollRegionDirty = false;\n }\n\n renderer.forceRepaint(forceUrgent);\n renderer.render(physicalRows, physicalCols, r, c);\n }\n\n function handleSizeChange(nextRows, nextCols, forceUrgent = false) {\n physicalRows = nextRows;\n physicalCols = nextCols;\n\n const desired = renderer.desiredReservedRows(physicalRows, physicalCols, RESERVED_ROWS);\n const { row, col } = cursor.position();\n if (desired < effectiveReservedRows) {\n renderer.clearReservedArea(physicalRows, physicalCols, effectiveReservedRows, row, col);\n }\n effectiveReservedRows = desired;\n renderer.setActiveReservedRows(effectiveReservedRows);\n\n childRows = Math.max(4, physicalRows - effectiveReservedRows);\n childCols = Math.max(10, physicalCols);\n try {\n terminal.resize(childCols, childRows);\n } catch {}\n try {\n process.kill(-child.pid, \"SIGWINCH\");\n } catch {\n try { process.kill(child.pid, \"SIGWINCH\"); } catch {}\n }\n\n scrollRegionDirty = true;\n renderer.forceRepaint(true);\n repaintStatusline(forceUrgent);\n }\n\n process.on(\"SIGWINCH\", () => {\n const next = termSize();\n handleSizeChange(next.rows, next.cols, true);\n });\n\n // Forward signals to child's process group when possible.\n const forward = (sig) => {\n try {\n process.kill(-pgid, sig);\n } catch {\n try {\n process.kill(child.pid, sig);\n } catch {}\n }\n };\n for (const s of [\"SIGTERM\", \"SIGINT\", \"SIGHUP\"]) {\n try {\n process.on(s, () => forward(s));\n } catch {}\n }\n\n // Raw stdin -> PTY.\n try {\n process.stdin.setRawMode(true);\n } catch {}\n process.stdin.resume();\n process.stdin.on(\"data\", (buf) => {\n try {\n if (typeof buf === \"string\") terminal.write(buf);\n else {\n // Prefer bytes when supported; fall back to UTF-8 decoding.\n try {\n // Bun.Terminal.write may accept Uint8Array in newer versions.\n terminal.write(buf);\n } catch {\n terminal.write(new TextDecoder().decode(buf));\n }\n }\n } catch {}\n });\n\n const tick = setInterval(() => {\n if (shouldStop) return;\n const next = termSize();\n const sizeChanged = next.rows !== lastPhysicalRows || next.cols !== lastPhysicalCols;\n const desired = renderer.desiredReservedRows(next.rows, next.cols, RESERVED_ROWS);\n if (sizeChanged || desired !== effectiveReservedRows) {\n handleSizeChange(next.rows, next.cols, true);\n lastPhysicalRows = next.rows;\n lastPhysicalCols = next.cols;\n lastForceRepaintMs = Date.now();\n return;\n }\n const now = Date.now();\n if (now - lastForceRepaintMs >= FORCE_REPAINT_INTERVAL_MS) {\n repaintStatusline(false);\n lastForceRepaintMs = now;\n } else {\n const { row, col } = cursor.position();\n renderer.render(physicalRows, physicalCols, row, col);\n }\n }, 50);\n\n async function cleanup() {\n clearInterval(tick);\n try {\n process.stdin.setRawMode(false);\n } catch {}\n try {\n const { row, col } = cursor.position();\n renderer.clearReservedArea(physicalRows, physicalCols, effectiveReservedRows, row, col);\n } catch {}\n try {\n writeStdout(\"\\\\x1b[r\\\\x1b[0m\\\\x1b[?25h\");\n } catch {}\n try {\n monitor.kill();\n } catch {}\n try {\n terminal.close();\n } catch {}\n }\n\n // Keep process alive until child exits.\n await child.exited;\n await cleanup();\n}\n\nmain().catch(() => process.exit(1));\n`;\n}\n\nexport async function createStatuslineFiles(\n outputDir: string,\n execTargetPath: string,\n aliasName: string,\n sessionsScriptPath?: string,\n): Promise<{ wrapperScript: string; monitorScript: string }> {\n if (!existsSync(outputDir)) {\n 
await mkdir(outputDir, { recursive: true });\n }\n\n const monitorScriptPath = join(outputDir, `${aliasName}-statusline.js`);\n const wrapperScriptPath = join(outputDir, aliasName);\n\n await writeFile(monitorScriptPath, generateStatuslineMonitorScript());\n await chmod(monitorScriptPath, 0o755);\n\n const wrapper = generateStatuslineWrapperScript(\n execTargetPath,\n monitorScriptPath,\n sessionsScriptPath,\n );\n\n await writeFile(wrapperScriptPath, wrapper);\n await chmod(wrapperScriptPath, 0o755);\n\n return { wrapperScript: wrapperScriptPath, monitorScript: monitorScriptPath };\n}\n","import { chmod, mkdir, writeFile } from \"node:fs/promises\";\nimport { existsSync } from \"node:fs\";\nimport { join } from \"node:path\";\n\n/**\n * Generate sessions browser script (Node.js)\n */\nfunction generateSessionsBrowserScript(aliasName: string): string {\n const aliasJson = JSON.stringify(aliasName);\n return `#!/usr/bin/env node\n// Droid Sessions Browser - Interactive selector\n// Auto-generated by droid-patch\n\nconst fs = require('fs');\nconst path = require('path');\nconst readline = require('readline');\nconst { execSync, spawn } = require('child_process');\n\nconst FACTORY_HOME = path.join(require('os').homedir(), '.factory');\nconst SESSIONS_ROOT = path.join(FACTORY_HOME, 'sessions');\nconst ALIAS_NAME = ${aliasJson};\n\n// ANSI\nconst CYAN = '\\\\x1b[36m';\nconst GREEN = '\\\\x1b[32m';\nconst YELLOW = '\\\\x1b[33m';\nconst RED = '\\\\x1b[31m';\nconst DIM = '\\\\x1b[2m';\nconst RESET = '\\\\x1b[0m';\nconst BOLD = '\\\\x1b[1m';\nconst CLEAR = '\\\\x1b[2J\\\\x1b[H';\nconst HIDE_CURSOR = '\\\\x1b[?25l';\nconst SHOW_CURSOR = '\\\\x1b[?25h';\n\nfunction sanitizePath(p) {\n return p.replace(/:/g, '').replace(/[\\\\\\\\/]/g, '-');\n}\n\nfunction parseSessionFile(jsonlPath, settingsPath) {\n const sessionId = path.basename(jsonlPath, '.jsonl');\n const stats = fs.statSync(jsonlPath);\n \n const result = {\n id: sessionId,\n title: '',\n mtime: stats.mtimeMs,\n model: '',\n firstUserMsg: '',\n lastUserMsg: '',\n messageCount: 0,\n lastTimestamp: '',\n };\n\n try {\n const content = fs.readFileSync(jsonlPath, 'utf-8');\n const lines = content.split('\\\\n').filter(l => l.trim());\n const userMessages = [];\n \n for (const line of lines) {\n try {\n const obj = JSON.parse(line);\n if (obj.type === 'session_start') {\n result.title = obj.title || '';\n } else if (obj.type === 'message') {\n result.messageCount++;\n if (obj.timestamp) result.lastTimestamp = obj.timestamp;\n \n const msg = obj.message || {};\n if (msg.role === 'user' && Array.isArray(msg.content)) {\n for (const c of msg.content) {\n if (c && c.type === 'text' && c.text && !c.text.startsWith('<system-reminder>')) {\n userMessages.push(c.text.slice(0, 150).replace(/\\\\n/g, ' ').trim());\n break;\n }\n }\n }\n }\n } catch {}\n }\n \n if (userMessages.length > 0) {\n result.firstUserMsg = userMessages[0];\n result.lastUserMsg = userMessages.length > 1 ? 
userMessages[userMessages.length - 1] : '';\n }\n } catch {}\n\n if (fs.existsSync(settingsPath)) {\n try {\n const settings = JSON.parse(fs.readFileSync(settingsPath, 'utf-8'));\n result.model = settings.model || '';\n } catch {}\n }\n\n return result;\n}\n\nfunction collectSessions() {\n const cwd = process.cwd();\n const cwdSanitized = sanitizePath(cwd);\n const sessions = [];\n\n if (!fs.existsSync(SESSIONS_ROOT)) return sessions;\n\n for (const wsDir of fs.readdirSync(SESSIONS_ROOT)) {\n if (wsDir !== cwdSanitized) continue;\n \n const wsPath = path.join(SESSIONS_ROOT, wsDir);\n if (!fs.statSync(wsPath).isDirectory()) continue;\n\n for (const file of fs.readdirSync(wsPath)) {\n if (!file.endsWith('.jsonl')) continue;\n \n const sessionId = file.slice(0, -6);\n const jsonlPath = path.join(wsPath, file);\n const settingsPath = path.join(wsPath, sessionId + '.settings.json');\n\n try {\n const session = parseSessionFile(jsonlPath, settingsPath);\n if (session.messageCount === 0 || !session.firstUserMsg) continue;\n sessions.push(session);\n } catch {}\n }\n }\n\n sessions.sort((a, b) => b.mtime - a.mtime);\n return sessions.slice(0, 50);\n}\n\nfunction formatTime(ts) {\n if (!ts) return '';\n try {\n const d = new Date(ts);\n return d.toLocaleString('zh-CN', { month: '2-digit', day: '2-digit', hour: '2-digit', minute: '2-digit' });\n } catch {\n return ts.slice(0, 16);\n }\n}\n\nfunction truncate(s, len) {\n if (!s) return '';\n s = s.replace(/\\\\n/g, ' ');\n return s.length > len ? s.slice(0, len - 3) + '...' : s;\n}\n\nfunction render(sessions, selected, offset, rows) {\n const cwd = process.cwd();\n const pageSize = rows - 6;\n const visible = sessions.slice(offset, offset + pageSize);\n \n let out = CLEAR;\n out += BOLD + 'Sessions: ' + RESET + DIM + cwd + RESET + '\\\\n';\n out += DIM + '[↑/↓] Select [Enter] Resume [q] Quit' + RESET + '\\\\n\\\\n';\n\n for (let i = 0; i < visible.length; i++) {\n const s = visible[i];\n const idx = offset + i;\n const isSelected = idx === selected;\n const prefix = isSelected ? 
GREEN + '▶ ' + RESET : ' ';\n \n const title = truncate(s.title || '(no title)', 35);\n const time = formatTime(s.lastTimestamp);\n const model = truncate(s.model, 20);\n \n if (isSelected) {\n out += prefix + YELLOW + title + RESET + '\\\\n';\n out += ' ' + DIM + 'ID: ' + RESET + CYAN + s.id + RESET + '\\\\n';\n out += ' ' + DIM + 'Last: ' + time + ' | Model: ' + model + ' | ' + s.messageCount + ' msgs' + RESET + '\\\\n';\n out += ' ' + DIM + 'First input: ' + RESET + truncate(s.firstUserMsg, 60) + '\\\\n';\n if (s.lastUserMsg && s.lastUserMsg !== s.firstUserMsg) {\n out += ' ' + DIM + 'Last input: ' + RESET + truncate(s.lastUserMsg, 60) + '\\\\n';\n }\n } else {\n out += prefix + title + DIM + ' (' + time + ')' + RESET + '\\\\n';\n }\n }\n\n out += '\\\\n' + DIM + 'Page ' + (Math.floor(offset / pageSize) + 1) + '/' + Math.ceil(sessions.length / pageSize) + ' (' + sessions.length + ' sessions)' + RESET;\n \n process.stdout.write(out);\n}\n\nasync function main() {\n const sessions = collectSessions();\n \n if (sessions.length === 0) {\n console.log(RED + 'No sessions with interactions found in current directory' + RESET);\n process.exit(0);\n }\n\n if (!process.stdin.isTTY) {\n for (const s of sessions) {\n console.log(s.id + ' ' + (s.title || '') + ' ' + formatTime(s.lastTimestamp));\n }\n process.exit(0);\n }\n\n const rows = process.stdout.rows || 24;\n const pageSize = rows - 6;\n let selected = 0;\n let offset = 0;\n\n function restoreTerminal() {\n try { process.stdout.write(SHOW_CURSOR); } catch {}\n try { process.stdin.setRawMode(false); } catch {}\n try { process.stdin.pause(); } catch {}\n }\n\n function clearScreen() {\n try { process.stdout.write(CLEAR); } catch {}\n }\n\n process.stdin.setRawMode(true);\n process.stdin.resume();\n process.stdout.write(HIDE_CURSOR);\n \n render(sessions, selected, offset, rows);\n\n const onKey = (key) => {\n const k = key.toString();\n \n if (k === 'q' || k === '\\\\x03') { // q or Ctrl+C\n restoreTerminal();\n clearScreen();\n process.exit(0);\n }\n \n if (k === '\\\\r' || k === '\\\\n') { // Enter\n // Stop reading input / stop reacting to arrow keys before handing off to droid.\n process.stdin.off('data', onKey);\n restoreTerminal();\n clearScreen();\n const session = sessions[selected];\n console.log(GREEN + 'Resuming session: ' + session.id + RESET);\n console.log(DIM + 'Using: ' + ALIAS_NAME + ' --resume ' + session.id + RESET + '\\\\n');\n\n // Avoid the sessions browser reacting to signals while droid is running.\n try { process.removeAllListeners('SIGINT'); } catch {}\n try { process.removeAllListeners('SIGTERM'); } catch {}\n try { process.on('SIGINT', () => {}); } catch {}\n try { process.on('SIGTERM', () => {}); } catch {}\n\n const child = spawn(ALIAS_NAME, ['--resume', session.id], { stdio: 'inherit' });\n child.on('exit', (code) => process.exit(code || 0));\n child.on('error', () => process.exit(1));\n return;\n }\n \n if (k === '\\\\x1b[A' || k === 'k') { // Up\n if (selected > 0) {\n selected--;\n if (selected < offset) offset = Math.max(0, offset - 1);\n }\n } else if (k === '\\\\x1b[B' || k === 'j') { // Down\n if (selected < sessions.length - 1) {\n selected++;\n if (selected >= offset + pageSize) offset++;\n }\n } else if (k === '\\\\x1b[5~') { // Page Up\n selected = Math.max(0, selected - pageSize);\n offset = Math.max(0, offset - pageSize);\n } else if (k === '\\\\x1b[6~') { // Page Down\n selected = Math.min(sessions.length - 1, selected + pageSize);\n offset = Math.min(Math.max(0, sessions.length - pageSize), offset 
+ pageSize);\n }\n \n render(sessions, selected, offset, rows);\n };\n\n process.stdin.on('data', onKey);\n\n process.on('SIGINT', () => {\n restoreTerminal();\n clearScreen();\n process.exit(0);\n });\n}\n\nmain();\n`;\n}\n\n/**\n * Create sessions browser script file\n */\nexport async function createSessionsScript(\n outputDir: string,\n aliasName: string,\n): Promise<{ sessionsScript: string }> {\n if (!existsSync(outputDir)) {\n await mkdir(outputDir, { recursive: true });\n }\n\n const sessionsScriptPath = join(outputDir, `${aliasName}-sessions.js`);\n\n await writeFile(sessionsScriptPath, generateSessionsBrowserScript(aliasName));\n await chmod(sessionsScriptPath, 0o755);\n\n return { sessionsScript: sessionsScriptPath };\n}\n","import bin from \"tiny-bin\";\nimport { styleText } from \"node:util\";\nimport { existsSync, readFileSync } from \"node:fs\";\nimport { join, dirname } from \"node:path\";\nimport { homedir } from \"node:os\";\nimport { fileURLToPath } from \"node:url\";\nimport { execSync } from \"node:child_process\";\nimport { patchDroid, type Patch } from \"./patcher.ts\";\nimport {\n createAlias,\n removeAlias,\n listAliases,\n createAliasForWrapper,\n clearAllAliases,\n removeAliasesByFilter,\n type FilterFlag,\n} from \"./alias.ts\";\nimport { createWebSearchUnifiedFiles } from \"./websearch-patch.ts\";\nimport { createStatuslineFiles } from \"./statusline-patch.ts\";\nimport { createSessionsScript } from \"./sessions-patch.ts\";\nimport {\n saveAliasMetadata,\n createMetadata,\n loadAliasMetadata,\n listAllMetadata,\n formatPatches,\n} from \"./metadata.ts\";\n\nconst __dirname = dirname(fileURLToPath(import.meta.url));\n\nfunction getVersion(): string {\n try {\n const pkgPath = join(__dirname, \"..\", \"package.json\");\n const pkg = JSON.parse(readFileSync(pkgPath, \"utf-8\"));\n return pkg.version || \"0.0.0\";\n } catch {\n return \"0.0.0\";\n }\n}\n\nconst version = getVersion();\n\nfunction getDroidVersion(droidPath: string): string | undefined {\n try {\n const result = execSync(`\"${droidPath}\" --version`, {\n encoding: \"utf-8\",\n stdio: [\"pipe\", \"pipe\", \"pipe\"],\n timeout: 5000,\n }).trim();\n // Parse version from output like \"droid 1.2.3\" or just \"1.2.3\"\n const match = result.match(/(\\d+\\.\\d+\\.\\d+)/);\n return match ? 
match[1] : result || undefined;\n } catch {\n return undefined;\n }\n}\n\nfunction findDefaultDroidPath(): string {\n const home = homedir();\n\n // Try `which droid` first to find droid in PATH\n try {\n const result = execSync(\"which droid\", {\n encoding: \"utf-8\",\n stdio: [\"pipe\", \"pipe\", \"pipe\"],\n }).trim();\n if (result && existsSync(result)) {\n return result;\n }\n } catch {\n // which command failed, continue with fallback paths\n }\n\n // Common installation paths\n const paths = [\n // Default sh install location\n join(home, \".droid\", \"bin\", \"droid\"),\n // Homebrew on Apple Silicon\n \"/opt/homebrew/bin/droid\",\n // Homebrew on Intel Mac / Linux\n \"/usr/local/bin/droid\",\n // Linux system-wide\n \"/usr/bin/droid\",\n // Current directory\n \"./droid\",\n ];\n\n for (const p of paths) {\n if (existsSync(p)) return p;\n }\n\n // Return default path even if not found (will error later with helpful message)\n return join(home, \".droid\", \"bin\", \"droid\");\n}\n\nbin(\"droid-patch\", \"CLI tool to patch droid binary with various modifications\")\n .package(\"droid-patch\", version)\n .option(\n \"--is-custom\",\n \"Patch isCustom:!0 to isCustom:!1 (enable context compression for custom models)\",\n )\n .option(\n \"--skip-login\",\n \"Inject a fake FACTORY_API_KEY to bypass login requirement (no real key needed)\",\n )\n .option(\n \"--api-base <url>\",\n \"Replace API URL (standalone: binary patch, max 22 chars; with --websearch: proxy forward target, no limit)\",\n )\n .option(\n \"--websearch\",\n \"Enable local WebSearch proxy (each instance runs own proxy, auto-cleanup on exit)\",\n )\n .option(\"--statusline\", \"Enable a Claude-style statusline (terminal UI)\")\n .option(\"--sessions\", \"Enable sessions browser (--sessions flag in alias)\")\n .option(\"--standalone\", \"Standalone mode: mock non-LLM Factory APIs (use with --websearch)\")\n .option(\n \"--reasoning-effort\",\n \"Enable reasoning effort for custom models (set to high, enable UI selector)\",\n )\n .option(\n \"--disable-telemetry\",\n \"Disable telemetry and Sentry error reporting (block data uploads)\",\n )\n .option(\"--dry-run\", \"Verify patches without actually modifying the binary\")\n .option(\"-p, --path <path>\", \"Path to the droid binary\")\n .option(\"-o, --output <dir>\", \"Output directory for patched binary\")\n .option(\"--no-backup\", \"Do not create backup of original binary\")\n .option(\"-v, --verbose\", \"Enable verbose output\")\n .argument(\"[alias]\", \"Alias name for the patched binary\")\n .action(async (options, args) => {\n const alias = args?.[0] as string | undefined;\n const isCustom = options[\"is-custom\"] as boolean;\n const skipLogin = options[\"skip-login\"] as boolean;\n const apiBase = options[\"api-base\"] as string | undefined;\n const websearch = options[\"websearch\"] as boolean;\n const statusline = options[\"statusline\"] as boolean;\n const sessions = options[\"sessions\"] as boolean;\n const standalone = options[\"standalone\"] as boolean;\n // When --websearch is used with --api-base, forward to custom URL\n // Otherwise forward to official Factory API\n const websearchTarget = websearch ? 
apiBase || \"https://api.factory.ai\" : undefined;\n const reasoningEffort = options[\"reasoning-effort\"] as boolean;\n const noTelemetry = options[\"disable-telemetry\"] as boolean;\n const dryRun = options[\"dry-run\"] as boolean;\n const path = (options.path as string) || findDefaultDroidPath();\n const outputDir = options.output as string | undefined;\n const backup = options.backup !== false;\n const verbose = options.verbose as boolean;\n\n // If -o is specified with alias, output to that directory with alias name\n const outputPath = outputDir && alias ? join(outputDir, alias) : undefined;\n\n const needsBinaryPatch =\n !!isCustom || !!skipLogin || !!reasoningEffort || !!noTelemetry || (!!apiBase && !websearch);\n\n const statuslineEnabled = statusline;\n\n // Wrapper-only mode (no binary patching needed):\n // - --websearch (optional --standalone)\n // - --statusline\n // - both combined (statusline wraps websearch)\n if (!needsBinaryPatch && (websearch || statuslineEnabled)) {\n if (!alias) {\n console.log(styleText(\"red\", \"Error: Alias name required for --websearch/--statusline\"));\n console.log(styleText(\"gray\", \"Usage: npx droid-patch --websearch <alias>\"));\n console.log(styleText(\"gray\", \"Usage: npx droid-patch --statusline <alias>\"));\n process.exit(1);\n }\n\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n console.log(styleText([\"cyan\", \"bold\"], \" Droid Wrapper Setup\"));\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n console.log();\n if (websearch) {\n console.log(styleText(\"white\", `WebSearch: enabled`));\n console.log(styleText(\"white\", `Forward target: ${websearchTarget}`));\n if (standalone) {\n console.log(styleText(\"white\", `Standalone mode: enabled`));\n }\n }\n if (statuslineEnabled) {\n console.log(styleText(\"white\", `Statusline: enabled`));\n }\n console.log();\n\n let execTargetPath = path;\n if (websearch) {\n // Create websearch proxy files (proxy script + wrapper)\n const proxyDir = join(homedir(), \".droid-patch\", \"proxy\");\n const { wrapperScript } = await createWebSearchUnifiedFiles(\n proxyDir,\n execTargetPath,\n alias,\n websearchTarget,\n standalone,\n );\n execTargetPath = wrapperScript;\n }\n\n if (statuslineEnabled) {\n const statuslineDir = join(homedir(), \".droid-patch\", \"statusline\");\n // Create sessions script only if --sessions is enabled\n let sessionsScript: string | undefined;\n if (sessions) {\n const result = await createSessionsScript(statuslineDir, alias);\n sessionsScript = result.sessionsScript;\n }\n const { wrapperScript } = await createStatuslineFiles(\n statuslineDir,\n execTargetPath,\n alias,\n sessionsScript,\n );\n execTargetPath = wrapperScript;\n }\n\n // Create alias pointing to outer wrapper\n const aliasResult = await createAliasForWrapper(execTargetPath, alias, verbose);\n\n // Save metadata for update command\n const droidVersion = getDroidVersion(path);\n const metadata = createMetadata(\n alias,\n path,\n {\n isCustom: false,\n skipLogin: false,\n apiBase: apiBase || null,\n websearch: !!websearch,\n statusline: !!statuslineEnabled,\n sessions: !!sessions,\n reasoningEffort: false,\n noTelemetry: false,\n standalone: standalone,\n },\n {\n droidPatchVersion: version,\n droidVersion,\n aliasPath: aliasResult.aliasPath,\n },\n );\n await saveAliasMetadata(metadata);\n\n console.log();\n console.log(styleText(\"green\", \"═\".repeat(60)));\n console.log(styleText([\"green\", \"bold\"], \" Wrapper Ready!\"));\n console.log(styleText(\"green\", \"═\".repeat(60)));\n 
console.log();\n console.log(\"Run directly:\");\n console.log(styleText(\"yellow\", ` ${alias}`));\n console.log();\n if (websearch) {\n console.log(styleText(\"cyan\", \"Auto-shutdown:\"));\n console.log(\n styleText(\"gray\", \" Proxy auto-shuts down after 5 min idle (no manual cleanup needed)\"),\n );\n console.log(styleText(\"gray\", \" To disable: export DROID_PROXY_IDLE_TIMEOUT=0\"));\n console.log();\n console.log(\"Search providers (in priority order):\");\n console.log(styleText(\"yellow\", \" 1. Smithery Exa (best quality):\"));\n console.log(styleText(\"gray\", \" export SMITHERY_API_KEY=your_api_key\"));\n console.log(styleText(\"gray\", \" export SMITHERY_PROFILE=your_profile\"));\n console.log(styleText(\"gray\", \" 2. Google PSE:\"));\n console.log(styleText(\"gray\", \" export GOOGLE_PSE_API_KEY=your_api_key\"));\n console.log(styleText(\"gray\", \" export GOOGLE_PSE_CX=your_search_engine_id\"));\n console.log(styleText(\"gray\", \" 3-6. Serper, Brave, SearXNG, DuckDuckGo (fallbacks)\"));\n console.log();\n console.log(\"Debug mode:\");\n console.log(styleText(\"gray\", \" export DROID_SEARCH_DEBUG=1\"));\n }\n return;\n }\n\n if (\n !isCustom &&\n !skipLogin &&\n !apiBase &&\n !websearch &&\n !statuslineEnabled &&\n !reasoningEffort &&\n !noTelemetry\n ) {\n console.log(styleText(\"yellow\", \"No patch flags specified. Available patches:\"));\n console.log(styleText(\"gray\", \" --is-custom Patch isCustom for custom models\"));\n console.log(\n styleText(\"gray\", \" --skip-login Bypass login by injecting a fake API key\"),\n );\n console.log(\n styleText(\n \"gray\",\n \" --api-base Replace API URL (standalone: max 22 chars; with --websearch: no limit)\",\n ),\n );\n console.log(styleText(\"gray\", \" --websearch Enable local WebSearch proxy\"));\n console.log(styleText(\"gray\", \" --statusline Enable Claude-style statusline\"));\n console.log(\n styleText(\"gray\", \" --reasoning-effort Set reasoning effort level for custom models\"),\n );\n console.log(\n styleText(\"gray\", \" --disable-telemetry Disable telemetry and Sentry error reporting\"),\n );\n console.log(\n styleText(\"gray\", \" --standalone Standalone mode: mock non-LLM Factory APIs\"),\n );\n console.log();\n console.log(\"Usage examples:\");\n console.log(styleText(\"cyan\", \" npx droid-patch --is-custom droid-custom\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --skip-login droid-nologin\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --is-custom --skip-login droid-patched\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --websearch droid-search\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --websearch --standalone droid-local\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --statusline droid-status\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --websearch --statusline droid-search-ui\"));\n console.log(styleText(\"cyan\", \" npx droid-patch --disable-telemetry droid-private\"));\n console.log(\n styleText(\n \"cyan\",\n \" npx droid-patch --websearch --api-base=http://127.0.0.1:20002 my-droid\",\n ),\n );\n process.exit(1);\n }\n\n if (!alias && !dryRun) {\n console.log(styleText(\"red\", \"Error: alias name is required\"));\n console.log(\n styleText(\n \"gray\",\n \"Usage: droid-patch [--is-custom] [--skip-login] [-o <dir>] <alias-name>\",\n ),\n );\n process.exit(1);\n }\n\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n console.log(styleText([\"cyan\", \"bold\"], \" Droid Binary Patcher\"));\n console.log(styleText(\"cyan\", 
\"═\".repeat(60)));\n console.log();\n\n const patches: Patch[] = [];\n if (isCustom) {\n patches.push({\n name: \"isCustom\",\n description: \"Change isCustom:!0 to isCustom:!1\",\n pattern: Buffer.from(\"isCustom:!0\"),\n replacement: Buffer.from(\"isCustom:!1\"),\n });\n }\n\n // Add skip-login patch: replace process.env.FACTORY_API_KEY with a fixed fake key\n // \"process.env.FACTORY_API_KEY\" is 27 chars, we replace with \"fk-droid-patch-skip-00000\" (25 chars + quotes = 27)\n if (skipLogin) {\n patches.push({\n name: \"skipLogin\",\n description: 'Replace process.env.FACTORY_API_KEY with \"fk-droid-patch-skip-00000\"',\n pattern: Buffer.from(\"process.env.FACTORY_API_KEY\"),\n replacement: Buffer.from('\"fk-droid-patch-skip-00000\"'),\n });\n }\n\n // Add api-base patch: replace the Factory API base URL\n // Original: \"https://api.factory.ai\" (22 chars)\n // We need to pad the replacement URL to be exactly 22 chars\n // Note: When --websearch is used, --api-base sets the forward target instead of binary patching\n if (apiBase && !websearch) {\n const originalUrl = \"https://api.factory.ai\";\n const originalLength = originalUrl.length; // 22 chars\n\n // Validate and normalize the URL\n let normalizedUrl = apiBase.replace(/\\/+$/, \"\"); // Remove trailing slashes\n\n if (normalizedUrl.length > originalLength) {\n console.log(\n styleText(\"red\", `Error: API base URL must be ${originalLength} characters or less`),\n );\n console.log(\n styleText(\"gray\", ` Your URL: \"${normalizedUrl}\" (${normalizedUrl.length} chars)`),\n );\n console.log(styleText(\"gray\", ` Maximum: ${originalLength} characters`));\n console.log();\n console.log(styleText(\"yellow\", \"Tip: Use a shorter URL or set up a local redirect.\"));\n console.log(styleText(\"gray\", \" Examples:\"));\n console.log(styleText(\"gray\", \" http://127.0.0.1:3000 (19 chars)\"));\n console.log(styleText(\"gray\", \" http://localhost:80 (19 chars)\"));\n process.exit(1);\n }\n\n // Pad the URL with spaces at the end to match original length\n // Note: trailing spaces in URL are generally ignored\n const paddedUrl = normalizedUrl.padEnd(originalLength, \" \");\n\n patches.push({\n name: \"apiBase\",\n description: `Replace Factory API URL with \"${normalizedUrl}\"`,\n pattern: Buffer.from(originalUrl),\n replacement: Buffer.from(paddedUrl),\n });\n }\n\n // Add reasoning-effort patch: set custom models to use \"high\" reasoning\n // Also modify UI conditions to show reasoning selector for custom models\n if (reasoningEffort) {\n // [\"none\"] is 8 chars, [\"high\"] is 8 chars - perfect match!\n patches.push({\n name: \"reasoningEffortSupported\",\n description: 'Change supportedReasoningEfforts:[\"none\"] to [\"high\"]',\n pattern: Buffer.from('supportedReasoningEfforts:[\"none\"]'),\n replacement: Buffer.from('supportedReasoningEfforts:[\"high\"]'),\n });\n\n // \"none\" is 4 chars, \"high\" is 4 chars - perfect match!\n patches.push({\n name: \"reasoningEffortDefault\",\n description: 'Change defaultReasoningEffort:\"none\" to \"high\"',\n pattern: Buffer.from('defaultReasoningEffort:\"none\"'),\n replacement: Buffer.from('defaultReasoningEffort:\"high\"'),\n });\n\n // Change UI condition from length>1 to length>0\n // This allows custom models with single reasoning option to show the selector\n patches.push({\n name: \"reasoningEffortUIShow\",\n description: \"Change supportedReasoningEfforts.length>1 to length>0\",\n pattern: Buffer.from(\"supportedReasoningEfforts.length>1\"),\n replacement: 
Buffer.from(\"supportedReasoningEfforts.length>0\"),\n });\n\n // Change UI condition from length<=1 to length<=0\n // This enables the reasoning setting in /settings menu for custom models\n patches.push({\n name: \"reasoningEffortUIEnable\",\n description: \"Change supportedReasoningEfforts.length<=1 to length<=0\",\n pattern: Buffer.from(\"supportedReasoningEfforts.length<=1\"),\n replacement: Buffer.from(\"supportedReasoningEfforts.length<=0\"),\n });\n\n // Bypass reasoning effort validation to allow settings.json override\n // This allows \"xhigh\" in settings.json to work even though default is \"high\"\n // v0.39.0+: T!==\"none\"&&T!==\"off\"&&!W.supportedReasoningEfforts.includes(T)\n // Changed: T!=\"none\"&&T!=\"off\"&&0&&W... - use != (2 chars less) + 0&& (2 chars more) = same length\n // Logic: && 0 && makes entire condition always false, bypassing validation\n patches.push({\n name: \"reasoningEffortValidationBypass\",\n description: \"Bypass reasoning effort validation (allows xhigh in settings.json)\",\n pattern: Buffer.from('T!==\"none\"&&T!==\"off\"&&!W.supportedReasoningEfforts.includes(T)'),\n replacement: Buffer.from('T!=\"none\"&&T!=\"off\"&&0&&W.supportedReasoningEfforts.includes(T)'),\n });\n }\n\n // Add no-telemetry patches: disable telemetry uploads and Sentry error reporting\n // Strategy:\n // 1. Break environment variable names so Sentry is never initialized (Q1() returns false)\n // 2. Invert flushToWeb condition so it returns early without making any fetch request\n if (noTelemetry) {\n // Patch 1: Break Sentry environment variable checks\n // Q1() function checks: VITE_VERCEL_ENV, ENABLE_SENTRY, NEXT_PUBLIC_ENABLE_SENTRY, FACTORY_ENABLE_SENTRY\n // By changing first letter to X, the env vars will never match, so Q1() returns false\n // and Sentry is never initialized\n patches.push({\n name: \"noTelemetrySentryEnv1\",\n description: \"Break ENABLE_SENTRY env var check (E->X)\",\n pattern: Buffer.from(\"ENABLE_SENTRY\"),\n replacement: Buffer.from(\"XNABLE_SENTRY\"),\n });\n\n patches.push({\n name: \"noTelemetrySentryEnv2\",\n description: \"Break VITE_VERCEL_ENV env var check (V->X)\",\n pattern: Buffer.from(\"VITE_VERCEL_ENV\"),\n replacement: Buffer.from(\"XITE_VERCEL_ENV\"),\n });\n\n // Patch 2: Make flushToWeb always return early to prevent ANY fetch request\n // Original: if(this.webEvents.length===0)return; // returns only when empty\n // Changed: if(!0||this.webEvents.length)return; // !0=true, ALWAYS returns\n // Result: Function always exits immediately, no telemetry is ever sent\n patches.push({\n name: \"noTelemetryFlushBlock\",\n description: \"Make flushToWeb always return (!0|| = always true)\",\n pattern: Buffer.from(\"this.webEvents.length===0\"),\n replacement: Buffer.from(\"!0||this.webEvents.length\"),\n });\n }\n\n try {\n const result = await patchDroid({\n inputPath: path,\n outputPath: outputPath,\n patches,\n dryRun,\n backup,\n verbose,\n });\n\n if (dryRun) {\n console.log();\n console.log(styleText(\"blue\", \"═\".repeat(60)));\n console.log(styleText([\"blue\", \"bold\"], \" DRY RUN COMPLETE\"));\n console.log(styleText(\"blue\", \"═\".repeat(60)));\n console.log();\n console.log(styleText(\"gray\", \"To apply the patches, run without --dry-run:\"));\n console.log(styleText(\"cyan\", ` npx droid-patch --is-custom ${alias || \"<alias-name>\"}`));\n process.exit(0);\n }\n\n // If -o is specified, just output the file without creating alias\n if (outputDir && result.success && result.outputPath) {\n console.log();\n 
console.log(styleText(\"green\", \"═\".repeat(60)));\n console.log(styleText([\"green\", \"bold\"], \" PATCH SUCCESSFUL\"));\n console.log(styleText(\"green\", \"═\".repeat(60)));\n console.log();\n console.log(styleText(\"white\", `Patched binary saved to: ${result.outputPath}`));\n process.exit(0);\n }\n\n if (result.success && result.outputPath && alias) {\n console.log();\n\n let execTargetPath = result.outputPath;\n\n if (websearch) {\n const proxyDir = join(homedir(), \".droid-patch\", \"proxy\");\n const { wrapperScript } = await createWebSearchUnifiedFiles(\n proxyDir,\n execTargetPath,\n alias,\n websearchTarget,\n standalone,\n );\n execTargetPath = wrapperScript;\n\n console.log();\n console.log(styleText(\"cyan\", \"WebSearch enabled\"));\n console.log(styleText(\"white\", ` Forward target: ${websearchTarget}`));\n if (standalone) {\n console.log(styleText(\"white\", ` Standalone mode: enabled`));\n }\n }\n\n if (statuslineEnabled) {\n const statuslineDir = join(homedir(), \".droid-patch\", \"statusline\");\n let sessionsScript: string | undefined;\n if (sessions) {\n const result = await createSessionsScript(statuslineDir, alias);\n sessionsScript = result.sessionsScript;\n }\n const { wrapperScript } = await createStatuslineFiles(\n statuslineDir,\n execTargetPath,\n alias,\n sessionsScript,\n );\n execTargetPath = wrapperScript;\n console.log();\n console.log(styleText(\"cyan\", \"Statusline enabled\"));\n }\n\n let aliasResult;\n if (websearch || statuslineEnabled) {\n aliasResult = await createAliasForWrapper(execTargetPath, alias, verbose);\n } else {\n aliasResult = await createAlias(result.outputPath, alias, verbose);\n }\n\n // Save metadata for update command\n const droidVersion = getDroidVersion(path);\n const metadata = createMetadata(\n alias,\n path,\n {\n isCustom: !!isCustom,\n skipLogin: !!skipLogin,\n apiBase: apiBase || null,\n websearch: !!websearch,\n statusline: !!statuslineEnabled,\n sessions: !!sessions,\n reasoningEffort: !!reasoningEffort,\n noTelemetry: !!noTelemetry,\n standalone: !!standalone,\n },\n {\n droidPatchVersion: version,\n droidVersion,\n aliasPath: aliasResult.aliasPath,\n },\n );\n await saveAliasMetadata(metadata);\n }\n\n if (result.success) {\n console.log();\n console.log(styleText(\"green\", \"═\".repeat(60)));\n console.log(styleText([\"green\", \"bold\"], \" PATCH SUCCESSFUL\"));\n console.log(styleText(\"green\", \"═\".repeat(60)));\n }\n\n process.exit(result.success ? 
0 : 1);\n } catch (error) {\n console.error(styleText(\"red\", `Error: ${(error as Error).message}`));\n if (verbose) console.error((error as Error).stack);\n process.exit(1);\n }\n })\n .command(\"list\", \"List all droid-patch aliases\")\n .action(async () => {\n await listAliases();\n })\n .command(\"remove\", \"Remove alias(es) by name or filter\")\n .argument(\"[alias-or-path]\", \"Alias name or file path to remove\")\n .option(\"--patch-version <version>\", \"Remove aliases created by this droid-patch version\")\n .option(\"--droid-version <version>\", \"Remove aliases for this droid version\")\n .option(\n \"--flag <flag>\",\n \"Remove aliases with this flag (is-custom, skip-login, websearch, statusline, api-base, reasoning-effort, disable-telemetry, standalone)\",\n )\n .action(async (options, args) => {\n const target = args?.[0] as string | undefined;\n const patchVersion = options[\"patch-version\"] as string | undefined;\n const droidVersion = options[\"droid-version\"] as string | undefined;\n const flag = options.flag as FilterFlag | undefined;\n\n // If filter options are provided, use filter mode\n if (patchVersion || droidVersion || flag) {\n await removeAliasesByFilter({\n patchVersion,\n droidVersion,\n flags: flag ? [flag] : undefined,\n });\n return;\n }\n\n // If no target and no filter, show error\n if (!target) {\n console.error(\n styleText(\n \"red\",\n \"Error: Provide an alias name or use filter options (--patch-version, --droid-version, --flag)\",\n ),\n );\n process.exit(1);\n }\n\n // Check if it's a file path (contains / or .)\n if (target.includes(\"/\") || existsSync(target)) {\n // It's a file path, delete directly\n const { unlink } = await import(\"node:fs/promises\");\n try {\n await unlink(target);\n console.log(styleText(\"green\", `[*] Removed: ${target}`));\n } catch (error) {\n console.error(styleText(\"red\", `Error: ${(error as Error).message}`));\n process.exit(1);\n }\n } else {\n // It's an alias name\n await removeAlias(target);\n }\n })\n .command(\"version\", \"Print droid-patch version\")\n .action(() => {\n console.log(`droid-patch v${version}`);\n })\n .command(\"clear\", \"Remove all droid-patch aliases and related files\")\n .action(async () => {\n await clearAllAliases();\n })\n .command(\"update\", \"Update aliases with latest droid binary\")\n .argument(\"[alias]\", \"Specific alias to update (optional, updates all if not specified)\")\n .option(\"--dry-run\", \"Preview without making changes\")\n .option(\"-p, --path <path>\", \"Path to new droid binary\")\n .option(\"-v, --verbose\", \"Enable verbose output\")\n .action(async (options, args) => {\n const aliasName = args?.[0] as string | undefined;\n const dryRun = options[\"dry-run\"] as boolean;\n const newBinaryPath = (options.path as string) || findDefaultDroidPath();\n const verbose = options.verbose as boolean;\n\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n console.log(styleText([\"cyan\", \"bold\"], \" Droid-Patch Update\"));\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n console.log();\n\n // Verify the new binary exists\n if (!existsSync(newBinaryPath)) {\n console.log(styleText(\"red\", `Error: Droid binary not found at ${newBinaryPath}`));\n console.log(styleText(\"gray\", \"Use -p to specify a different path\"));\n process.exit(1);\n }\n\n // Get aliases to update\n let metaList: Awaited<ReturnType<typeof loadAliasMetadata>>[];\n if (aliasName) {\n const meta = await loadAliasMetadata(aliasName);\n if (!meta) {\n console.log(styleText(\"red\", 
`Error: No metadata found for alias \"${aliasName}\"`));\n console.log(\n styleText(\"gray\", \"This alias may have been created before update tracking was added.\"),\n );\n console.log(styleText(\"gray\", \"Remove and recreate the alias to enable update support.\"));\n process.exit(1);\n }\n metaList = [meta];\n } else {\n metaList = await listAllMetadata();\n if (metaList.length === 0) {\n console.log(styleText(\"yellow\", \"No aliases with metadata found.\"));\n console.log(styleText(\"gray\", \"Create aliases with droid-patch to enable update support.\"));\n process.exit(0);\n }\n }\n\n console.log(styleText(\"white\", `Using droid binary: ${newBinaryPath}`));\n console.log(styleText(\"white\", `Found ${metaList.length} alias(es) to update`));\n if (dryRun) {\n console.log(styleText(\"blue\", \"(DRY RUN - no changes will be made)\"));\n }\n console.log();\n\n let successCount = 0;\n let failCount = 0;\n\n for (const meta of metaList) {\n if (!meta) continue;\n\n console.log(styleText(\"cyan\", `─`.repeat(40)));\n console.log(styleText(\"white\", `Updating: ${styleText([\"cyan\", \"bold\"], meta.name)}`));\n console.log(styleText(\"gray\", ` Patches: ${formatPatches(meta.patches)}`));\n\n if (dryRun) {\n console.log(styleText(\"blue\", ` [DRY RUN] Would re-apply patches`));\n successCount++;\n continue;\n }\n\n try {\n // Build patch list based on metadata\n const patches: Patch[] = [];\n\n if (meta.patches.isCustom) {\n patches.push({\n name: \"isCustom\",\n description: \"Change isCustom:!0 to isCustom:!1\",\n pattern: Buffer.from(\"isCustom:!0\"),\n replacement: Buffer.from(\"isCustom:!1\"),\n });\n }\n\n if (meta.patches.skipLogin) {\n patches.push({\n name: \"skipLogin\",\n description: \"Replace process.env.FACTORY_API_KEY with fake key\",\n pattern: Buffer.from(\"process.env.FACTORY_API_KEY\"),\n replacement: Buffer.from('\"fk-droid-patch-skip-00000\"'),\n });\n }\n\n // Only apply apiBase binary patch when NOT using websearch\n // When websearch is enabled, apiBase is used as forward target, not binary patch\n if (meta.patches.apiBase && !meta.patches.websearch) {\n const originalUrl = \"https://api.factory.ai\";\n const paddedUrl = meta.patches.apiBase.padEnd(originalUrl.length, \" \");\n patches.push({\n name: \"apiBase\",\n description: `Replace Factory API URL with \"${meta.patches.apiBase}\"`,\n pattern: Buffer.from(originalUrl),\n replacement: Buffer.from(paddedUrl),\n });\n }\n\n if (meta.patches.reasoningEffort) {\n patches.push({\n name: \"reasoningEffortSupported\",\n description: 'Change supportedReasoningEfforts:[\"none\"] to [\"high\"]',\n pattern: Buffer.from('supportedReasoningEfforts:[\"none\"]'),\n replacement: Buffer.from('supportedReasoningEfforts:[\"high\"]'),\n });\n patches.push({\n name: \"reasoningEffortDefault\",\n description: 'Change defaultReasoningEffort:\"none\" to \"high\"',\n pattern: Buffer.from('defaultReasoningEffort:\"none\"'),\n replacement: Buffer.from('defaultReasoningEffort:\"high\"'),\n });\n patches.push({\n name: \"reasoningEffortUIShow\",\n description: \"Change supportedReasoningEfforts.length>1 to length>0\",\n pattern: Buffer.from(\"supportedReasoningEfforts.length>1\"),\n replacement: Buffer.from(\"supportedReasoningEfforts.length>0\"),\n });\n patches.push({\n name: \"reasoningEffortUIEnable\",\n description: \"Change supportedReasoningEfforts.length<=1 to length<=0\",\n pattern: Buffer.from(\"supportedReasoningEfforts.length<=1\"),\n replacement: Buffer.from(\"supportedReasoningEfforts.length<=0\"),\n });\n patches.push({\n name: 
\"reasoningEffortValidationBypass\",\n description: \"Bypass reasoning effort validation (allows xhigh in settings.json)\",\n pattern: Buffer.from('T!==\"none\"&&T!==\"off\"&&!W.supportedReasoningEfforts.includes(T)'),\n replacement: Buffer.from('T!=\"none\"&&T!=\"off\"&&0&&W.supportedReasoningEfforts.includes(T)'),\n });\n }\n\n if (meta.patches.noTelemetry) {\n patches.push({\n name: \"noTelemetrySentryEnv1\",\n description: \"Break ENABLE_SENTRY env var check (E->X)\",\n pattern: Buffer.from(\"ENABLE_SENTRY\"),\n replacement: Buffer.from(\"XNABLE_SENTRY\"),\n });\n patches.push({\n name: \"noTelemetrySentryEnv2\",\n description: \"Break VITE_VERCEL_ENV env var check (V->X)\",\n pattern: Buffer.from(\"VITE_VERCEL_ENV\"),\n replacement: Buffer.from(\"XITE_VERCEL_ENV\"),\n });\n patches.push({\n name: \"noTelemetryFlushBlock\",\n description: \"Make flushToWeb always return (!0|| = always true)\",\n pattern: Buffer.from(\"this.webEvents.length===0\"),\n replacement: Buffer.from(\"!0||this.webEvents.length\"),\n });\n }\n\n // Determine output path based on whether this is a websearch alias\n const binsDir = join(homedir(), \".droid-patch\", \"bins\");\n const outputPath = join(binsDir, `${meta.name}-patched`);\n\n // Apply patches (only if there are binary patches to apply)\n if (patches.length > 0) {\n const result = await patchDroid({\n inputPath: newBinaryPath,\n outputPath,\n patches,\n dryRun: false,\n backup: false,\n verbose,\n });\n\n if (!result.success) {\n console.log(styleText(\"red\", ` ✗ Failed to apply patches`));\n failCount++;\n continue;\n }\n\n // Re-sign on macOS\n if (process.platform === \"darwin\") {\n try {\n const { execSync } = await import(\"node:child_process\");\n execSync(`codesign --force --deep --sign - \"${outputPath}\"`, {\n stdio: \"pipe\",\n });\n if (verbose) {\n console.log(styleText(\"gray\", ` Re-signed binary`));\n }\n } catch {\n console.log(styleText(\"yellow\", ` [!] Could not re-sign binary`));\n }\n }\n }\n\n let execTargetPath = patches.length > 0 ? 
outputPath : newBinaryPath;\n\n // If websearch is enabled, regenerate wrapper files\n // Support both new 'websearch' field and old 'proxy' field for backward compatibility\n const hasWebsearch = meta.patches.websearch || !!meta.patches.proxy;\n if (hasWebsearch) {\n // Determine forward target: apiBase > proxy (legacy) > default\n const forwardTarget =\n meta.patches.apiBase || meta.patches.proxy || \"https://api.factory.ai\";\n const proxyDir = join(homedir(), \".droid-patch\", \"proxy\");\n const { wrapperScript } = await createWebSearchUnifiedFiles(\n proxyDir,\n execTargetPath,\n meta.name,\n forwardTarget,\n meta.patches.standalone || false,\n );\n execTargetPath = wrapperScript;\n if (verbose) {\n console.log(styleText(\"gray\", ` Regenerated websearch wrapper`));\n if (meta.patches.standalone) {\n console.log(styleText(\"gray\", ` Standalone mode: enabled`));\n }\n }\n // Migrate old proxy field to new websearch field\n if (meta.patches.proxy && !meta.patches.websearch) {\n meta.patches.websearch = true;\n meta.patches.apiBase = meta.patches.proxy;\n delete meta.patches.proxy;\n }\n }\n\n if (meta.patches.statusline) {\n const statuslineDir = join(homedir(), \".droid-patch\", \"statusline\");\n let sessionsScript: string | undefined;\n if (meta.patches.sessions) {\n const result = await createSessionsScript(statuslineDir, meta.name);\n sessionsScript = result.sessionsScript;\n }\n const { wrapperScript } = await createStatuslineFiles(\n statuslineDir,\n execTargetPath,\n meta.name,\n sessionsScript,\n );\n execTargetPath = wrapperScript;\n if (verbose) {\n console.log(styleText(\"gray\", ` Regenerated statusline wrapper`));\n }\n }\n\n // Update symlink - find existing or use stored aliasPath\n const { symlink, unlink, readlink, lstat } = await import(\"node:fs/promises\");\n let aliasPath = meta.aliasPath;\n\n // If aliasPath not stored (old version), try to find existing symlink\n if (!aliasPath) {\n const commonPathDirs = [\n join(homedir(), \".local/bin\"),\n join(homedir(), \"bin\"),\n join(homedir(), \".bin\"),\n \"/opt/homebrew/bin\",\n \"/usr/local/bin\",\n join(homedir(), \".droid-patch\", \"aliases\"),\n ];\n\n for (const dir of commonPathDirs) {\n const possiblePath = join(dir, meta.name);\n if (existsSync(possiblePath)) {\n try {\n const stats = await lstat(possiblePath);\n if (stats.isSymbolicLink()) {\n const target = await readlink(possiblePath);\n if (\n target.includes(\".droid-patch/bins\") ||\n target.includes(\".droid-patch/proxy\") ||\n target.includes(\".droid-patch/statusline\")\n ) {\n aliasPath = possiblePath;\n if (verbose) {\n console.log(styleText(\"gray\", ` Found existing symlink: ${aliasPath}`));\n }\n break;\n }\n }\n } catch {\n // Ignore errors, continue searching\n }\n }\n }\n }\n\n // Update symlink if we have a path\n if (aliasPath) {\n try {\n if (existsSync(aliasPath)) {\n const currentTarget = await readlink(aliasPath);\n if (currentTarget !== execTargetPath) {\n await unlink(aliasPath);\n await symlink(execTargetPath, aliasPath);\n if (verbose) {\n console.log(styleText(\"gray\", ` Updated symlink: ${aliasPath}`));\n }\n }\n } else {\n // Symlink doesn't exist, recreate it\n await symlink(execTargetPath, aliasPath);\n if (verbose) {\n console.log(styleText(\"gray\", ` Recreated symlink: ${aliasPath}`));\n }\n }\n // Store aliasPath in metadata for future updates\n meta.aliasPath = aliasPath;\n } catch (symlinkError) {\n console.log(\n styleText(\n \"yellow\",\n ` [!] 
Could not update symlink: ${(symlinkError as Error).message}`,\n ),\n );\n }\n }\n\n // Update metadata\n meta.updatedAt = new Date().toISOString();\n meta.originalBinaryPath = newBinaryPath;\n meta.droidVersion = getDroidVersion(newBinaryPath);\n meta.droidPatchVersion = version;\n await saveAliasMetadata(meta);\n\n console.log(styleText(\"green\", ` ✓ Updated successfully`));\n successCount++;\n } catch (error) {\n console.log(styleText(\"red\", ` ✗ Error: ${(error as Error).message}`));\n if (verbose) {\n console.error((error as Error).stack);\n }\n failCount++;\n }\n }\n\n console.log();\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n if (dryRun) {\n console.log(styleText([\"blue\", \"bold\"], \" DRY RUN COMPLETE\"));\n console.log(styleText(\"gray\", ` Would update ${successCount} alias(es)`));\n } else if (failCount === 0) {\n console.log(styleText([\"green\", \"bold\"], \" UPDATE COMPLETE\"));\n console.log(styleText(\"gray\", ` Updated ${successCount} alias(es)`));\n } else {\n console.log(styleText([\"yellow\", \"bold\"], \" UPDATE FINISHED WITH ERRORS\"));\n console.log(styleText(\"gray\", ` Success: ${successCount}, Failed: ${failCount}`));\n }\n console.log(styleText(\"cyan\", \"═\".repeat(60)));\n })\n .run()\n .catch((err: Error) => {\n console.error(err);\n process.exit(1);\n });\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAm9BA,SAAS,0BAA0B,gBAAwB,0BAAkC;AAC3F,QAAO;;;;;;;;;;;;uBAYc,cAAc;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAwerC,SAAS,uBACP,WACA,iBACA,aAAsB,OACd;CACR,MAAM,gBAAgB,aAAa,uBAAuB;AAC1D,QAAO;;;;;gBAKO,gBAAgB;aACnB,UAAU;;;cAGT,aAAa,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IA4DjC,cAAc;;IAEd,cAAc;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAyDlB,eAAsB,4BACpB,WACA,WACA,WACA,SACA,aAAsB,OACqC;AAC3D,KAAI,CAAC,WAAW,UAAU,CACxB,OAAM,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;CAG7C,MAAM,kBAAkB,KAAK,WAAW,GAAG,UAAU,WAAW;CAChE,MAAM,oBAAoB,KAAK,WAAW,UAAU;AAIpD,OAAM,UAAU,iBAAiB,0BADX,WAAW,yBACwC,CAAC;AAC1E,SAAQ,IAAI,6BAA6B,kBAAkB;AAG3D,OAAM,UACJ,mBACA,uBAAuB,WAAW,iBAAiB,WAAW,CAC/D;AACD,OAAM,MAAM,mBAAmB,IAAM;AACrC,SAAQ,IAAI,wBAAwB,oBAAoB;AAExD,KAAI,WACF,SAAQ,IAAI,8BAA8B;AAG5C,QAAO;EACL,eAAe;EACf,eAAe;EAChB;;;;;AC5mDH,SAAS,kCAA0C;AAEjD,QAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAwiCT,SAAS,gCACP,gBACA,mBACA,oBACQ;AACR,QAAO,mCAAmC,gBAAgB,mBAAmB,mBAAmB;;AAGlG,SAAS,mCACP,gBACA,mBACA,oBACQ;AAQR,QAAO;;;;sBAPgB,KAAK,UAAU,eAAe,CAWlB;6BAVT,KAAK,UAAU,kBAAkB,CAWd;0BAVlB,qBAAqB,KAAK,UAAU,mBAAmB,GAAG,OAW1C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAo6B7C,eAAsB,sBACpB,WACA,gBACA,WACA,oBAC2D;AAC3D,KAAI,CAAC,WAAW,UAAU,CACxB,OAAM,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;CAG7C,MAAM,oBAAoB,KAAK,WAAW,GAAG,UAAU,gBAAgB;CACvE,MAAM,oBAAoB,KAAK,WAAW,UAAU;AAEpD,OAAM,UAAU,mBAAmB,iCAAiC,CAAC;AACrE,OAAM,MAAM,mBAAmB,IAAM;AAQrC,OAAM,UAAU,mBANA,gCACd,gBACA,mBACA,mBACD,CAE0C;AAC3C,OAAM,MAAM,mBAAmB,IAAM;AAErC,QAAO;EAAE,eAAe;EAAmB,eAAe;EAAmB;;;;;;;;AC9/D/E,SAAS,8BAA8B,WAA2B;AAEhE,QAAO;;;;;;;;;;;qBADW,KAAK,UAAU,UAAU,CAYd;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAwQ/B,eAAsB,qBACpB,WACA,WACqC;AACrC,KAAI,CAAC,WAAW,UAAU,CACxB,OAAM,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;CAG7C,MAAM,qBAAqB,KAAK,WAAW,GAAG,UAAU,cAAc;AAEtE,OAAM,UAAU,oBAAoB,8BAA8B,UAAU,CAAC;AAC7E,OAAM,MAAM,oBAAoB,IAAM;AAEtC,QAAO,EAAE,gBAAgB,oBAAoB;;;;;AC7Q/C,MAAM,YAAY,QAAQ,cAAc,OAAO,KAAK,IAAI,CAAC;AAEzD,SAAS,aAAqB;AAC5B,KAAI;EACF,MAAM,UAAU,KAAK,WAAW,MAAM,eAAe;AAErD,SADY,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC,CAC3C,WAAW;SAChB;AACN,SAAO;;;AAIX,MAAM,UAAU,YAAY;AAE5B,SAAS,gBAAgB,WAAuC;AAC9D,KAAI;EACF,MAAM,SAAS,SAAS,IAAI,UAAU,cAAc;GAClD,UAAU;GACV,OAAO;IAAC;IAAQ;IAAQ;IAAO;GAC/B,SAAS;GACV,CAAC,CAAC,MAAM;EAET,MAAM,QAAQ,OAAO,MAAM,kBAAkB;AAC7C,SAAO,QAAQ,MAAM,KAAK,UAAU;SAC9B;AACN;;;AAIJ,SAAS,uBAA+B;CACtC,MAAM,OAAO,SAAS;AAGtB,KAAI;EACF,MAAM,SAAS,SAAS,eAAe;GACrC,UAAU;GACV,OAAO;IAAC;IAAQ;IAAQ;IAAO;GAChC,CAAC,CAAC,MAAM;AACT,MAAI,UAAU,WAAW,OAAO,CAC9B,QAAO;SAEH;CAKR,MAAM,QAAQ;EAEZ,KAAK,MAAM,UAAU,OAAO,QAAQ;EAEpC;EAEA;EAEA;EAEA;EACD;AAED,MAAK,MAAM,KAAK,MACd,KAAI,WAAW,EAAE,CAAE,QAAO;AAI5B,QAAO,KAAK,MAAM,UAAU,OAAO,QAAQ;;AAG7C,IAAI,eAAe,4DAA4D,CAC5E,QAAQ,eAAe,QAAQ,CAC/B,OACC,eACA,kFACD,CACA,OACC,gBACA,iFACD,CACA,OACC,oBACA,6GACD,CACA,OACC,eACA,oFACD,CACA,OAAO,gBAAgB,iDAAiD,CACxE,OAAO,cAAc,qDAAqD,CAC1E,OAAO,gBAAgB,oEAAoE,CAC3F,OACC,sBACA,8EACD,CACA,OACC,uBACA,oEACD,CACA,OAAO,aAAa,uDAAuD,CAC3E,OAAO,qBAAqB,2BAA2B,CACvD,OAAO,sBAAsB,sCAAsC,CACnE,OAAO,eAAe,0CAA0C,CAChE,OAAO,iBAAiB,wBAAwB,CAChD,SAAS,WAAW,oCAAoC,CACxD,OAAO,OAAO,SAAS,SAAS;CAC/B,MAAM,QAAQ,OAAO;CACrB,MAAM,WAAW,QAAQ;CACzB,MAAM,YAAY,QAAQ;CAC1B,MAAM,UAAU,QAAQ;CACxB,MAAM,YAAY,QAAQ;CAC1B,
MAAM,aAAa,QAAQ;CAC3B,MAAM,WAAW,QAAQ;CACzB,MAAM,aAAa,QAAQ;CAG3B,MAAM,kBAAkB,YAAY,WAAW,2BAA2B;CAC1E,MAAM,kBAAkB,QAAQ;CAChC,MAAM,cAAc,QAAQ;CAC5B,MAAM,SAAS,QAAQ;CACvB,MAAM,OAAQ,QAAQ,QAAmB,sBAAsB;CAC/D,MAAM,YAAY,QAAQ;CAC1B,MAAM,SAAS,QAAQ,WAAW;CAClC,MAAM,UAAU,QAAQ;CAGxB,MAAM,aAAa,aAAa,QAAQ,KAAK,WAAW,MAAM,GAAG;CAEjE,MAAM,mBACJ,CAAC,CAAC,YAAY,CAAC,CAAC,aAAa,CAAC,CAAC,mBAAmB,CAAC,CAAC,eAAgB,CAAC,CAAC,WAAW,CAAC;CAEpF,MAAM,oBAAoB;AAM1B,KAAI,CAAC,qBAAqB,aAAa,oBAAoB;AACzD,MAAI,CAAC,OAAO;AACV,WAAQ,IAAI,UAAU,OAAO,0DAA0D,CAAC;AACxF,WAAQ,IAAI,UAAU,QAAQ,6CAA6C,CAAC;AAC5E,WAAQ,IAAI,UAAU,QAAQ,8CAA8C,CAAC;AAC7E,WAAQ,KAAK,EAAE;;AAGjB,UAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,UAAQ,IAAI,UAAU,CAAC,QAAQ,OAAO,EAAE,wBAAwB,CAAC;AACjE,UAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,UAAQ,KAAK;AACb,MAAI,WAAW;AACb,WAAQ,IAAI,UAAU,SAAS,qBAAqB,CAAC;AACrD,WAAQ,IAAI,UAAU,SAAS,mBAAmB,kBAAkB,CAAC;AACrE,OAAI,WACF,SAAQ,IAAI,UAAU,SAAS,2BAA2B,CAAC;;AAG/D,MAAI,kBACF,SAAQ,IAAI,UAAU,SAAS,sBAAsB,CAAC;AAExD,UAAQ,KAAK;EAEb,IAAI,iBAAiB;AACrB,MAAI,WAAW;GAGb,MAAM,EAAE,kBAAkB,MAAM,4BADf,KAAK,SAAS,EAAE,gBAAgB,QAAQ,EAGvD,gBACA,OACA,iBACA,WACD;AACD,oBAAiB;;AAGnB,MAAI,mBAAmB;GACrB,MAAM,gBAAgB,KAAK,SAAS,EAAE,gBAAgB,aAAa;GAEnE,IAAIA;AACJ,OAAI,SAEF,mBADe,MAAM,qBAAqB,eAAe,MAAM,EACvC;GAE1B,MAAM,EAAE,kBAAkB,MAAM,sBAC9B,eACA,gBACA,OACA,eACD;AACD,oBAAiB;;EAInB,MAAM,cAAc,MAAM,sBAAsB,gBAAgB,OAAO,QAAQ;EAG/E,MAAM,eAAe,gBAAgB,KAAK;AAqB1C,QAAM,kBApBW,eACf,OACA,MACA;GACE,UAAU;GACV,WAAW;GACX,SAAS,WAAW;GACpB,WAAW,CAAC,CAAC;GACb,YAAY,CAAC,CAAC;GACd,UAAU,CAAC,CAAC;GACZ,iBAAiB;GACjB,aAAa;GACD;GACb,EACD;GACE,mBAAmB;GACnB;GACA,WAAW,YAAY;GACxB,CACF,CACgC;AAEjC,UAAQ,KAAK;AACb,UAAQ,IAAI,UAAU,SAAS,IAAI,OAAO,GAAG,CAAC,CAAC;AAC/C,UAAQ,IAAI,UAAU,CAAC,SAAS,OAAO,EAAE,mBAAmB,CAAC;AAC7D,UAAQ,IAAI,UAAU,SAAS,IAAI,OAAO,GAAG,CAAC,CAAC;AAC/C,UAAQ,KAAK;AACb,UAAQ,IAAI,gBAAgB;AAC5B,UAAQ,IAAI,UAAU,UAAU,KAAK,QAAQ,CAAC;AAC9C,UAAQ,KAAK;AACb,MAAI,WAAW;AACb,WAAQ,IAAI,UAAU,QAAQ,iBAAiB,CAAC;AAChD,WAAQ,IACN,UAAU,QAAQ,sEAAsE,CACzF;AACD,WAAQ,IAAI,UAAU,QAAQ,kDAAkD,CAAC;AACjF,WAAQ,KAAK;AACb,WAAQ,IAAI,wCAAwC;AACpD,WAAQ,IAAI,UAAU,UAAU,oCAAoC,CAAC;AACrE,WAAQ,IAAI,UAAU,QAAQ,4CAA4C,CAAC;AAC3E,WAAQ,IAAI,UAAU,QAAQ,4CAA4C,CAAC;AAC3E,WAAQ,IAAI,UAAU,QAAQ,mBAAmB,CAAC;AAClD,WAAQ,IAAI,UAAU,QAAQ,8CAA8C,CAAC;AAC7E,WAAQ,IAAI,UAAU,QAAQ,kDAAkD,CAAC;AACjF,WAAQ,IAAI,UAAU,QAAQ,wDAAwD,CAAC;AACvF,WAAQ,KAAK;AACb,WAAQ,IAAI,cAAc;AAC1B,WAAQ,IAAI,UAAU,QAAQ,gCAAgC,CAAC;;AAEjE;;AAGF,KACE,CAAC,YACD,CAAC,aACD,CAAC,WACD,CAAC,aACD,CAAC,qBACD,CAAC,mBACD,CAAC,aACD;AACA,UAAQ,IAAI,UAAU,UAAU,+CAA+C,CAAC;AAChF,UAAQ,IAAI,UAAU,QAAQ,yDAAyD,CAAC;AACxF,UAAQ,IACN,UAAU,QAAQ,iEAAiE,CACpF;AACD,UAAQ,IACN,UACE,QACA,+FACD,CACF;AACD,UAAQ,IAAI,UAAU,QAAQ,qDAAqD,CAAC;AACpF,UAAQ,IAAI,UAAU,QAAQ,uDAAuD,CAAC;AACtF,UAAQ,IACN,UAAU,QAAQ,qEAAqE,CACxF;AACD,UAAQ,IACN,UAAU,QAAQ,qEAAqE,CACxF;AACD,UAAQ,IACN,UAAU,QAAQ,mEAAmE,CACtF;AACD,UAAQ,KAAK;AACb,UAAQ,IAAI,kBAAkB;AAC9B,UAAQ,IAAI,UAAU,QAAQ,6CAA6C,CAAC;AAC5E,UAAQ,IAAI,UAAU,QAAQ,+CAA+C,CAAC;AAC9E,UAAQ,IAAI,UAAU,QAAQ,2DAA2D,CAAC;AAC1F,UAAQ,IAAI,UAAU,QAAQ,6CAA6C,CAAC;AAC5E,UAAQ,IAAI,UAAU,QAAQ,yDAAyD,CAAC;AACxF,UAAQ,IAAI,UAAU,QAAQ,8CAA8C,CAAC;AAC7E,UAAQ,IAAI,UAAU,QAAQ,6DAA6D,CAAC;AAC5F,UAAQ,IAAI,UAAU,QAAQ,sDAAsD,CAAC;AACrF,UAAQ,IACN,UACE,QACA,2EACD,CACF;AACD,UAAQ,KAAK,EAAE;;AAGjB,KAAI,CAAC,SAAS,CAAC,QAAQ;AACrB,UAAQ,IAAI,UAAU,OAAO,gCAAgC,CAAC;AAC9D,UAAQ,IACN,UACE,QACA,0EACD,CACF;AACD,UAAQ,KAAK,EAAE;;AAGjB,SAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,SAAQ,IAAI,UAAU,CAAC,QAAQ,OAAO,EAAE,yBAAyB,CAAC;AAClE,SAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,SAAQ,KAAK;CAEb,MAAMC,UAAmB,EAAE;AAC3B,KAAI,SACF,SAAQ,
KAAK;EACX,MAAM;EACN,aAAa;EACb,SAAS,OAAO,KAAK,cAAc;EACnC,aAAa,OAAO,KAAK,cAAc;EACxC,CAAC;AAKJ,KAAI,UACF,SAAQ,KAAK;EACX,MAAM;EACN,aAAa;EACb,SAAS,OAAO,KAAK,8BAA8B;EACnD,aAAa,OAAO,KAAK,gCAA8B;EACxD,CAAC;AAOJ,KAAI,WAAW,CAAC,WAAW;EACzB,MAAM,cAAc;EACpB,MAAM,iBAAiB;EAGvB,IAAI,gBAAgB,QAAQ,QAAQ,QAAQ,GAAG;AAE/C,MAAI,cAAc,SAAS,gBAAgB;AACzC,WAAQ,IACN,UAAU,OAAO,+BAA+B,eAAe,qBAAqB,CACrF;AACD,WAAQ,IACN,UAAU,QAAQ,gBAAgB,cAAc,KAAK,cAAc,OAAO,SAAS,CACpF;AACD,WAAQ,IAAI,UAAU,QAAQ,eAAe,eAAe,aAAa,CAAC;AAC1E,WAAQ,KAAK;AACb,WAAQ,IAAI,UAAU,UAAU,qDAAqD,CAAC;AACtF,WAAQ,IAAI,UAAU,QAAQ,cAAc,CAAC;AAC7C,WAAQ,IAAI,UAAU,QAAQ,uCAAuC,CAAC;AACtE,WAAQ,IAAI,UAAU,QAAQ,sCAAsC,CAAC;AACrE,WAAQ,KAAK,EAAE;;EAKjB,MAAM,YAAY,cAAc,OAAO,gBAAgB,IAAI;AAE3D,UAAQ,KAAK;GACX,MAAM;GACN,aAAa,iCAAiC,cAAc;GAC5D,SAAS,OAAO,KAAK,YAAY;GACjC,aAAa,OAAO,KAAK,UAAU;GACpC,CAAC;;AAKJ,KAAI,iBAAiB;AAEnB,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,uCAAqC;GAC1D,aAAa,OAAO,KAAK,uCAAqC;GAC/D,CAAC;AAGF,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,kCAAgC;GACrD,aAAa,OAAO,KAAK,kCAAgC;GAC1D,CAAC;AAIF,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,qCAAqC;GAC1D,aAAa,OAAO,KAAK,qCAAqC;GAC/D,CAAC;AAIF,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,sCAAsC;GAC3D,aAAa,OAAO,KAAK,sCAAsC;GAChE,CAAC;AAOF,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,sEAAkE;GACvF,aAAa,OAAO,KAAK,sEAAkE;GAC5F,CAAC;;AAOJ,KAAI,aAAa;AAKf,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,gBAAgB;GACrC,aAAa,OAAO,KAAK,gBAAgB;GAC1C,CAAC;AAEF,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,kBAAkB;GACvC,aAAa,OAAO,KAAK,kBAAkB;GAC5C,CAAC;AAMF,UAAQ,KAAK;GACX,MAAM;GACN,aAAa;GACb,SAAS,OAAO,KAAK,4BAA4B;GACjD,aAAa,OAAO,KAAK,4BAA4B;GACtD,CAAC;;AAGJ,KAAI;EACF,MAAM,SAAS,MAAM,WAAW;GAC9B,WAAW;GACC;GACZ;GACA;GACA;GACA;GACD,CAAC;AAEF,MAAI,QAAQ;AACV,WAAQ,KAAK;AACb,WAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,WAAQ,IAAI,UAAU,CAAC,QAAQ,OAAO,EAAE,qBAAqB,CAAC;AAC9D,WAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,WAAQ,KAAK;AACb,WAAQ,IAAI,UAAU,QAAQ,+CAA+C,CAAC;AAC9E,WAAQ,IAAI,UAAU,QAAQ,iCAAiC,SAAS,iBAAiB,CAAC;AAC1F,WAAQ,KAAK,EAAE;;AAIjB,MAAI,aAAa,OAAO,WAAW,OAAO,YAAY;AACpD,WAAQ,KAAK;AACb,WAAQ,IAAI,UAAU,SAAS,IAAI,OAAO,GAAG,CAAC,CAAC;AAC/C,WAAQ,IAAI,UAAU,CAAC,SAAS,OAAO,EAAE,qBAAqB,CAAC;AAC/D,WAAQ,IAAI,UAAU,SAAS,IAAI,OAAO,GAAG,CAAC,CAAC;AAC/C,WAAQ,KAAK;AACb,WAAQ,IAAI,UAAU,SAAS,4BAA4B,OAAO,aAAa,CAAC;AAChF,WAAQ,KAAK,EAAE;;AAGjB,MAAI,OAAO,WAAW,OAAO,cAAc,OAAO;AAChD,WAAQ,KAAK;GAEb,IAAI,iBAAiB,OAAO;AAE5B,OAAI,WAAW;IAEb,MAAM,EAAE,kBAAkB,MAAM,4BADf,KAAK,SAAS,EAAE,gBAAgB,QAAQ,EAGvD,gBACA,OACA,iBACA,WACD;AACD,qBAAiB;AAEjB,YAAQ,KAAK;AACb,YAAQ,IAAI,UAAU,QAAQ,oBAAoB,CAAC;AACnD,YAAQ,IAAI,UAAU,SAAS,qBAAqB,kBAAkB,CAAC;AACvE,QAAI,WACF,SAAQ,IAAI,UAAU,SAAS,6BAA6B,CAAC;;AAIjE,OAAI,mBAAmB;IACrB,MAAM,gBAAgB,KAAK,SAAS,EAAE,gBAAgB,aAAa;IACnE,IAAID;AACJ,QAAI,SAEF,mBADe,MAAM,qBAAqB,eAAe,MAAM,EACvC;IAE1B,MAAM,EAAE,kBAAkB,MAAM,sBAC9B,eACA,gBACA,OACA,eACD;AACD,qBAAiB;AACjB,YAAQ,KAAK;AACb,YAAQ,IAAI,UAAU,QAAQ,qBAAqB,CAAC;;GAGtD,IAAI;AACJ,OAAI,aAAa,kBACf,eAAc,MAAM,sBAAsB,gBAAgB,OAAO,QAAQ;OAEzE,eAAc,MAAM,YAAY,OAAO,YAAY,OAAO,QAAQ;GAIpE,MAAM,eAAe,gBAAgB,KAAK;AAqB1C,SAAM,kBApBW,eACf,OACA,MACA;IACE,UAAU,CAAC,CAAC;IACZ,WAAW,CAAC,CAAC;IACb,SAAS,WAAW;IACpB,WAAW,CAAC,CAAC;IACb,YAAY,CAAC,CAAC;IACd,UAAU,CAAC,CAAC;IACZ,iBAAiB,CAAC,CAAC;IACnB,aAAa,CAAC,CAAC;IACf,YAAY,CAAC,CAAC;IACf,EACD;IACE,mBAAmB;IACnB;IACA,WAAW,YAAY;IACxB,CACF,CACgC;;AAGnC,MAAI,OAAO,SAAS;AAClB,WAAQ,KAAK;AACb,WAAQ,IAAI,UAAU,SAAS,IAAI,OAAO,GAAG,CAAC,CAAC;AAC/C,WAAQ,IAAI,UAAU,CAAC,SAAS,OAAO,EAAE,qBAAqB,CAAC;AAC/D,WAAQ,IAAI,UAAU,SAAS,IAAI,OAAO,GAAG,CAAC,CAAC;;AAGjD,UAAQ,KAAK,OAAO,UAAU,IA
AI,EAAE;UAC7B,OAAO;AACd,UAAQ,MAAM,UAAU,OAAO,UAAW,MAAgB,UAAU,CAAC;AACrE,MAAI,QAAS,SAAQ,MAAO,MAAgB,MAAM;AAClD,UAAQ,KAAK,EAAE;;EAEjB,CACD,QAAQ,QAAQ,+BAA+B,CAC/C,OAAO,YAAY;AAClB,OAAM,aAAa;EACnB,CACD,QAAQ,UAAU,qCAAqC,CACvD,SAAS,mBAAmB,oCAAoC,CAChE,OAAO,6BAA6B,qDAAqD,CACzF,OAAO,6BAA6B,wCAAwC,CAC5E,OACC,iBACA,0IACD,CACA,OAAO,OAAO,SAAS,SAAS;CAC/B,MAAM,SAAS,OAAO;CACtB,MAAM,eAAe,QAAQ;CAC7B,MAAM,eAAe,QAAQ;CAC7B,MAAM,OAAO,QAAQ;AAGrB,KAAI,gBAAgB,gBAAgB,MAAM;AACxC,QAAM,sBAAsB;GAC1B;GACA;GACA,OAAO,OAAO,CAAC,KAAK,GAAG;GACxB,CAAC;AACF;;AAIF,KAAI,CAAC,QAAQ;AACX,UAAQ,MACN,UACE,OACA,gGACD,CACF;AACD,UAAQ,KAAK,EAAE;;AAIjB,KAAI,OAAO,SAAS,IAAI,IAAI,WAAW,OAAO,EAAE;EAE9C,MAAM,EAAE,qBAAW,MAAM,OAAO;AAChC,MAAI;AACF,SAAME,SAAO,OAAO;AACpB,WAAQ,IAAI,UAAU,SAAS,gBAAgB,SAAS,CAAC;WAClD,OAAO;AACd,WAAQ,MAAM,UAAU,OAAO,UAAW,MAAgB,UAAU,CAAC;AACrE,WAAQ,KAAK,EAAE;;OAIjB,OAAM,YAAY,OAAO;EAE3B,CACD,QAAQ,WAAW,4BAA4B,CAC/C,aAAa;AACZ,SAAQ,IAAI,gBAAgB,UAAU;EACtC,CACD,QAAQ,SAAS,mDAAmD,CACpE,OAAO,YAAY;AAClB,OAAM,iBAAiB;EACvB,CACD,QAAQ,UAAU,0CAA0C,CAC5D,SAAS,WAAW,oEAAoE,CACxF,OAAO,aAAa,iCAAiC,CACrD,OAAO,qBAAqB,2BAA2B,CACvD,OAAO,iBAAiB,wBAAwB,CAChD,OAAO,OAAO,SAAS,SAAS;CAC/B,MAAM,YAAY,OAAO;CACzB,MAAM,SAAS,QAAQ;CACvB,MAAM,gBAAiB,QAAQ,QAAmB,sBAAsB;CACxE,MAAM,UAAU,QAAQ;AAExB,SAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,SAAQ,IAAI,UAAU,CAAC,QAAQ,OAAO,EAAE,uBAAuB,CAAC;AAChE,SAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,SAAQ,KAAK;AAGb,KAAI,CAAC,WAAW,cAAc,EAAE;AAC9B,UAAQ,IAAI,UAAU,OAAO,oCAAoC,gBAAgB,CAAC;AAClF,UAAQ,IAAI,UAAU,QAAQ,qCAAqC,CAAC;AACpE,UAAQ,KAAK,EAAE;;CAIjB,IAAIC;AACJ,KAAI,WAAW;EACb,MAAM,OAAO,MAAM,kBAAkB,UAAU;AAC/C,MAAI,CAAC,MAAM;AACT,WAAQ,IAAI,UAAU,OAAO,uCAAuC,UAAU,GAAG,CAAC;AAClF,WAAQ,IACN,UAAU,QAAQ,qEAAqE,CACxF;AACD,WAAQ,IAAI,UAAU,QAAQ,0DAA0D,CAAC;AACzF,WAAQ,KAAK,EAAE;;AAEjB,aAAW,CAAC,KAAK;QACZ;AACL,aAAW,MAAM,iBAAiB;AAClC,MAAI,SAAS,WAAW,GAAG;AACzB,WAAQ,IAAI,UAAU,UAAU,kCAAkC,CAAC;AACnE,WAAQ,IAAI,UAAU,QAAQ,4DAA4D,CAAC;AAC3F,WAAQ,KAAK,EAAE;;;AAInB,SAAQ,IAAI,UAAU,SAAS,uBAAuB,gBAAgB,CAAC;AACvE,SAAQ,IAAI,UAAU,SAAS,SAAS,SAAS,OAAO,sBAAsB,CAAC;AAC/E,KAAI,OACF,SAAQ,IAAI,UAAU,QAAQ,sCAAsC,CAAC;AAEvE,SAAQ,KAAK;CAEb,IAAI,eAAe;CACnB,IAAI,YAAY;AAEhB,MAAK,MAAM,QAAQ,UAAU;AAC3B,MAAI,CAAC,KAAM;AAEX,UAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,UAAQ,IAAI,UAAU,SAAS,aAAa,UAAU,CAAC,QAAQ,OAAO,EAAE,KAAK,KAAK,GAAG,CAAC;AACtF,UAAQ,IAAI,UAAU,QAAQ,cAAc,cAAc,KAAK,QAAQ,GAAG,CAAC;AAE3E,MAAI,QAAQ;AACV,WAAQ,IAAI,UAAU,QAAQ,qCAAqC,CAAC;AACpE;AACA;;AAGF,MAAI;GAEF,MAAMF,UAAmB,EAAE;AAE3B,OAAI,KAAK,QAAQ,SACf,SAAQ,KAAK;IACX,MAAM;IACN,aAAa;IACb,SAAS,OAAO,KAAK,cAAc;IACnC,aAAa,OAAO,KAAK,cAAc;IACxC,CAAC;AAGJ,OAAI,KAAK,QAAQ,UACf,SAAQ,KAAK;IACX,MAAM;IACN,aAAa;IACb,SAAS,OAAO,KAAK,8BAA8B;IACnD,aAAa,OAAO,KAAK,gCAA8B;IACxD,CAAC;AAKJ,OAAI,KAAK,QAAQ,WAAW,CAAC,KAAK,QAAQ,WAAW;IACnD,MAAM,cAAc;IACpB,MAAM,YAAY,KAAK,QAAQ,QAAQ,OAAO,IAAoB,IAAI;AACtE,YAAQ,KAAK;KACX,MAAM;KACN,aAAa,iCAAiC,KAAK,QAAQ,QAAQ;KACnE,SAAS,OAAO,KAAK,YAAY;KACjC,aAAa,OAAO,KAAK,UAAU;KACpC,CAAC;;AAGJ,OAAI,KAAK,QAAQ,iBAAiB;AAChC,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,uCAAqC;KAC1D,aAAa,OAAO,KAAK,uCAAqC;KAC/D,CAAC;AACF,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,kCAAgC;KACrD,aAAa,OAAO,KAAK,kCAAgC;KAC1D,CAAC;AACF,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,qCAAqC;KAC1D,aAAa,OAAO,KAAK,qCAAqC;KAC/D,CAAC;AACF,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,sCAAsC;KAC3D,aAAa,OAAO,KAAK,sCAAsC;KAChE,CAAC;AACF,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,sEAAkE;KACvF,aAAa,OAAO,KAAK,sEAAkE;KAC5F,CAAC;;AAGJ,OAAI,KAAK,QAAQ,aAAa;AAC5B,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,gBAAgB;KACrC,aAA
a,OAAO,KAAK,gBAAgB;KAC1C,CAAC;AACF,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,kBAAkB;KACvC,aAAa,OAAO,KAAK,kBAAkB;KAC5C,CAAC;AACF,YAAQ,KAAK;KACX,MAAM;KACN,aAAa;KACb,SAAS,OAAO,KAAK,4BAA4B;KACjD,aAAa,OAAO,KAAK,4BAA4B;KACtD,CAAC;;GAKJ,MAAM,aAAa,KADH,KAAK,SAAS,EAAE,gBAAgB,OAAO,EACtB,GAAG,KAAK,KAAK,UAAU;AAGxD,OAAI,QAAQ,SAAS,GAAG;AAUtB,QAAI,EATW,MAAM,WAAW;KAC9B,WAAW;KACX;KACA;KACA,QAAQ;KACR,QAAQ;KACR;KACD,CAAC,EAEU,SAAS;AACnB,aAAQ,IAAI,UAAU,OAAO,8BAA8B,CAAC;AAC5D;AACA;;AAIF,QAAI,QAAQ,aAAa,SACvB,KAAI;KACF,MAAM,EAAE,yBAAa,MAAM,OAAO;AAClC,gBAAS,qCAAqC,WAAW,IAAI,EAC3D,OAAO,QACR,CAAC;AACF,SAAI,QACF,SAAQ,IAAI,UAAU,QAAQ,qBAAqB,CAAC;YAEhD;AACN,aAAQ,IAAI,UAAU,UAAU,iCAAiC,CAAC;;;GAKxE,IAAI,iBAAiB,QAAQ,SAAS,IAAI,aAAa;AAKvD,OADqB,KAAK,QAAQ,aAAa,CAAC,CAAC,KAAK,QAAQ,OAC5C;IAEhB,MAAM,gBACJ,KAAK,QAAQ,WAAW,KAAK,QAAQ,SAAS;IAEhD,MAAM,EAAE,kBAAkB,MAAM,4BADf,KAAK,SAAS,EAAE,gBAAgB,QAAQ,EAGvD,gBACA,KAAK,MACL,eACA,KAAK,QAAQ,cAAc,MAC5B;AACD,qBAAiB;AACjB,QAAI,SAAS;AACX,aAAQ,IAAI,UAAU,QAAQ,kCAAkC,CAAC;AACjE,SAAI,KAAK,QAAQ,WACf,SAAQ,IAAI,UAAU,QAAQ,6BAA6B,CAAC;;AAIhE,QAAI,KAAK,QAAQ,SAAS,CAAC,KAAK,QAAQ,WAAW;AACjD,UAAK,QAAQ,YAAY;AACzB,UAAK,QAAQ,UAAU,KAAK,QAAQ;AACpC,YAAO,KAAK,QAAQ;;;AAIxB,OAAI,KAAK,QAAQ,YAAY;IAC3B,MAAM,gBAAgB,KAAK,SAAS,EAAE,gBAAgB,aAAa;IACnE,IAAID;AACJ,QAAI,KAAK,QAAQ,SAEf,mBADe,MAAM,qBAAqB,eAAe,KAAK,KAAK,EAC3C;IAE1B,MAAM,EAAE,kBAAkB,MAAM,sBAC9B,eACA,gBACA,KAAK,MACL,eACD;AACD,qBAAiB;AACjB,QAAI,QACF,SAAQ,IAAI,UAAU,QAAQ,mCAAmC,CAAC;;GAKtE,MAAM,EAAE,oBAAS,kBAAQ,sBAAU,UAAU,MAAM,OAAO;GAC1D,IAAI,YAAY,KAAK;AAGrB,OAAI,CAAC,WAAW;IACd,MAAM,iBAAiB;KACrB,KAAK,SAAS,EAAE,aAAa;KAC7B,KAAK,SAAS,EAAE,MAAM;KACtB,KAAK,SAAS,EAAE,OAAO;KACvB;KACA;KACA,KAAK,SAAS,EAAE,gBAAgB,UAAU;KAC3C;AAED,SAAK,MAAM,OAAO,gBAAgB;KAChC,MAAM,eAAe,KAAK,KAAK,KAAK,KAAK;AACzC,SAAI,WAAW,aAAa,CAC1B,KAAI;AAEF,WADc,MAAM,MAAM,aAAa,EAC7B,gBAAgB,EAAE;OAC1B,MAAM,SAAS,MAAMI,WAAS,aAAa;AAC3C,WACE,OAAO,SAAS,oBAAoB,IACpC,OAAO,SAAS,qBAAqB,IACrC,OAAO,SAAS,0BAA0B,EAC1C;AACA,oBAAY;AACZ,YAAI,QACF,SAAQ,IAAI,UAAU,QAAQ,6BAA6B,YAAY,CAAC;AAE1E;;;aAGE;;;AAQd,OAAI,UACF,KAAI;AACF,QAAI,WAAW,UAAU,EAEvB;SADsB,MAAMA,WAAS,UAAU,KACzB,gBAAgB;AACpC,YAAMF,SAAO,UAAU;AACvB,YAAMG,UAAQ,gBAAgB,UAAU;AACxC,UAAI,QACF,SAAQ,IAAI,UAAU,QAAQ,sBAAsB,YAAY,CAAC;;WAGhE;AAEL,WAAMA,UAAQ,gBAAgB,UAAU;AACxC,SAAI,QACF,SAAQ,IAAI,UAAU,QAAQ,wBAAwB,YAAY,CAAC;;AAIvE,SAAK,YAAY;YACV,cAAc;AACrB,YAAQ,IACN,UACE,UACA,mCAAoC,aAAuB,UAC5D,CACF;;AAKL,QAAK,6BAAY,IAAI,MAAM,EAAC,aAAa;AACzC,QAAK,qBAAqB;AAC1B,QAAK,eAAe,gBAAgB,cAAc;AAClD,QAAK,oBAAoB;AACzB,SAAM,kBAAkB,KAAK;AAE7B,WAAQ,IAAI,UAAU,SAAS,2BAA2B,CAAC;AAC3D;WACO,OAAO;AACd,WAAQ,IAAI,UAAU,OAAO,cAAe,MAAgB,UAAU,CAAC;AACvE,OAAI,QACF,SAAQ,MAAO,MAAgB,MAAM;AAEvC;;;AAIJ,SAAQ,KAAK;AACb,SAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;AAC9C,KAAI,QAAQ;AACV,UAAQ,IAAI,UAAU,CAAC,QAAQ,OAAO,EAAE,qBAAqB,CAAC;AAC9D,UAAQ,IAAI,UAAU,QAAQ,kBAAkB,aAAa,YAAY,CAAC;YACjE,cAAc,GAAG;AAC1B,UAAQ,IAAI,UAAU,CAAC,SAAS,OAAO,EAAE,oBAAoB,CAAC;AAC9D,UAAQ,IAAI,UAAU,QAAQ,aAAa,aAAa,YAAY,CAAC;QAChE;AACL,UAAQ,IAAI,UAAU,CAAC,UAAU,OAAO,EAAE,gCAAgC,CAAC;AAC3E,UAAQ,IAAI,UAAU,QAAQ,cAAc,aAAa,YAAY,YAAY,CAAC;;AAEpF,SAAQ,IAAI,UAAU,QAAQ,IAAI,OAAO,GAAG,CAAC,CAAC;EAC9C,CACD,KAAK,CACL,OAAO,QAAe;AACrB,SAAQ,MAAM,IAAI;AAClB,SAAQ,KAAK,EAAE;EACf"}
|
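Every modification in the cli.ts source embedded above is expressed as a Patch entry with a pattern Buffer and a replacement Buffer of identical byte length, collected into an array and handed to patchDroid. The patcher itself lives in patcher.ts and is not part of this diff, so the following is only a minimal sketch of what such a same-length byte patcher can look like; applyPatches and its error messages are illustrative assumptions, not the package's actual implementation.

import { readFile, writeFile } from "node:fs/promises";

// Shape of the patch entries built by cli.ts (mirrors the Patch type it imports).
interface Patch {
  name: string;
  description: string;
  pattern: Buffer;     // bytes to locate in the droid binary
  replacement: Buffer; // must be exactly as long as pattern
}

// Hypothetical stand-in for patchDroid: overwrite every occurrence of each
// pattern with its same-length replacement. Keeping the length identical means
// no byte offset in the executable shifts, which is why cli.ts pads or trims
// its replacement strings to match the originals.
async function applyPatches(inputPath: string, outputPath: string, patches: Patch[]): Promise<void> {
  const data = await readFile(inputPath);
  for (const patch of patches) {
    if (patch.pattern.length !== patch.replacement.length) {
      throw new Error(`${patch.name}: pattern and replacement differ in length`);
    }
    let offset = data.indexOf(patch.pattern);
    if (offset === -1) {
      throw new Error(`${patch.name}: pattern not found (droid version may differ)`);
    }
    while (offset !== -1) {
      patch.replacement.copy(data, offset); // in-place byte substitution
      offset = data.indexOf(patch.pattern, offset + patch.pattern.length);
    }
  }
  await writeFile(outputPath, data, { mode: 0o755 });
}

After writing the patched file, the update command in the source re-signs the result on macOS with codesign --force --deep --sign -, since modifying the binary invalidates its existing signature.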
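All of the string rewrites rely on that same-length constraint: the API base URL is space-padded back to the original 22 characters, the reasoning-effort validation bypass trades the two characters saved by changing !== to != for an inserted 0&& that forces the whole condition false, and the telemetry patches swap a single letter in the env-var names or invert the flushToWeb early return without moving a byte. A throwaway check like the one below (not part of the package) confirms that a pattern/replacement pair preserves length before it is baked into a patch; the pairs are copied from the patch definitions shown above, and the padded localhost URL is just an example target.

// Verify each replacement is byte-for-byte the same length as its pattern.
const pairs: Array<[string, string]> = [
  [
    'T!=="none"&&T!=="off"&&!W.supportedReasoningEfforts.includes(T)',
    'T!="none"&&T!="off"&&0&&W.supportedReasoningEfforts.includes(T)',
  ],
  ["https://api.factory.ai", "http://127.0.0.1:3000".padEnd(22, " ")],
  ["ENABLE_SENTRY", "XNABLE_SENTRY"],
  ["this.webEvents.length===0", "!0||this.webEvents.length"],
];

for (const [pattern, replacement] of pairs) {
  const same = Buffer.byteLength(pattern) === Buffer.byteLength(replacement);
  console.log(`${same ? "ok " : "BAD"} ${Buffer.byteLength(pattern)} bytes  ${pattern}`);
}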