pi-headroom 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +102 -0
- package/package.json +27 -0
- package/src/format-bridge.ts +326 -0
- package/src/index.ts +341 -0
- package/src/proxy-manager.ts +364 -0
- package/tsconfig.json +15 -0
package/README.md
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
# pi-headroom
|
|
2
|
+
|
|
3
|
+
Transparent LLM context compression for [Pi](https://github.com/mariozechner/pi-coding-agent) using [Headroom](https://github.com/chopratejas/headroom). Automatically compresses conversation context before every LLM call, saving 70–95% of tokens without changing your workflow.
|
|
4
|
+
|
|
5
|
+
**Zero-config:** The extension automatically installs the Headroom proxy (`pip install headroom-ai[proxy]`), starts it on session start, and stops it on exit. You don't need to touch the proxy manually.
|
|
6
|
+
|
|
7
|
+
## How It Works
|
|
8
|
+
|
|
9
|
+
```
|
|
10
|
+
Session start → auto-install headroom-ai[proxy] → spawn proxy on :8787
|
|
11
|
+
↓
|
|
12
|
+
User prompt → Pi builds context → pi-headroom compresses → LLM receives compressed context
|
|
13
|
+
↓
|
|
14
|
+
Session exit → proxy stopped automatically
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
1. **`session_start`**: Checks if proxy is running. If not, installs `headroom-ai[proxy]` via pip (if needed), spawns it as a background process, and polls until healthy.
|
|
18
|
+
2. **`context` event**: Before every LLM call, converts Pi messages to OpenAI format, sends them to the proxy for compression, converts back, and returns compressed messages.
|
|
19
|
+
3. **`session_shutdown`**: Gracefully stops the proxy (only if the extension started it).
|
|
20
|
+
|
|
21
|
+
## Prerequisites
|
|
22
|
+
|
|
23
|
+
- **Python ≥ 3.10** — needed to run the Headroom proxy (the extension auto-installs it via pip)
|
|
24
|
+
|
|
25
|
+
That's it. The extension handles everything else.
|
|
26
|
+
|
|
27
|
+
## Installation
|
|
28
|
+
|
|
29
|
+
```bash
|
|
30
|
+
# From local path (development)
|
|
31
|
+
pi install ./pi-headroom
|
|
32
|
+
|
|
33
|
+
# From npm (once published)
|
|
34
|
+
pi install npm:pi-headroom
|
|
35
|
+
|
|
36
|
+
# Quick test without installing
|
|
37
|
+
pi -e ./pi-headroom
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
## Configuration
|
|
41
|
+
|
|
42
|
+
| Env Variable | Default | Description |
|
|
43
|
+
|------------------|-------------------------|------------------------------------------------------------|
|
|
44
|
+
| `HEADROOM_URL` | _(none)_ | Set to use your own proxy. **Disables auto-management.** |
|
|
45
|
+
| `HEADROOM_PORT` | `8787` | Port for the auto-managed proxy |
|
|
46
|
+
|
|
47
|
+
### Auto-management vs. manual mode
|
|
48
|
+
|
|
49
|
+
- **No env vars set** (default): The extension auto-installs, auto-starts, and auto-stops the proxy. Zero-config.
|
|
50
|
+
- **`HEADROOM_URL` set**: The extension skips auto-management and health-checks the URL you provide. You manage the proxy yourself.
|
|
51
|
+
- **`HEADROOM_PORT` set**: The auto-managed proxy starts on your chosen port instead of 8787.
|
|
52
|
+
|
|
53
|
+
## Commands
|
|
54
|
+
|
|
55
|
+
### `/headroom [on|off|status]`
|
|
56
|
+
|
|
57
|
+
Toggle compression or show status.
|
|
58
|
+
|
|
59
|
+
- `/headroom` or `/headroom status` — Show current state, proxy mode, and session compression stats
|
|
60
|
+
- `/headroom on` — Enable compression (auto-starts proxy if needed)
|
|
61
|
+
- `/headroom off` — Disable compression (passthrough mode)
|
|
62
|
+
|
|
63
|
+
### `/headroom-health`
|
|
64
|
+
|
|
65
|
+
Check proxy health and show diagnostics. Shows whether the proxy is managed by the extension or external.
|
|
66
|
+
|
|
67
|
+
## Status Bar
|
|
68
|
+
|
|
69
|
+
The extension shows progress and compression status in Pi's footer:
|
|
70
|
+
|
|
71
|
+
- `⏳ Installing headroom-ai...` — Auto-installing the proxy
|
|
72
|
+
- `⏳ Starting Headroom proxy...` — Spawning the proxy
|
|
73
|
+
- `✓ Headroom` — Proxy online, ready to compress
|
|
74
|
+
- `✓ Headroom -42% (1,234 saved)` — Last compression result
|
|
75
|
+
- `⚠ Headroom offline` — Proxy unavailable, using uncompressed context
|
|
76
|
+
- `○ Headroom off` — Compression disabled by user
|
|
77
|
+
|
|
78
|
+
## Behavior
|
|
79
|
+
|
|
80
|
+
- **Zero-config**: Installs and starts the proxy automatically on first use
|
|
81
|
+
- **Smart detection**: Won't reinstall or restart if already running (e.g., you started it manually)
|
|
82
|
+
- **Graceful fallback**: If anything fails, Pi continues with uncompressed context
|
|
83
|
+
- **Crash recovery**: If the proxy crashes mid-session, one automatic restart is attempted
|
|
84
|
+
- **Clean shutdown**: The proxy is stopped on session exit (only if the extension started it)
|
|
85
|
+
- **Cross-platform**: Works on macOS, Linux, and Windows
|
|
86
|
+
|
|
87
|
+
## Architecture
|
|
88
|
+
|
|
89
|
+
```
|
|
90
|
+
pi-headroom/
|
|
91
|
+
├── package.json # Pi package manifest
|
|
92
|
+
├── tsconfig.json
|
|
93
|
+
├── src/
|
|
94
|
+
│ ├── index.ts # Extension: context hook, lifecycle, commands
|
|
95
|
+
│ ├── format-bridge.ts # Pi-AI ↔ OpenAI message format conversion
|
|
96
|
+
│ └── proxy-manager.ts # Auto-install, start, stop, health check
|
|
97
|
+
└── README.md
|
|
98
|
+
```
|
|
99
|
+
|
|
100
|
+
## License
|
|
101
|
+
|
|
102
|
+
MIT
|
package/package.json
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "pi-headroom",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Transparent LLM context compression for Pi using Headroom",
|
|
5
|
+
"keywords": [
|
|
6
|
+
"pi-package"
|
|
7
|
+
],
|
|
8
|
+
"type": "module",
|
|
9
|
+
"main": "./src/index.ts",
|
|
10
|
+
"pi": {
|
|
11
|
+
"extensions": [
|
|
12
|
+
"./src/index.ts"
|
|
13
|
+
]
|
|
14
|
+
},
|
|
15
|
+
"dependencies": {
|
|
16
|
+
"headroom-ai": "^0.1.0"
|
|
17
|
+
},
|
|
18
|
+
"peerDependencies": {
|
|
19
|
+
"@mariozechner/pi-agent-core": "*",
|
|
20
|
+
"@mariozechner/pi-ai": "*",
|
|
21
|
+
"@mariozechner/pi-coding-agent": "*",
|
|
22
|
+
"@sinclair/typebox": "*"
|
|
23
|
+
},
|
|
24
|
+
"devDependencies": {
|
|
25
|
+
"typescript": "^6.0.2"
|
|
26
|
+
}
|
|
27
|
+
}
|
|
@@ -0,0 +1,326 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Format bridge between Pi-AI Message[] and Headroom OpenAI message format.
|
|
3
|
+
*
|
|
4
|
+
* Pi-AI types:
|
|
5
|
+
* UserMessage { role: "user", content: string | (TextContent | ImageContent)[], timestamp }
|
|
6
|
+
* AssistantMessage { role: "assistant", content: (TextContent | ThinkingContent | ToolCall)[], api, provider, model, usage, stopReason, timestamp, ... }
|
|
7
|
+
* ToolResultMessage { role: "toolResult", toolCallId, toolName, content: (TextContent | ImageContent)[], details?, isError, timestamp }
|
|
8
|
+
*
|
|
9
|
+
* Headroom OpenAI types:
|
|
10
|
+
* SystemMessage { role: "system", content: string }
|
|
11
|
+
* UserMessage { role: "user", content: string | ContentPart[] }
|
|
12
|
+
* AssistantMessage { role: "assistant", content: string | null, tool_calls?: ToolCall[] }
|
|
13
|
+
* ToolMessage { role: "tool", content: string, tool_call_id: string }
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
import type {
|
|
17
|
+
Message,
|
|
18
|
+
UserMessage as PiUserMessage,
|
|
19
|
+
AssistantMessage as PiAssistantMessage,
|
|
20
|
+
ToolResultMessage as PiToolResultMessage,
|
|
21
|
+
TextContent,
|
|
22
|
+
ImageContent,
|
|
23
|
+
ToolCall as PiToolCall,
|
|
24
|
+
} from "@mariozechner/pi-ai";
|
|
25
|
+
|
|
26
|
+
import type {
|
|
27
|
+
OpenAIMessage,
|
|
28
|
+
ToolCall as OpenAIToolCall,
|
|
29
|
+
} from "headroom-ai";
|
|
30
|
+
|
|
31
|
+
// ─── Pi-AI → OpenAI ────────────────────────────────────────────────────
|
|
32
|
+
|
|
33
|
+
/**
|
|
34
|
+
* Convert Pi-AI Message[] to Headroom OpenAI format.
|
|
35
|
+
*
|
|
36
|
+
* - Strips ThinkingContent from assistant messages (opaque/encrypted, not useful for compression)
|
|
37
|
+
* - Serializes Pi tool call arguments (Record<string,any>) to JSON strings
|
|
38
|
+
* - Converts Pi ImageContent (base64) to OpenAI image_url content parts
|
|
39
|
+
*/
|
|
40
|
+
export function piToOpenAI(messages: Message[]): OpenAIMessage[] {
|
|
41
|
+
const result: OpenAIMessage[] = [];
|
|
42
|
+
|
|
43
|
+
for (const msg of messages) {
|
|
44
|
+
switch (msg.role) {
|
|
45
|
+
case "user":
|
|
46
|
+
result.push(convertUserMessage(msg));
|
|
47
|
+
break;
|
|
48
|
+
case "assistant":
|
|
49
|
+
result.push(convertAssistantMessage(msg));
|
|
50
|
+
break;
|
|
51
|
+
case "toolResult":
|
|
52
|
+
result.push(convertToolResultMessage(msg));
|
|
53
|
+
break;
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
return result;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
function convertUserMessage(msg: PiUserMessage): OpenAIMessage {
|
|
61
|
+
if (typeof msg.content === "string") {
|
|
62
|
+
return { role: "user", content: msg.content };
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
// Check if there are any images
|
|
66
|
+
const hasImages = msg.content.some((part) => part.type === "image");
|
|
67
|
+
|
|
68
|
+
if (!hasImages) {
|
|
69
|
+
// Text-only: join into a single string
|
|
70
|
+
const text = msg.content
|
|
71
|
+
.filter((p): p is TextContent => p.type === "text")
|
|
72
|
+
.map((p) => p.text)
|
|
73
|
+
.join("\n");
|
|
74
|
+
return { role: "user", content: text };
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
// Mixed content: convert to OpenAI content parts
|
|
78
|
+
const parts: Array<{ type: "text"; text: string } | { type: "image_url"; image_url: { url: string } }> = [];
|
|
79
|
+
for (const part of msg.content) {
|
|
80
|
+
if (part.type === "text") {
|
|
81
|
+
parts.push({ type: "text", text: part.text });
|
|
82
|
+
} else if (part.type === "image") {
|
|
83
|
+
const imgPart = part as ImageContent;
|
|
84
|
+
parts.push({
|
|
85
|
+
type: "image_url",
|
|
86
|
+
image_url: { url: `data:${imgPart.mimeType};base64,${imgPart.data}` },
|
|
87
|
+
});
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
return { role: "user", content: parts as any };
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
function convertAssistantMessage(msg: PiAssistantMessage): OpenAIMessage {
|
|
94
|
+
// Extract text parts (skip ThinkingContent)
|
|
95
|
+
const textParts = msg.content.filter((p): p is TextContent => p.type === "text");
|
|
96
|
+
const text = textParts.map((p) => p.text).join("");
|
|
97
|
+
|
|
98
|
+
// Extract tool calls
|
|
99
|
+
const toolCalls = msg.content.filter((p): p is PiToolCall => p.type === "toolCall");
|
|
100
|
+
|
|
101
|
+
const openaiMsg: any = {
|
|
102
|
+
role: "assistant",
|
|
103
|
+
content: text || null,
|
|
104
|
+
};
|
|
105
|
+
|
|
106
|
+
if (toolCalls.length > 0) {
|
|
107
|
+
openaiMsg.tool_calls = toolCalls.map(
|
|
108
|
+
(tc): OpenAIToolCall => ({
|
|
109
|
+
id: tc.id,
|
|
110
|
+
type: "function",
|
|
111
|
+
function: {
|
|
112
|
+
name: tc.name,
|
|
113
|
+
arguments: JSON.stringify(tc.arguments),
|
|
114
|
+
},
|
|
115
|
+
}),
|
|
116
|
+
);
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
return openaiMsg;
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
function convertToolResultMessage(msg: PiToolResultMessage): OpenAIMessage {
|
|
123
|
+
const text = msg.content
|
|
124
|
+
.filter((p): p is TextContent => p.type === "text")
|
|
125
|
+
.map((p) => p.text)
|
|
126
|
+
.join("\n");
|
|
127
|
+
|
|
128
|
+
return {
|
|
129
|
+
role: "tool",
|
|
130
|
+
content: text,
|
|
131
|
+
tool_call_id: msg.toolCallId,
|
|
132
|
+
};
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
// ─── OpenAI → Pi-AI ────────────────────────────────────────────────────
|
|
136
|
+
|
|
137
|
+
/**
|
|
138
|
+
* Convert compressed OpenAI messages back to Pi-AI Message[] format.
|
|
139
|
+
*
|
|
140
|
+
* Strategy: positional alignment with the original messages.
|
|
141
|
+
* - If message counts match, copy structural metadata from originals, take text from compressed.
|
|
142
|
+
* - If counts differ (compression merged/dropped messages), build fresh Pi messages.
|
|
143
|
+
*
|
|
144
|
+
* Note: The returned messages are used as a deep copy for a single LLM call,
|
|
145
|
+
* so losing metadata (timestamps, usage) is acceptable.
|
|
146
|
+
*/
|
|
147
|
+
export function openAIToPi(compressed: OpenAIMessage[], original: Message[]): Message[] {
|
|
148
|
+
// If counts match, use positional alignment
|
|
149
|
+
if (compressed.length === original.length) {
|
|
150
|
+
return compressed.map((compMsg, i) => alignMessage(compMsg, original[i]));
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
// Counts differ: build fresh messages
|
|
154
|
+
return compressed.map((compMsg) => buildFreshMessage(compMsg));
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
/**
|
|
158
|
+
* Align a compressed OpenAI message with its original Pi message,
|
|
159
|
+
* preserving structural metadata from the original.
|
|
160
|
+
*/
|
|
161
|
+
function alignMessage(comp: OpenAIMessage, orig: Message): Message {
|
|
162
|
+
switch (comp.role) {
|
|
163
|
+
case "system":
|
|
164
|
+
case "user":
|
|
165
|
+
return alignUserMessage(comp, orig);
|
|
166
|
+
case "assistant":
|
|
167
|
+
return alignAssistantMessage(comp, orig);
|
|
168
|
+
case "tool":
|
|
169
|
+
return alignToolResultMessage(comp, orig);
|
|
170
|
+
default:
|
|
171
|
+
return buildFreshMessage(comp);
|
|
172
|
+
}
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
function alignUserMessage(comp: OpenAIMessage & { role: "system" | "user" }, orig: Message): Message {
|
|
176
|
+
const content = typeof comp.content === "string"
|
|
177
|
+
? comp.content
|
|
178
|
+
: Array.isArray(comp.content)
|
|
179
|
+
? (comp.content as any[]).filter((p: any) => p.type === "text").map((p: any) => p.text).join("\n")
|
|
180
|
+
: "";
|
|
181
|
+
|
|
182
|
+
if (orig.role === "user") {
|
|
183
|
+
return {
|
|
184
|
+
...orig,
|
|
185
|
+
content: [{ type: "text", text: content }],
|
|
186
|
+
};
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
// Role mismatch: build fresh
|
|
190
|
+
return {
|
|
191
|
+
role: "user",
|
|
192
|
+
content: [{ type: "text", text: content }],
|
|
193
|
+
timestamp: orig.timestamp ?? Date.now(),
|
|
194
|
+
};
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
function alignAssistantMessage(comp: OpenAIMessage & { role: "assistant" }, orig: Message): Message {
|
|
198
|
+
const contentParts: PiAssistantMessage["content"] = [];
|
|
199
|
+
|
|
200
|
+
// Add text content
|
|
201
|
+
const text = typeof comp.content === "string" ? comp.content : null;
|
|
202
|
+
if (text) {
|
|
203
|
+
contentParts.push({ type: "text", text });
|
|
204
|
+
}
|
|
205
|
+
|
|
206
|
+
// Add tool calls
|
|
207
|
+
if (comp.tool_calls) {
|
|
208
|
+
for (const tc of comp.tool_calls) {
|
|
209
|
+
contentParts.push({
|
|
210
|
+
type: "toolCall",
|
|
211
|
+
id: tc.id,
|
|
212
|
+
name: tc.function.name,
|
|
213
|
+
arguments: safeJsonParse(tc.function.arguments),
|
|
214
|
+
});
|
|
215
|
+
}
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
// Preserve thinking content from original if it was an assistant message
|
|
219
|
+
if (orig.role === "assistant") {
|
|
220
|
+
const thinkingParts = orig.content.filter((p) => p.type === "thinking");
|
|
221
|
+
return {
|
|
222
|
+
...orig,
|
|
223
|
+
content: [...thinkingParts, ...contentParts],
|
|
224
|
+
};
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
// Role mismatch: build fresh
|
|
228
|
+
return buildFreshAssistantMessage(comp);
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
function alignToolResultMessage(comp: OpenAIMessage & { role: "tool" }, orig: Message): Message {
|
|
232
|
+
if (orig.role === "toolResult") {
|
|
233
|
+
return {
|
|
234
|
+
...orig,
|
|
235
|
+
content: [{ type: "text", text: comp.content }],
|
|
236
|
+
};
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
// Role mismatch: build fresh
|
|
240
|
+
return buildFreshToolResultMessage(comp);
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
// ─── Fresh message builders (when positional alignment fails) ───────────
|
|
244
|
+
|
|
245
|
+
function buildFreshMessage(comp: OpenAIMessage): Message {
|
|
246
|
+
switch (comp.role) {
|
|
247
|
+
case "system":
|
|
248
|
+
case "user":
|
|
249
|
+
return buildFreshUserMessage(comp);
|
|
250
|
+
case "assistant":
|
|
251
|
+
return buildFreshAssistantMessage(comp);
|
|
252
|
+
case "tool":
|
|
253
|
+
return buildFreshToolResultMessage(comp);
|
|
254
|
+
default:
|
|
255
|
+
return {
|
|
256
|
+
role: "user",
|
|
257
|
+
content: [{ type: "text", text: String((comp as any).content ?? "") }],
|
|
258
|
+
timestamp: Date.now(),
|
|
259
|
+
};
|
|
260
|
+
}
|
|
261
|
+
}
|
|
262
|
+
|
|
263
|
+
function buildFreshUserMessage(comp: { role: string; content: any }): PiUserMessage {
|
|
264
|
+
const content = typeof comp.content === "string"
|
|
265
|
+
? comp.content
|
|
266
|
+
: Array.isArray(comp.content)
|
|
267
|
+
? (comp.content as any[]).filter((p: any) => p.type === "text").map((p: any) => p.text).join("\n")
|
|
268
|
+
: "";
|
|
269
|
+
|
|
270
|
+
return {
|
|
271
|
+
role: "user",
|
|
272
|
+
content: [{ type: "text", text: content }],
|
|
273
|
+
timestamp: Date.now(),
|
|
274
|
+
};
|
|
275
|
+
}
|
|
276
|
+
|
|
277
|
+
function buildFreshAssistantMessage(comp: OpenAIMessage & { role: "assistant" }): PiAssistantMessage {
|
|
278
|
+
const contentParts: PiAssistantMessage["content"] = [];
|
|
279
|
+
|
|
280
|
+
if (typeof comp.content === "string" && comp.content) {
|
|
281
|
+
contentParts.push({ type: "text", text: comp.content });
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
if (comp.tool_calls) {
|
|
285
|
+
for (const tc of comp.tool_calls) {
|
|
286
|
+
contentParts.push({
|
|
287
|
+
type: "toolCall",
|
|
288
|
+
id: tc.id,
|
|
289
|
+
name: tc.function.name,
|
|
290
|
+
arguments: safeJsonParse(tc.function.arguments),
|
|
291
|
+
});
|
|
292
|
+
}
|
|
293
|
+
}
|
|
294
|
+
|
|
295
|
+
return {
|
|
296
|
+
role: "assistant",
|
|
297
|
+
content: contentParts,
|
|
298
|
+
api: "openai-completions",
|
|
299
|
+
provider: "unknown",
|
|
300
|
+
model: "unknown",
|
|
301
|
+
usage: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, totalTokens: 0, cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 } },
|
|
302
|
+
stopReason: "stop",
|
|
303
|
+
timestamp: Date.now(),
|
|
304
|
+
};
|
|
305
|
+
}
|
|
306
|
+
|
|
307
|
+
function buildFreshToolResultMessage(comp: OpenAIMessage & { role: "tool" }): PiToolResultMessage {
|
|
308
|
+
return {
|
|
309
|
+
role: "toolResult",
|
|
310
|
+
toolCallId: comp.tool_call_id,
|
|
311
|
+
toolName: "unknown",
|
|
312
|
+
content: [{ type: "text", text: comp.content }],
|
|
313
|
+
isError: false,
|
|
314
|
+
timestamp: Date.now(),
|
|
315
|
+
};
|
|
316
|
+
}
|
|
317
|
+
|
|
318
|
+
// ─── Helpers ────────────────────────────────────────────────────────────
|
|
319
|
+
|
|
320
|
+
function safeJsonParse(str: string): Record<string, any> {
|
|
321
|
+
try {
|
|
322
|
+
return JSON.parse(str);
|
|
323
|
+
} catch {
|
|
324
|
+
return { _raw: str };
|
|
325
|
+
}
|
|
326
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,341 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* pi-headroom — Transparent LLM context compression for Pi using Headroom.
|
|
3
|
+
*
|
|
4
|
+
* Hooks into Pi's `context` event to compress messages before every LLM call.
|
|
5
|
+
* Automatically installs and manages the Headroom proxy (zero-config).
|
|
6
|
+
*
|
|
7
|
+
* Set HEADROOM_URL to skip auto-management and use your own proxy.
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
|
11
|
+
import { convertToLlm } from "@mariozechner/pi-coding-agent";
|
|
12
|
+
import { HeadroomClient, compress } from "headroom-ai";
|
|
13
|
+
import type { CompressResult } from "headroom-ai";
|
|
14
|
+
import { piToOpenAI, openAIToPi } from "./format-bridge.js";
|
|
15
|
+
import { ProxyManager } from "./proxy-manager.js";
|
|
16
|
+
|
|
17
|
+
export default function headroomExtension(pi: ExtensionAPI) {
  // ─── State ──────────────────────────────────────────────────────────

  // Master switch, toggled by /headroom on|off.
  let enabled = true;
  // null = not yet checked; set to true/false after a health check or start attempt.
  let proxyAvailable: boolean | null = null;
  // Ensures the "proxy unavailable" notification fires at most once per outage.
  let proxyWarningShown = false;
  // One automatic proxy restart is allowed per session (see the context handler).
  let restartAttempted = false;

  // Result of the most recent successful compression (shown by /headroom status).
  let lastStats: {
    tokensBefore: number;
    tokensAfter: number;
    tokensSaved: number;
    ratio: number;
    transforms: string[];
  } = { tokensBefore: 0, tokensAfter: 0, tokensSaved: 0, ratio: 1.0, transforms: [] };

  // Running totals across the whole session.
  let sessionTotals = { calls: 0, tokensSaved: 0 };

  // ─── Configuration ──────────────────────────────────────────────────

  // HEADROOM_URL switches to manual mode: no auto install/start/stop, only health checks.
  const userUrl = process.env.HEADROOM_URL;
  const autoManage = !userUrl;
  const port = parseInt(process.env.HEADROOM_PORT || "8787", 10);
  // proxyManager is null in manual mode — used as the mode discriminator below.
  const proxyManager = autoManage ? new ProxyManager({ port }) : null;
  const baseUrl = userUrl || `http://127.0.0.1:${port}`;
  const client = new HeadroomClient({ baseUrl, fallback: true, timeout: 15_000 });

  /** Simple health check — the SDK doesn't expose one, so we hit the proxy directly. */
  async function checkProxyHealth(): Promise<boolean> {
    try {
      const res = await fetch(`${baseUrl}/health`, { signal: AbortSignal.timeout(5_000) });
      return res.ok;
    } catch {
      // Network error or 5s timeout both count as unhealthy.
      return false;
    }
  }

  // ─── Session start: install/start proxy or health-check ─────────────

  pi.on("session_start", async (_event, ctx) => {
    // Reset per-session state.
    proxyWarningShown = false;
    restartAttempted = false;
    sessionTotals = { calls: 0, tokensSaved: 0 };

    if (proxyManager) {
      // Auto-manage mode
      ctx.ui.setStatus("headroom", ctx.ui.theme.fg("dim", "⏳ Headroom starting..."));

      // ensureRunning reports progress (install/spawn/poll) via the callback.
      const ok = await proxyManager.ensureRunning((msg) => {
        ctx.ui.setStatus("headroom", ctx.ui.theme.fg("dim", `⏳ ${msg}`));
      });

      if (ok) {
        proxyAvailable = true;
        ctx.ui.setStatus(
          "headroom",
          ctx.ui.theme.fg("success", "✓") + ctx.ui.theme.fg("dim", " Headroom"),
        );
      } else {
        proxyAvailable = false;
        ctx.ui.setStatus(
          "headroom",
          ctx.ui.theme.fg("warning", "⚠") + ctx.ui.theme.fg("dim", " Headroom offline"),
        );
        ctx.ui.notify(
          "Headroom proxy could not be started. Context compression disabled.\nRun /headroom-health for details.",
          "warning",
        );
      }
    } else {
      // User-managed mode: just health-check
      const healthy = await checkProxyHealth();
      if (healthy) {
        proxyAvailable = true;
        ctx.ui.setStatus(
          "headroom",
          ctx.ui.theme.fg("success", "✓") + ctx.ui.theme.fg("dim", " Headroom"),
        );
      } else {
        proxyAvailable = false;
        ctx.ui.setStatus(
          "headroom",
          ctx.ui.theme.fg("warning", "⚠") + ctx.ui.theme.fg("dim", " Headroom offline"),
        );
      }
    }
  });

  // ─── Session shutdown: stop proxy if we started it ──────────────────

  pi.on("session_shutdown", async () => {
    if (proxyManager) {
      // stop() is a no-op when the proxy was externally started — TODO confirm
      // against ProxyManager (not visible in this file).
      await proxyManager.stop();
    }
  });

  // ─── Core: compress context before every LLM call ───────────────────

  pi.on("context", async (event, ctx) => {
    // Returning undefined leaves the context unchanged (passthrough).
    if (!enabled || proxyAvailable === false) return;

    // Convert AgentMessage[] → Pi-AI Message[] → OpenAI format
    const piMessages = convertToLlm(event.messages);
    if (piMessages.length === 0) return;

    const openaiMessages = piToOpenAI(piMessages);
    if (openaiMessages.length === 0) return;

    try {
      const result: CompressResult = await compress(openaiMessages, {
        client,
        model: ctx.model?.id ?? "gpt-4o",
        fallback: true,
      });

      // Proxy responded but found nothing worth compressing.
      if (!result.compressed || result.tokensSaved <= 0) {
        ctx.ui.setStatus(
          "headroom",
          ctx.ui.theme.fg("success", "✓") +
            ctx.ui.theme.fg("dim", ` Headroom (${openaiMessages.length} msgs, no compression needed)`),
        );
        return;
      }

      // Convert compressed OpenAI → Pi-AI Message[]
      const compressedPiMessages = openAIToPi(result.messages, piMessages);

      // Update stats
      lastStats = {
        tokensBefore: result.tokensBefore,
        tokensAfter: result.tokensAfter,
        tokensSaved: result.tokensSaved,
        ratio: result.compressionRatio,
        transforms: result.transformsApplied,
      };
      sessionTotals.calls++;
      sessionTotals.tokensSaved += result.tokensSaved;

      // Update status bar
      const saved = result.tokensSaved.toLocaleString();
      const pct = Math.round((1 - result.compressionRatio) * 100);
      const theme = ctx.ui.theme;
      ctx.ui.setStatus(
        "headroom",
        theme.fg("success", "✓") + theme.fg("dim", ` Headroom -${pct}% (${saved} saved)`),
      );

      // Returning a messages override makes Pi send the compressed context.
      return { messages: compressedPiMessages as any };
    } catch (error) {
      if (!proxyWarningShown) {
        proxyWarningShown = true;
        proxyAvailable = false;

        const errMsg = error instanceof Error ? error.message : String(error);
        ctx.ui.notify(`Headroom proxy unavailable: ${errMsg}`, "warning");
        ctx.ui.setStatus(
          "headroom",
          ctx.ui.theme.fg("warning", "⚠") + ctx.ui.theme.fg("dim", " Headroom offline"),
        );
      }

      // Mid-session crash recovery (one attempt per session)
      if (proxyManager && !restartAttempted) {
        restartAttempted = true;
        const recovered = await proxyManager.tryRestart((msg) => {
          ctx.ui.setStatus("headroom", ctx.ui.theme.fg("dim", `⏳ ${msg}`));
        });
        if (recovered) {
          proxyAvailable = true;
          proxyWarningShown = false;
          ctx.ui.setStatus(
            "headroom",
            ctx.ui.theme.fg("success", "✓") + ctx.ui.theme.fg("dim", " Headroom"),
          );
          // Don't retry compression this call — next context event will use it
        }
      }

      // On any failure, fall through with the uncompressed context.
      return;
    }
  });

  // ─── /headroom command — toggle and status ──────────────────────────

  pi.registerCommand("headroom", {
    description: "Toggle Headroom compression or show status. Usage: /headroom [on|off|status]",
    handler: async (args, ctx) => {
      const arg = args.trim().toLowerCase();

      if (arg === "on") {
        // Re-enabling also clears the once-per-outage warning/restart flags.
        enabled = true;
        proxyWarningShown = false;
        restartAttempted = false;

        if (proxyManager) {
          // Try to start the proxy
          ctx.ui.setStatus("headroom", ctx.ui.theme.fg("dim", "⏳ Starting..."));
          const ok = await proxyManager.ensureRunning((msg) => {
            ctx.ui.setStatus("headroom", ctx.ui.theme.fg("dim", `⏳ ${msg}`));
          });
          if (ok) {
            proxyAvailable = true;
            ctx.ui.notify("Headroom compression enabled", "info");
            ctx.ui.setStatus(
              "headroom",
              ctx.ui.theme.fg("success", "✓") + ctx.ui.theme.fg("dim", " Headroom"),
            );
          } else {
            proxyAvailable = false;
            ctx.ui.notify("Headroom enabled but proxy could not be started", "warning");
            ctx.ui.setStatus(
              "headroom",
              ctx.ui.theme.fg("warning", "⚠") + ctx.ui.theme.fg("dim", " Headroom offline"),
            );
          }
        } else {
          // User-managed: just health-check
          const ok2 = await checkProxyHealth();
          if (ok2) {
            proxyAvailable = true;
            ctx.ui.notify("Headroom compression enabled", "info");
            ctx.ui.setStatus(
              "headroom",
              ctx.ui.theme.fg("success", "✓") + ctx.ui.theme.fg("dim", " Headroom"),
            );
          } else {
            proxyAvailable = false;
            ctx.ui.notify("Headroom enabled but proxy is offline", "warning");
            ctx.ui.setStatus(
              "headroom",
              ctx.ui.theme.fg("warning", "⚠") + ctx.ui.theme.fg("dim", " Headroom offline"),
            );
          }
        }
        return;
      }

      if (arg === "off") {
        enabled = false;
        ctx.ui.notify("Headroom compression disabled", "info");
        ctx.ui.setStatus("headroom", ctx.ui.theme.fg("dim", "○ Headroom off"));
        return;
      }

      // Status (default)
      const managedStr = proxyManager
        ? proxyManager.isManaged
          ? "auto (managed by extension)"
          : "auto (external proxy detected)"
        : "manual (HEADROOM_URL set)";

      const lines = [
        `Headroom Context Compression`,
        ` Enabled: ${enabled ? "yes" : "no"}`,
        ` Proxy: ${baseUrl} (${proxyAvailable === true ? "online" : proxyAvailable === false ? "offline" : "unknown"})`,
        ` Mode: ${managedStr}`,
        ``,
        `Session stats:`,
        ` Compressions: ${sessionTotals.calls}`,
        ` Tokens saved: ${sessionTotals.tokensSaved.toLocaleString()}`,
      ];

      // Only show "last compression" once at least one compression has run.
      if (lastStats.tokensBefore > 0) {
        const pct = Math.round((1 - lastStats.ratio) * 100);
        lines.push(
          ``,
          `Last compression:`,
          ` ${lastStats.tokensBefore.toLocaleString()} → ${lastStats.tokensAfter.toLocaleString()} tokens (-${pct}%)`,
          ` Transforms: ${lastStats.transforms.join(", ") || "none"}`,
        );
      }

      ctx.ui.notify(lines.join("\n"), "info");
    },
  });

  // ─── /headroom-health command — proxy diagnostics ───────────────────

  pi.registerCommand("headroom-health", {
    description: "Check Headroom proxy health and show diagnostics",
    handler: async (_args, ctx) => {
      ctx.ui.notify(`Checking Headroom proxy at ${baseUrl}...`, "info");

      const isHealthy = await checkProxyHealth();
      if (isHealthy) {
        proxyAvailable = true;

        const lines = [
          `Headroom proxy: online`,
          ` URL: ${baseUrl}`,
        ];

        if (proxyManager) {
          lines.push(` Managed: ${proxyManager.isManaged ? "yes (started by extension)" : "no (external)"}`);
        }

        ctx.ui.notify(lines.join("\n"), "info");
        ctx.ui.setStatus(
          "headroom",
          ctx.ui.theme.fg("success", "✓") + ctx.ui.theme.fg("dim", " Headroom"),
        );
      } else {
        proxyAvailable = false;
        const errMsg = "proxy did not respond";
        const helpLines = [
          `Headroom proxy offline`,
          ` URL: ${baseUrl}`,
          ` Error: ${errMsg}`,
        ];

        if (proxyManager) {
          helpLines.push(``, `The extension will auto-start the proxy on next session.`, `Or run: /headroom on`);
        } else {
          helpLines.push(``, `Start the proxy manually:`, ` headroom proxy`, ` # or`, ` pip install "headroom-ai[proxy]" && headroom proxy`);
        }

        ctx.ui.notify(helpLines.join("\n"), "error");
        ctx.ui.setStatus(
          "headroom",
          ctx.ui.theme.fg("warning", "⚠") + ctx.ui.theme.fg("dim", " Headroom offline"),
        );
      }
    },
  });
}
|
|
@@ -0,0 +1,364 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Headroom proxy lifecycle manager.
|
|
3
|
+
*
|
|
4
|
+
* Handles: Python detection, venv creation, pip install, background proxy spawn,
|
|
5
|
+
* health polling, graceful shutdown, and crash recovery.
|
|
6
|
+
*
|
|
7
|
+
* Uses a dedicated venv (~/.pi/headroom-venv/) to avoid PEP 668 issues on
|
|
8
|
+
* macOS/Homebrew and to keep the system Python clean.
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import { execFile, spawn, type ChildProcess } from "node:child_process";
|
|
12
|
+
import { existsSync } from "node:fs";
|
|
13
|
+
import { join } from "node:path";
|
|
14
|
+
import { homedir } from "node:os";
|
|
15
|
+
|
|
16
|
+
// True on Windows — selects the venv layout ("Scripts" vs "bin") and the
// ".exe" suffix on venv executables below.
const IS_WINDOWS = process.platform === "win32";
// Dedicated venv under the user's home directory (~/.pi/headroom-venv); per the
// file header this sidesteps PEP 668 and keeps the system Python untouched.
const VENV_DIR = join(homedir(), ".pi", "headroom-venv");
// Venv executables live in "Scripts" on Windows, "bin" elsewhere.
const VENV_BIN = IS_WINDOWS ? join(VENV_DIR, "Scripts") : join(VENV_DIR, "bin");
// Interpreter inside the venv — used for `-m venv`, `-m pip`, `-m headroom.cli`.
const VENV_PYTHON = join(VENV_BIN, IS_WINDOWS ? "python.exe" : "python");
// CLI entry point that `pip install headroom-ai[proxy]` places in the venv.
const VENV_HEADROOM = join(VENV_BIN, IS_WINDOWS ? "headroom.exe" : "headroom");
|
|
21
|
+
|
|
22
|
+
// ─── Python detection ─────────────────────────────────────────────────
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Find a Python >=3.10 interpreter. Tries python3 then python.
|
|
26
|
+
* Returns the command string or null if not found.
|
|
27
|
+
*/
|
|
28
|
+
export async function findPython(): Promise<string | null> {
|
|
29
|
+
for (const cmd of ["python3", "python"]) {
|
|
30
|
+
const version = await getPythonVersion(cmd);
|
|
31
|
+
if (version && version.major >= 3 && version.minor >= 10) {
|
|
32
|
+
return cmd;
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
return null;
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
async function getPythonVersion(
|
|
39
|
+
cmd: string,
|
|
40
|
+
): Promise<{ major: number; minor: number } | null> {
|
|
41
|
+
try {
|
|
42
|
+
const output = await execAsync(cmd, ["--version"]);
|
|
43
|
+
// "Python 3.12.4"
|
|
44
|
+
const match = output.match(/Python (\d+)\.(\d+)/);
|
|
45
|
+
if (match) {
|
|
46
|
+
return { major: parseInt(match[1], 10), minor: parseInt(match[2], 10) };
|
|
47
|
+
}
|
|
48
|
+
} catch {
|
|
49
|
+
// Command not found or errored
|
|
50
|
+
}
|
|
51
|
+
return null;
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
// ─── Venv + install ───────────────────────────────────────────────────
|
|
55
|
+
|
|
56
|
+
/**
|
|
57
|
+
* Ensure the headroom venv exists and headroom-ai[proxy] is installed.
|
|
58
|
+
* Returns the path to the headroom CLI in the venv, or null on failure.
|
|
59
|
+
*/
|
|
60
|
+
/**
 * Ensure the headroom venv exists and headroom-ai[proxy] is installed.
 * Returns the path to the headroom CLI in the venv, or null on failure.
 *
 * Progress and failures are reported through `onStatus`; this function never
 * throws. NOTE: even when only the `python -m headroom.cli` module works (no
 * CLI binary), this still returns null — `ensureInstalled` probes the module
 * path itself as a separate fallback.
 */
async function ensureVenv(
  onStatus: (msg: string) => void,
): Promise<string | null> {
  // 1. If venv already has headroom, we're done
  if (existsSync(VENV_HEADROOM)) {
    return VENV_HEADROOM;
  }

  // 2. Find system Python (>=3.10 per findPython)
  const python = await findPython();
  if (!python) {
    onStatus("Python >=3.10 not found — cannot install Headroom");
    return null;
  }

  // 3. Create venv if it doesn't exist (1-minute timeout)
  if (!existsSync(VENV_PYTHON)) {
    onStatus("Creating Headroom venv...");
    try {
      await execAsync(python, ["-m", "venv", VENV_DIR], 60_000);
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      onStatus(`Failed to create venv: ${msg}`);
      return null;
    }
  }

  // 4. Install headroom-ai[proxy] into the venv via the venv's own pip
  onStatus("Installing headroom-ai (this may take a minute)...");
  try {
    await execAsync(
      VENV_PYTHON,
      ["-m", "pip", "install", "headroom-ai[proxy]", "--quiet", "--disable-pip-version-check"],
      180_000, // 3 minute timeout — first install downloads many deps
    );
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    onStatus(`pip install failed: ${msg}`);
    return null;
  }

  // 5. Verify the CLI binary appeared in the venv
  if (existsSync(VENV_HEADROOM)) {
    return VENV_HEADROOM;
  }

  // Fallback: try via python -m. Success is deliberately NOT surfaced here
  // (returns null without the error status); the module invocation is picked
  // up by ensureInstalled's own fallback probe.
  try {
    await execAsync(VENV_PYTHON, ["-m", "headroom.cli", "--help"], 10_000);
    return null; // CLI binary doesn't exist, but module works — handled separately
  } catch {
    // pass
  }

  onStatus("headroom installed but CLI not found in venv");
  return null;
}
|
|
117
|
+
|
|
118
|
+
/**
|
|
119
|
+
* Ensure headroom is available. Checks system PATH first, then venv.
|
|
120
|
+
* Returns the invocation method: { cmd, args } to spawn the proxy,
|
|
121
|
+
* or null if installation failed.
|
|
122
|
+
*/
|
|
123
|
+
export async function ensureInstalled(
|
|
124
|
+
onStatus: (msg: string) => void,
|
|
125
|
+
): Promise<{ cmd: string; args: string[] } | null> {
|
|
126
|
+
// 1. Check if `headroom` CLI is already on system PATH
|
|
127
|
+
if (await isCommandAvailable("headroom", ["--help"])) {
|
|
128
|
+
return { cmd: "headroom", args: [] };
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
// 2. Check if venv already has headroom
|
|
132
|
+
if (existsSync(VENV_HEADROOM) && await isCommandAvailable(VENV_HEADROOM, ["--help"])) {
|
|
133
|
+
return { cmd: VENV_HEADROOM, args: [] };
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
// 3. Create venv and install
|
|
137
|
+
const headroomPath = await ensureVenv(onStatus);
|
|
138
|
+
if (headroomPath) {
|
|
139
|
+
return { cmd: headroomPath, args: [] };
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
// 4. Fallback: try module invocation in venv
|
|
143
|
+
if (existsSync(VENV_PYTHON) && await isCommandAvailable(VENV_PYTHON, ["-m", "headroom.cli", "--help"])) {
|
|
144
|
+
return { cmd: VENV_PYTHON, args: ["-m", "headroom.cli"] };
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
return null;
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
async function isCommandAvailable(cmd: string, args: string[]): Promise<boolean> {
|
|
151
|
+
try {
|
|
152
|
+
await execAsync(cmd, args, 10_000);
|
|
153
|
+
return true;
|
|
154
|
+
} catch {
|
|
155
|
+
return false;
|
|
156
|
+
}
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
// ─── ProxyManager ─────────────────────────────────────────────────────
|
|
160
|
+
|
|
161
|
+
/**
 * Owns the lifecycle of a single Headroom proxy process: detect an already
 * running proxy, install the CLI if missing, spawn it, poll until healthy,
 * and shut it down again. Only a proxy this instance started is ever killed
 * (`weStartedIt` guards every destructive path).
 */
export class ProxyManager {
  // Child handle for the proxy we spawned; null when not running / not ours.
  private proc: ChildProcess | null = null;
  // True only after WE spawned the proxy and saw it become healthy.
  private weStartedIt = false;
  // Set by stop(); aborts in-flight ensureRunning() polling.
  private stopping = false;
  private port: number;
  private host: string;
  /** Stored invocation method from ensureInstalled */
  private invocation: { cmd: string; args: string[] } | null = null;

  constructor(options?: { port?: number; host?: string }) {
    this.port = options?.port ?? 8787;
    this.host = options?.host ?? "127.0.0.1";
  }

  /** Base URL the proxy listens on, e.g. "http://127.0.0.1:8787". */
  get baseUrl(): string {
    return `http://${this.host}:${this.port}`;
  }

  /** True when this extension started (and still tracks) the proxy. */
  get isManaged(): boolean {
    return this.weStartedIt;
  }

  // ── Full lifecycle: detect → install → start → health-check ───────

  /**
   * Bring the proxy up end-to-end. Reuses an already-healthy proxy
   * (external or our own) without touching it; otherwise installs headroom
   * if needed, spawns it, and polls /health for up to ~12.5s total.
   *
   * @param onStatus Progress/failure callback for UI feedback.
   * @returns true once a healthy proxy is reachable at baseUrl.
   */
  async ensureRunning(onStatus: (msg: string) => void): Promise<boolean> {
    if (this.stopping) return false;

    // 1. Already running? (external or our own)
    onStatus("Checking for running proxy...");
    if (await this.healthCheck()) {
      return true; // Don't touch it — someone else's proxy or our still-alive one
    }

    // 2. Ensure headroom is installed (venv-based)
    const invocation = await ensureInstalled(onStatus);
    if (!invocation) return false;
    this.invocation = invocation;

    // 3. Spawn proxy
    this.startProxy(onStatus);

    // 4. Poll for health with backoff (delays sum to 12.5s)
    const delays = [500, 1000, 1000, 2000, 2000, 2000, 2000, 2000];
    for (const delay of delays) {
      if (this.stopping) return false;
      await sleep(delay);

      // If process exited already, bail early
      if (this.proc && this.proc.exitCode !== null) {
        onStatus("Headroom proxy exited unexpectedly");
        this.proc = null;
        return false;
      }

      if (await this.healthCheck()) {
        // Mark managed only after it proved healthy.
        this.weStartedIt = true;
        return true;
      }
    }

    // 5. Timed out — kill and report failure
    onStatus("Headroom proxy failed to start (health check timeout)");
    this.killProcess();
    return false;
  }

  // ── Health check ──────────────────────────────────────────────────

  /** GET {baseUrl}/health with a 3s abort; any error/refusal → false. */
  async healthCheck(): Promise<boolean> {
    try {
      const res = await fetch(`${this.baseUrl}/health`, {
        signal: AbortSignal.timeout(3000),
      });
      return res.ok;
    } catch {
      return false;
    }
  }

  // ── Stop proxy (if we started it) ─────────────────────────────────

  /**
   * Shut the proxy down, but only if this instance started it. External
   * proxies are left alone. Non-Windows: SIGTERM, then SIGKILL after a 3s
   * grace period; Windows: plain kill() (no POSIX signals).
   */
  async stop(): Promise<void> {
    this.stopping = true;

    if (!this.proc || !this.weStartedIt) {
      this.proc = null;
      this.weStartedIt = false;
      return;
    }

    // Detach state first so concurrent callers see "not running".
    const proc = this.proc;
    this.proc = null;
    this.weStartedIt = false;

    // Send SIGTERM (or hard-kill on Windows)
    try {
      if (IS_WINDOWS) {
        proc.kill();
      } else {
        proc.kill("SIGTERM");

        // Wait up to 3s for graceful exit
        const exited = await Promise.race([
          new Promise<boolean>((resolve) => {
            proc.on("exit", () => resolve(true));
          }),
          sleep(3000).then(() => false),
        ]);

        if (!exited && proc.exitCode === null) {
          proc.kill("SIGKILL");
        }
      }
    } catch {
      // Process may already be dead
    }
  }

  // ── Crash recovery ────────────────────────────────────────────────

  /**
   * Try to restart the proxy once if it crashed.
   * Returns true if recovered.
   *
   * No-op (false) when the proxy was never ours or is still alive.
   */
  async tryRestart(onStatus: (msg: string) => void): Promise<boolean> {
    if (!this.weStartedIt) return false;
    if (this.proc && this.proc.exitCode === null) return false; // still running

    onStatus("Headroom proxy crashed, restarting...");
    // Reset all lifecycle flags before re-entering ensureRunning.
    this.proc = null;
    this.weStartedIt = false;
    this.stopping = false;
    return this.ensureRunning(onStatus);
  }

  // ── Private: spawn the proxy ──────────────────────────────────────

  /**
   * Spawn the proxy in the background using the invocation resolved by
   * ensureInstalled. Fire-and-forget: health is verified by the caller's
   * polling loop, not here.
   */
  private startProxy(onStatus: (msg: string) => void): void {
    onStatus("Starting Headroom proxy...");

    const inv = this.invocation;
    if (!inv) return;

    // Build args: e.g. ["proxy", "--port", "8787", "--host", "127.0.0.1"]
    // or ["-m", "headroom.cli", "proxy", "--port", "8787", ...]
    const spawnArgs = [...inv.args, "proxy", "--port", String(this.port), "--host", this.host];

    const proc = spawn(inv.cmd, spawnArgs, {
      stdio: ["ignore", "pipe", "pipe"],
      detached: false,
      env: { ...process.env },
    });

    proc.on("error", () => {
      // spawn error (e.g. command not found) — don't crash
    });

    proc.on("exit", () => {
      // Identity check: only clear state if this is still the tracked proc
      // (a restart may have replaced it).
      if (this.proc === proc) {
        this.proc = null;
        this.weStartedIt = false;
      }
    });

    // Unref streams so they don't keep the event loop alive on shutdown
    (proc.stdout as any)?.unref?.();
    (proc.stderr as any)?.unref?.();

    this.proc = proc;
  }

  /** Immediate, ungraceful kill (SIGKILL off-Windows); used on startup timeout. */
  private killProcess(): void {
    if (this.proc) {
      try {
        this.proc.kill(IS_WINDOWS ? undefined : "SIGKILL");
      } catch {
        // Already dead
      }
      this.proc = null;
    }
  }
}
|
|
343
|
+
|
|
344
|
+
// ─── Helpers ──────────────────────────────────────────────────────────
|
|
345
|
+
|
|
346
|
+
function sleep(ms: number): Promise<void> {
|
|
347
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
348
|
+
}
|
|
349
|
+
|
|
350
|
+
function execAsync(
|
|
351
|
+
cmd: string,
|
|
352
|
+
args: string[],
|
|
353
|
+
timeoutMs = 15_000,
|
|
354
|
+
): Promise<string> {
|
|
355
|
+
return new Promise((resolve, reject) => {
|
|
356
|
+
execFile(cmd, args, { timeout: timeoutMs }, (error, stdout, stderr) => {
|
|
357
|
+
if (error) {
|
|
358
|
+
reject(new Error(stderr || error.message));
|
|
359
|
+
} else {
|
|
360
|
+
resolve((stdout || "") + (stderr || ""));
|
|
361
|
+
}
|
|
362
|
+
});
|
|
363
|
+
});
|
|
364
|
+
}
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
{
|
|
2
|
+
"compilerOptions": {
|
|
3
|
+
"target": "ES2022",
|
|
4
|
+
"module": "ESNext",
|
|
5
|
+
"moduleResolution": "bundler",
|
|
6
|
+
"strict": true,
|
|
7
|
+
"esModuleInterop": true,
|
|
8
|
+
"skipLibCheck": true,
|
|
9
|
+
"outDir": "./dist",
|
|
10
|
+
"rootDir": "./src",
|
|
11
|
+
"declaration": true,
|
|
12
|
+
"sourceMap": true
|
|
13
|
+
},
|
|
14
|
+
"include": ["src/**/*.ts"]
|
|
15
|
+
}
|