@oh-my-pi/pi-coding-agent 5.8.0 → 6.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,249 @@
+ import { homedir, platform } from "node:os";
+ import { join } from "node:path";
+ import { TOML } from "bun";
+ import { logger } from "../../logger";
+
+ /**
+  * lspmux integration for LSP server multiplexing.
+  *
+  * When lspmux is available and running, this module wraps supported LSP server
+  * commands to use lspmux client mode, enabling server instance sharing across
+  * multiple editor windows.
+  *
+  * Integration is transparent: if lspmux is unavailable, falls back to direct spawning.
+  */
+
+ // =============================================================================
+ // Types
+ // =============================================================================
+
+ interface LspmuxConfig {
+   instance_timeout?: number;
+   gc_interval?: number;
+   listen?: [string, number] | string;
+   connect?: [string, number] | string;
+   log_filters?: string;
+   pass_environment?: string[];
+ }
+
+ interface LspmuxState {
+   available: boolean;
+   running: boolean;
+   binaryPath: string | null;
+   config: LspmuxConfig | null;
+ }
+
+ // =============================================================================
+ // Constants
+ // =============================================================================
+
+ /**
+  * Servers that benefit from lspmux multiplexing.
+  *
+  * lspmux can multiplex any LSP server, but it's most beneficial for servers
+  * with high startup cost or significant memory usage.
+  */
+ const DEFAULT_SUPPORTED_SERVERS = new Set([
+   "rust-analyzer",
+   // Other servers can be added after testing with lspmux
+ ]);
+
+ /** Timeout for liveness check (ms) */
+ const LIVENESS_TIMEOUT_MS = 1000;
+
+ /** Cache duration for lspmux state (5 minutes) */
+ const STATE_CACHE_TTL_MS = 5 * 60 * 1000;
+
+ // =============================================================================
+ // Config Path
+ // =============================================================================
+
+ /**
+  * Get the lspmux config path based on platform.
+  * Matches Rust's `dirs::config_dir()` behavior.
+  */
+ function getConfigPath(): string {
+   const home = homedir();
+   switch (platform()) {
+     case "win32":
+       return join(process.env.APPDATA ?? join(home, "AppData", "Roaming"), "lspmux", "config.toml");
+     case "darwin":
+       return join(home, "Library", "Application Support", "lspmux", "config.toml");
+     default:
+       return join(process.env.XDG_CONFIG_HOME ?? join(home, ".config"), "lspmux", "config.toml");
+   }
+ }
+
+ // =============================================================================
+ // State Management
+ // =============================================================================
+
+ let cachedState: LspmuxState | null = null;
+ let cacheTimestamp = 0;
+
+ /**
+  * Parse lspmux config.toml file.
+  */
+ async function parseConfig(): Promise<LspmuxConfig | null> {
+   try {
+     const file = Bun.file(getConfigPath());
+     if (!(await file.exists())) {
+       return null;
+     }
+     return TOML.parse(await file.text()) as LspmuxConfig;
+   } catch {
+     return null;
+   }
+ }
+
+ /**
+  * Check if lspmux server is running via `lspmux status`.
+  */
+ async function checkServerRunning(binaryPath: string): Promise<boolean> {
+   try {
+     const proc = Bun.spawn([binaryPath, "status"], {
+       stdout: "pipe",
+       stderr: "pipe",
+     });
+
+     const exited = await Promise.race([
+       proc.exited,
+       new Promise<null>((resolve) => setTimeout(() => resolve(null), LIVENESS_TIMEOUT_MS)),
+     ]);
+
+     if (exited === null) {
+       proc.kill();
+       return false;
+     }
+
+     return exited === 0;
+   } catch {
+     return false;
+   }
+ }
+
+ /**
+  * Detect lspmux availability and state.
+  * Results are cached for STATE_CACHE_TTL_MS.
+  *
+  * Set OMP_DISABLE_LSPMUX=1 to disable.
+  */
+ export async function detectLspmux(): Promise<LspmuxState> {
+   const now = Date.now();
+   if (cachedState && now - cacheTimestamp < STATE_CACHE_TTL_MS) {
+     return cachedState;
+   }
+
+   if (process.env.OMP_DISABLE_LSPMUX === "1") {
+     cachedState = { available: false, running: false, binaryPath: null, config: null };
+     cacheTimestamp = now;
+     return cachedState;
+   }
+
+   const binaryPath = Bun.which("lspmux");
+   if (!binaryPath) {
+     cachedState = { available: false, running: false, binaryPath: null, config: null };
+     cacheTimestamp = now;
+     return cachedState;
+   }
+
+   const [config, running] = await Promise.all([parseConfig(), checkServerRunning(binaryPath)]);
+
+   cachedState = { available: true, running, binaryPath, config };
+   cacheTimestamp = now;
+
+   if (running) {
+     logger.debug("lspmux detected and running", { binaryPath });
+   }
+
+   return cachedState;
+ }
+
+ /**
+  * Invalidate the cached lspmux state.
+  * Call this if you know the server state has changed.
+  */
+ export function invalidateLspmuxCache(): void {
+   cachedState = null;
+   cacheTimestamp = 0;
+ }
+
+ // =============================================================================
+ // Command Wrapping
+ // =============================================================================
+
+ /**
+  * Check if a server command is supported by lspmux.
+  */
+ export function isLspmuxSupported(command: string): boolean {
+   // Extract base command name (handle full paths)
+   const baseName = command.split("/").pop() ?? command;
+   return DEFAULT_SUPPORTED_SERVERS.has(baseName);
+ }
+
+ export interface LspmuxWrappedCommand {
+   command: string;
+   args: string[];
+   env?: Record<string, string>;
+ }
+
+ /**
+  * Wrap a server command to use lspmux client mode.
+  *
+  * @param originalCommand - The original LSP server command (e.g., "rust-analyzer")
+  * @param originalArgs - Original command arguments
+  * @param state - lspmux state from detectLspmux()
+  * @returns Wrapped command, args, and env vars; or original if lspmux unavailable
+  */
+ export function wrapWithLspmux(
+   originalCommand: string,
+   originalArgs: string[] | undefined,
+   state: LspmuxState,
+ ): LspmuxWrappedCommand {
+   if (!state.available || !state.running || !state.binaryPath) {
+     return { command: originalCommand, args: originalArgs ?? [] };
+   }
+
+   if (!isLspmuxSupported(originalCommand)) {
+     return { command: originalCommand, args: originalArgs ?? [] };
+   }
+
+   const baseName = originalCommand.split("/").pop() ?? originalCommand;
+   const isDefaultRustAnalyzer = baseName === "rust-analyzer" && originalCommand === "rust-analyzer";
+   const hasArgs = originalArgs && originalArgs.length > 0;
+
+   // rust-analyzer from $PATH with no args - lspmux's default, simplest case
+   if (isDefaultRustAnalyzer && !hasArgs) {
+     return { command: state.binaryPath, args: [] };
+   }
+
+   // Use explicit `client` subcommand with LSPMUX_SERVER env var
+   // Use `--` to separate lspmux options from server args
+   const args = hasArgs ? ["client", "--", ...originalArgs] : ["client"];
+   return {
+     command: state.binaryPath,
+     args,
+     env: { LSPMUX_SERVER: originalCommand },
+   };
+ }
+
+ /**
+  * Get lspmux-wrapped command if available, otherwise return original.
+  * This is the main entry point for config.ts integration.
+  *
+  * @param command - Original LSP server command
+  * @param args - Original command arguments
+  * @returns Command and args to use (possibly wrapped with lspmux)
+  */
+ export async function getLspmuxCommand(command: string, args?: string[]): Promise<LspmuxWrappedCommand> {
+   const state = await detectLspmux();
+   return wrapWithLspmux(command, args, state);
+ }
+
+ /**
+  * Check if lspmux is currently active and usable.
+  */
+ export async function isLspmuxActive(): Promise<boolean> {
+   const state = await detectLspmux();
+   return state.available && state.running;
+ }
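
For orientation, a minimal caller sketch (not part of this diff; the resolveServerSpawn name is hypothetical) showing how a config layer might consume getLspmuxCommand when building spawn arguments for an LSP server:

import { getLspmuxCommand } from "./lspmux";

// Hypothetical helper, not in the package: wraps a configured LSP server command,
// falling back to the original command when lspmux is missing, disabled, or not running.
async function resolveServerSpawn(command: string, args: string[] = []) {
  const wrapped = await getLspmuxCommand(command, args);
  return {
    command: wrapped.command,
    args: wrapped.args,
    env: { ...process.env, ...(wrapped.env ?? {}) },
  };
}
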
@@ -12,6 +12,8 @@ import type { MCPManager } from "../../mcp/manager";
  import type { ModelRegistry } from "../../model-registry";
  import { checkPythonKernelAvailability } from "../../python-kernel";
  import type { ToolSession } from "..";
+ import { createLspTool } from "../lsp/index";
+ import type { LspParams } from "../lsp/types";
  import { createPythonTool } from "../python";
  import { ensureArtifactsDir, getArtifactPaths } from "./artifacts";
  import { resolveModelPattern } from "./model-resolver";
@@ -27,6 +29,7 @@ import {
    TASK_SUBAGENT_PROGRESS_CHANNEL,
  } from "./types";
  import type {
+   LspToolCallRequest,
    MCPToolCallRequest,
    MCPToolMetadata,
    PythonToolCallCancel,
@@ -307,6 +310,9 @@ export async function runSubprocess(options: ExecutorOptions): Promise<SingleRes
      pythonProxyEnabled = availability.ok;
    }

+   const lspEnabled = enableLsp ?? true;
+   const lspToolRequested = lspEnabled && (toolNames === undefined || toolNames.includes("lsp"));
+
    let worker: Worker;
    try {
      worker = new Worker(new URL("./worker.ts", import.meta.url), { type: "module" });
@@ -385,6 +391,17 @@ export async function runSubprocess(options: ExecutorOptions): Promise<SingleRes
    const pythonTool = pythonProxyEnabled ? createPythonTool(pythonToolSession) : null;
    const pythonCallControllers = new Map<string, AbortController>();

+   const lspToolSession: ToolSession = {
+     cwd,
+     hasUI: false,
+     enableLsp: lspEnabled,
+     getSessionFile: () => pythonSessionFile,
+     getSessionSpawns: () => spawnsEnv,
+     settings: options.settingsManager as ToolSession["settings"],
+     settingsManager: options.settingsManager,
+   };
+   const lspTool = lspToolRequested ? createLspTool(lspToolSession) : null;
+
    // Accumulate usage incrementally from message_end events (no memory for streaming events)
    const accumulatedUsage = {
      input: 0,
@@ -678,12 +695,13 @@ export async function runSubprocess(options: ExecutorOptions): Promise<SingleRes
        outputSchema,
        sessionFile,
        spawnsEnv,
-       enableLsp,
+       enableLsp: lspEnabled,
        serializedAuth: options.authStorage?.serialize(),
        serializedModels: options.modelRegistry?.serialize(),
        serializedSettings,
        mcpTools: options.mcpManager ? extractMCPToolMetadata(options.mcpManager) : undefined,
        pythonToolProxy: pythonProxyEnabled,
+       lspToolProxy: Boolean(lspTool),
      },
    };

@@ -792,6 +810,40 @@ export async function runSubprocess(options: ExecutorOptions): Promise<SingleRes
      }
    };

+   const handleLspCall = async (request: LspToolCallRequest) => {
+     if (!lspTool) {
+       worker.postMessage({
+         type: "lsp_tool_result",
+         callId: request.callId,
+         error: "LSP proxy not available",
+       });
+       return;
+     }
+     try {
+       const result = await withTimeout(
+         lspTool.execute(request.callId, request.params as LspParams, signal),
+         request.timeoutMs,
+       );
+       worker.postMessage({
+         type: "lsp_tool_result",
+         callId: request.callId,
+         result: { content: result.content ?? [], details: result.details },
+       });
+     } catch (error) {
+       const message =
+         request.timeoutMs !== undefined && error instanceof Error && error.message.includes("timed out")
+           ? `LSP tool call timed out after ${request.timeoutMs}ms`
+           : error instanceof Error
+             ? error.message
+             : String(error);
+       worker.postMessage({
+         type: "lsp_tool_result",
+         callId: request.callId,
+         error: message,
+       });
+     }
+   };
+
    const onMessage = (event: WorkerMessageEvent<SubagentWorkerResponse>) => {
      const message = event.data;
      if (!message || resolved) return;
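
The catch branch in handleLspCall matches on a "timed out" substring, which assumes the error shape produced by the executor's withTimeout helper. That helper is not shown in this diff; a minimal sketch consistent with how it is called here (a promise plus an optional timeoutMs) could look like:

// Hedged sketch only: the package's real withTimeout is not part of this diff.
// Resolves the wrapped promise, or rejects with a "timed out" error once timeoutMs elapses.
async function withTimeout<T>(promise: Promise<T>, timeoutMs?: number): Promise<T> {
  if (timeoutMs === undefined) return promise;
  let timer: ReturnType<typeof setTimeout> | undefined;
  const timeout = new Promise<never>((_, reject) => {
    timer = setTimeout(() => reject(new Error(`timed out after ${timeoutMs}ms`)), timeoutMs);
  });
  try {
    return await Promise.race([promise, timeout]);
  } finally {
    if (timer !== undefined) clearTimeout(timer);
  }
}
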
@@ -807,6 +859,10 @@ export async function runSubprocess(options: ExecutorOptions): Promise<SingleRes
        handlePythonCancel(message as PythonToolCallCancel);
        return;
      }
+     if (message.type === "lsp_tool_call") {
+       handleLspCall(message as LspToolCallRequest);
+       return;
+     }
      if (message.type === "event") {
        try {
          processEvent(message.event);
@@ -64,6 +64,24 @@ export interface PythonToolCallCancel {
    reason?: string;
  }

+ export interface LspToolCallRequest {
+   type: "lsp_tool_call";
+   callId: string;
+   params: Record<string, unknown>;
+   timeoutMs?: number;
+ }
+
+ export interface LspToolCallResponse {
+   type: "lsp_tool_result";
+   callId: string;
+   result?: {
+     content: Array<{ type: string; text?: string; [key: string]: unknown }>;
+     details?: unknown;
+     isError?: boolean;
+   };
+   error?: string;
+ }
+
  export interface SubagentWorkerStartPayload {
    cwd: string;
    task: string;
@@ -80,6 +98,7 @@ export interface SubagentWorkerStartPayload {
    serializedSettings?: Settings;
    mcpTools?: MCPToolMetadata[];
    pythonToolProxy?: boolean;
+   lspToolProxy?: boolean;
  }

  export type SubagentWorkerRequest =
@@ -87,11 +106,13 @@ export type SubagentWorkerRequest =
    | { type: "abort" }
    | MCPToolCallResponse
    | PythonToolCallResponse
-   | PythonToolCallCancel;
+   | PythonToolCallCancel
+   | LspToolCallResponse;

  export type SubagentWorkerResponse =
    | { type: "event"; event: AgentEvent }
    | { type: "done"; exitCode: number; durationMs: number; error?: string; aborted?: boolean }
    | MCPToolCallRequest
    | PythonToolCallRequest
-   | PythonToolCallCancel;
+   | PythonToolCallCancel
+   | LspToolCallRequest;
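
For orientation, a hedged illustration of the round-trip these unions describe (not part of the diff): the worker emits an lsp_tool_call and the parent replies with a matching lsp_tool_result. The params payload below is hypothetical, since the LspParams shape lives in ../lsp/types and is not shown here.

// Hypothetical values for illustration only.
const call: LspToolCallRequest = {
  type: "lsp_tool_call",
  callId: "lsp_1712345678901_1",
  params: { action: "diagnostics", path: "src/main.rs" },
  timeoutMs: 30_000,
};

const reply: LspToolCallResponse = {
  type: "lsp_tool_result",
  callId: call.callId,
  result: { content: [{ type: "text", text: "No diagnostics found." }] },
};
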
@@ -16,18 +16,22 @@
  import type { AgentEvent, ThinkingLevel } from "@oh-my-pi/pi-agent-core";
  import type { Api, Model } from "@oh-my-pi/pi-ai";
  import type { TSchema } from "@sinclair/typebox";
+ import lspDescription from "../../../prompts/tools/lsp.md" with { type: "text" };
  import type { AgentSessionEvent } from "../../agent-session";
  import { AuthStorage } from "../../auth-storage";
  import type { CustomTool } from "../../custom-tools/types";
  import { logger } from "../../logger";
  import { ModelRegistry } from "../../model-registry";
  import { parseModelPattern, parseModelString } from "../../model-resolver";
+ import { renderPromptTemplate } from "../../prompt-templates";
  import { createAgentSession, discoverAuthStorage, discoverModels } from "../../sdk";
  import { SessionManager } from "../../session-manager";
  import { SettingsManager } from "../../settings-manager";
  import { untilAborted } from "../../utils";
+ import { type LspToolDetails, lspSchema } from "../lsp/types";
  import { getPythonToolDescription, type PythonToolDetails, type PythonToolParams, pythonSchema } from "../python";
  import type {
+   LspToolCallResponse,
    MCPToolCallResponse,
    MCPToolMetadata,
    PythonToolCallResponse,
@@ -58,11 +62,19 @@ interface PendingPythonCall {
    timeoutId?: ReturnType<typeof setTimeout>;
  }

+ interface PendingLspCall {
+   resolve: (result: LspToolCallResponse["result"]) => void;
+   reject: (error: Error) => void;
+   timeoutId?: ReturnType<typeof setTimeout>;
+ }
+
  const pendingMCPCalls = new Map<string, PendingMCPCall>();
  const pendingPythonCalls = new Map<string, PendingPythonCall>();
+ const pendingLspCalls = new Map<string, PendingLspCall>();
  const MCP_CALL_TIMEOUT_MS = 60_000;
  let mcpCallIdCounter = 0;
  let pythonCallIdCounter = 0;
+ let lspCallIdCounter = 0;

  function generateMCPCallId(): string {
    return `mcp_${Date.now()}_${++mcpCallIdCounter}`;
@@ -72,6 +84,10 @@ function generatePythonCallId(): string {
    return `python_${Date.now()}_${++pythonCallIdCounter}`;
  }

+ function generateLspCallId(): string {
+   return `lsp_${Date.now()}_${++lspCallIdCounter}`;
+ }
+
  function callMCPToolViaParent(
    toolName: string,
    params: Record<string, unknown>,
@@ -193,6 +209,65 @@ function callPythonToolViaParent(
    });
  }

+ function callLspToolViaParent(
+   params: Record<string, unknown>,
+   signal?: AbortSignal,
+   timeoutMs?: number,
+ ): Promise<LspToolCallResponse["result"]> {
+   return new Promise((resolve, reject) => {
+     const callId = generateLspCallId();
+     if (signal?.aborted) {
+       reject(new Error("Aborted"));
+       return;
+     }
+
+     const timeoutId =
+       typeof timeoutMs === "number" && Number.isFinite(timeoutMs)
+         ? setTimeout(() => {
+             pendingLspCalls.delete(callId);
+             reject(new Error(`LSP call timed out after ${timeoutMs}ms`));
+           }, timeoutMs)
+         : undefined;
+
+     const cleanup = () => {
+       if (timeoutId) {
+         clearTimeout(timeoutId);
+       }
+       pendingLspCalls.delete(callId);
+     };
+
+     if (typeof signal?.addEventListener === "function") {
+       signal.addEventListener(
+         "abort",
+         () => {
+           cleanup();
+           reject(new Error("Aborted"));
+         },
+         { once: true },
+       );
+     }
+
+     pendingLspCalls.set(callId, {
+       resolve: (result) => {
+         cleanup();
+         resolve(result ?? { content: [] });
+       },
+       reject: (error) => {
+         cleanup();
+         reject(error);
+       },
+       timeoutId,
+     });
+
+     postMessageSafe({
+       type: "lsp_tool_call",
+       callId,
+       params,
+       timeoutMs,
+     } as SubagentWorkerResponse);
+   });
+ }
+
  function handleMCPToolResult(response: MCPToolCallResponse): void {
    const pending = pendingMCPCalls.get(response.callId);
    if (!pending) return;
@@ -213,12 +288,24 @@ function handlePythonToolResult(response: PythonToolCallResponse): void {
    }
  }

+ function handleLspToolResult(response: LspToolCallResponse): void {
+   const pending = pendingLspCalls.get(response.callId);
+   if (!pending) return;
+   if (response.error) {
+     pending.reject(new Error(response.error));
+   } else {
+     pending.resolve(response.result);
+   }
+ }
+
  function rejectPendingCalls(reason: string): void {
    const error = new Error(reason);
    const mcpCalls = Array.from(pendingMCPCalls.values());
    const pythonCalls = Array.from(pendingPythonCalls.values());
+   const lspCalls = Array.from(pendingLspCalls.values());
    pendingMCPCalls.clear();
    pendingPythonCalls.clear();
+   pendingLspCalls.clear();
    for (const pending of mcpCalls) {
      clearTimeout(pending.timeoutId);
      pending.reject(error);
@@ -227,6 +314,10 @@ function rejectPendingCalls(reason: string): void {
      clearTimeout(pending.timeoutId);
      pending.reject(error);
    }
+   for (const pending of lspCalls) {
+     clearTimeout(pending.timeoutId);
+     pending.reject(error);
+   }
  }

  function createMCPProxyTool(metadata: MCPToolMetadata): CustomTool<TSchema> {
@@ -296,6 +387,40 @@ function createPythonProxyTool(): CustomTool<typeof pythonSchema> {
    };
  }

+ function createLspProxyTool(): CustomTool<typeof lspSchema> {
+   return {
+     name: "lsp",
+     label: "LSP",
+     description: renderPromptTemplate(lspDescription),
+     parameters: lspSchema,
+     execute: async (_toolCallId, params, _onUpdate, _ctx, signal) => {
+       try {
+         const result = await callLspToolViaParent(params as Record<string, unknown>, signal);
+         return {
+           content:
+             result?.content?.map((c) =>
+               c.type === "text"
+                 ? { type: "text" as const, text: c.text ?? "" }
+                 : { type: "text" as const, text: JSON.stringify(c) },
+             ) ?? [],
+           details: result?.details as LspToolDetails | undefined,
+         };
+       } catch (error) {
+         const { action } = params;
+         return {
+           content: [
+             {
+               type: "text" as const,
+               text: `LSP error: ${error instanceof Error ? error.message : String(error)}`,
+             },
+           ],
+           details: { action, success: false } as LspToolDetails,
+         };
+       }
+     },
+   };
+ }
+
  interface WorkerMessageEvent<T> {
    data: T;
  }
@@ -423,12 +548,17 @@ async function runTask(runState: RunState, payload: SubagentWorkerStartPayload):
      checkAbort();
    }

-   // Create MCP/python proxy tools if provided
+   // Create MCP/python/LSP proxy tools if provided
    const mcpProxyTools: CustomTool<TSchema>[] = payload.mcpTools?.map(createMCPProxyTool) ?? [];
    const pythonProxyTools: CustomTool<TSchema>[] = payload.pythonToolProxy
      ? [createPythonProxyTool() as unknown as CustomTool<TSchema>]
      : [];
-   const proxyTools = [...mcpProxyTools, ...pythonProxyTools];
+   const lspProxyTools: CustomTool<TSchema>[] = payload.lspToolProxy
+     ? [createLspProxyTool() as unknown as CustomTool<TSchema>]
+     : [];
+   const proxyTools = [...mcpProxyTools, ...pythonProxyTools, ...lspProxyTools];
+   const enableLsp = payload.enableLsp ?? true;
+   const lspProxyEnabled = payload.lspToolProxy ?? false;

    // Resolve model override (equivalent to CLI's parseModelPattern with --model)
    const { model, thinkingLevel: modelThinkingLevel } = resolveModelOverride(payload.model, modelRegistry);
@@ -465,7 +595,7 @@ async function runTask(runState: RunState, payload: SubagentWorkerStartPayload):
      hasUI: false,
      // Pass spawn restrictions to nested tasks
      spawns: payload.spawnsEnv,
-     enableLsp: payload.enableLsp ?? true,
+     enableLsp: enableLsp && !lspProxyEnabled,
      // Disable local MCP discovery if using proxy tools
      enableMCP: !payload.mcpTools,
      // Add proxy tools
@@ -703,7 +833,7 @@ self.addEventListener("messageerror", () => {
    reportFatal("Failed to deserialize parent message");
  });

- // Message handler - receives start/abort/mcp_tool_result commands from parent
+ // Message handler - receives start/abort/tool_result commands from parent
  globalThis.addEventListener("message", (event: WorkerMessageEvent<SubagentWorkerRequest>) => {
    const message = event.data;
    if (!message) return;
@@ -723,6 +853,11 @@ globalThis.addEventListener("message", (event: WorkerMessageEvent<SubagentWorker
      return;
    }

+   if (message.type === "lsp_tool_result") {
+     handleLspToolResult(message);
+     return;
+   }
+
    if (message.type === "start") {
      // Only allow one task per worker
      if (activeRun) return;