pi-subagents 0.3.1 → 0.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,19 @@
  # Changelog
 
+ ## [0.3.2] - 2026-01-25
+
+ ### Performance
+ - **4x faster polling** - Reduced poll interval from 1000ms to 250ms (efficient with mtime caching)
+ - **Mtime-based caching** - status.json and output tail reads cached to avoid redundant I/O
+ - **Unified throttled updates** - All onUpdate calls consolidated under 50ms throttle
+ - **Widget change detection** - Hash-based change detection skips no-op re-renders
+ - **Array optimizations** - Use concat instead of spread for chain progress updates
+
+ ### Fixed
+ - **Timer leaks** - Track and clear pendingTimer and cleanupTimers properly
+ - **Updates after close** - processClosed flag prevents updates after process terminates
+ - **Session cleanup** - Clear cleanup timers on session_start/switch/branch/shutdown
+
  ## [0.3.1] - 2026-01-24
 
  ### Changed
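
The changelog entries above only summarize the techniques; the actual implementation changes appear in the hunks below. As a rough, standalone sketch of the "unified throttled updates" idea, here is a minimal leading-plus-trailing throttle in TypeScript. The `makeThrottled` helper and `notify` callback are illustrative names, not package APIs; the shipped logic is the `scheduleUpdate` closure added to execution.ts further down.

```ts
// Minimal sketch of a 50ms throttle: fire immediately if the window has
// elapsed, otherwise queue exactly one trailing call for the remainder.
function makeThrottled(notify: () => void, throttleMs = 50): () => void {
  let lastRun = 0;
  let pending: ReturnType<typeof setTimeout> | null = null;

  return () => {
    const now = Date.now();
    const elapsed = now - lastRun;
    if (elapsed >= throttleMs) {
      // Enough time has passed: run now and drop any queued trailing call.
      if (pending) {
        clearTimeout(pending);
        pending = null;
      }
      lastRun = now;
      notify();
    } else if (!pending) {
      // Too soon: schedule a single trailing call so the last event is never lost.
      pending = setTimeout(() => {
        pending = null;
        lastRun = Date.now();
        notify();
      }, throttleMs - elapsed);
    }
  };
}

// Usage: calling the returned function on every event invokes notify() at most
// once per 50ms window while still delivering the final update.
const scheduleUpdate = makeThrottled(() => console.log("render progress"), 50);
for (let i = 0; i < 10; i++) scheduleUpdate();
```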
@@ -250,18 +250,22 @@ export async function executeChain(params: ChainExecutionParams): Promise<ChainE
  artifactsDir: artifactConfig.enabled ? artifactsDir : undefined,
  artifactConfig,
  onUpdate: onUpdate
- ? (p) =>
+ ? (p) => {
+ // Use concat instead of spread for better performance
+ const stepResults = p.details?.results || [];
+ const stepProgress = p.details?.progress || [];
  onUpdate({
  ...p,
  details: {
  mode: "chain",
- results: [...results, ...(p.details?.results || [])],
- progress: [...allProgress, ...(p.details?.progress || [])],
+ results: results.concat(stepResults),
+ progress: allProgress.concat(stepProgress),
  chainAgents,
  totalSteps,
  currentStepIndex: stepIndex,
  },
- })
+ });
+ }
  : undefined,
  });
 
@@ -372,18 +376,22 @@ export async function executeChain(params: ChainExecutionParams): Promise<ChainE
  artifactsDir: artifactConfig.enabled ? artifactsDir : undefined,
  artifactConfig,
  onUpdate: onUpdate
- ? (p) =>
+ ? (p) => {
+ // Use concat instead of spread for better performance
+ const stepResults = p.details?.results || [];
+ const stepProgress = p.details?.progress || [];
  onUpdate({
  ...p,
  details: {
  mode: "chain",
- results: [...results, ...(p.details?.results || [])],
- progress: [...allProgress, ...(p.details?.progress || [])],
+ results: results.concat(stepResults),
+ progress: allProgress.concat(stepProgress),
  chainAgents,
  totalSteps,
  currentStepIndex: stepIndex,
  },
- })
+ });
+ }
  : undefined,
  });
 
package/execution.ts CHANGED
@@ -132,6 +132,50 @@ export async function runSync(
  const proc = spawn("pi", args, { cwd: cwd ?? runtimeCwd, stdio: ["ignore", "pipe", "pipe"] });
  let buf = "";
 
+ // Throttled update mechanism - consolidates all updates
+ let lastUpdateTime = 0;
+ let updatePending = false;
+ let pendingTimer: ReturnType<typeof setTimeout> | null = null;
+ let processClosed = false;
+ const UPDATE_THROTTLE_MS = 50; // Reduced from 75ms for faster responsiveness
+
+ const scheduleUpdate = () => {
+ if (!onUpdate || processClosed) return;
+ const now = Date.now();
+ const elapsed = now - lastUpdateTime;
+
+ if (elapsed >= UPDATE_THROTTLE_MS) {
+ // Enough time passed, update immediately
+ // Clear any pending timer to avoid double-updates
+ if (pendingTimer) {
+ clearTimeout(pendingTimer);
+ pendingTimer = null;
+ }
+ lastUpdateTime = now;
+ updatePending = false;
+ progress.durationMs = now - startTime;
+ onUpdate({
+ content: [{ type: "text", text: getFinalOutput(result.messages) || "(running...)" }],
+ details: { mode: "single", results: [result], progress: [progress] },
+ });
+ } else if (!updatePending) {
+ // Schedule update for later
+ updatePending = true;
+ pendingTimer = setTimeout(() => {
+ pendingTimer = null;
+ if (updatePending && !processClosed) {
+ updatePending = false;
+ lastUpdateTime = Date.now();
+ progress.durationMs = Date.now() - startTime;
+ onUpdate({
+ content: [{ type: "text", text: getFinalOutput(result.messages) || "(running...)" }],
+ details: { mode: "single", results: [result], progress: [progress] },
+ });
+ }
+ }, UPDATE_THROTTLE_MS - elapsed);
+ }
+ };
+
  const processLine = (line: string) => {
  if (!line.trim()) return;
  jsonlLines.push(line);
@@ -144,11 +188,9 @@ export async function runSync(
  progress.toolCount++;
  progress.currentTool = evt.toolName;
  progress.currentToolArgs = extractToolArgsPreview((evt.args || {}) as Record<string, unknown>);
- if (onUpdate)
- onUpdate({
- content: [{ type: "text", text: getFinalOutput(result.messages) || "(running...)" }],
- details: { mode: "single", results: [result], progress: [progress] },
- });
+ // Tool start is important - update immediately by forcing throttle reset
+ lastUpdateTime = 0;
+ scheduleUpdate();
  }
 
  if (evt.type === "tool_execution_end") {
@@ -164,11 +206,7 @@ export async function runSync(
  }
  progress.currentTool = undefined;
  progress.currentToolArgs = undefined;
- if (onUpdate)
- onUpdate({
- content: [{ type: "text", text: getFinalOutput(result.messages) || "(running...)" }],
- details: { mode: "single", results: [result], progress: [progress] },
- });
+ scheduleUpdate();
  }
 
  if (evt.type === "message_end" && evt.message) {
@@ -193,15 +231,14 @@ export async function runSync(
  .split("\n")
  .filter((l) => l.trim())
  .slice(-10);
- // Append to existing recentOutput (keep last 50 total)
- progress.recentOutput = [...progress.recentOutput, ...lines].slice(-50);
+ // Append to existing recentOutput (keep last 50 total) - mutate in place for efficiency
+ progress.recentOutput.push(...lines);
+ if (progress.recentOutput.length > 50) {
+ progress.recentOutput.splice(0, progress.recentOutput.length - 50);
+ }
  }
  }
- if (onUpdate)
- onUpdate({
- content: [{ type: "text", text: getFinalOutput(result.messages) || "(running...)" }],
- details: { mode: "single", results: [result], progress: [progress] },
- });
+ scheduleUpdate();
  }
  if (evt.type === "tool_result_end" && evt.message) {
  result.messages.push(evt.message);
@@ -212,21 +249,18 @@ export async function runSync(
  .split("\n")
  .filter((l) => l.trim())
  .slice(-10);
- // Append to existing recentOutput (keep last 50 total)
- progress.recentOutput = [...progress.recentOutput, ...toolLines].slice(-50);
+ // Append to existing recentOutput (keep last 50 total) - mutate in place for efficiency
+ progress.recentOutput.push(...toolLines);
+ if (progress.recentOutput.length > 50) {
+ progress.recentOutput.splice(0, progress.recentOutput.length - 50);
+ }
  }
- if (onUpdate)
- onUpdate({
- content: [{ type: "text", text: getFinalOutput(result.messages) || "(running...)" }],
- details: { mode: "single", results: [result], progress: [progress] },
- });
+ scheduleUpdate();
  }
  } catch {}
  };
 
  let stderrBuf = "";
- let lastUpdateTime = 0;
- const UPDATE_THROTTLE_MS = 75;
 
  proc.stdout.on("data", (d) => {
  buf += d.toString();
@@ -234,21 +268,18 @@ export async function runSync(
  buf = lines.pop() || "";
  lines.forEach(processLine);
 
- // Throttled periodic update for smoother progress display
- const now = Date.now();
- if (onUpdate && now - lastUpdateTime > UPDATE_THROTTLE_MS) {
- lastUpdateTime = now;
- progress.durationMs = now - startTime;
- onUpdate({
- content: [{ type: "text", text: getFinalOutput(result.messages) || "(running...)" }],
- details: { mode: "single", results: [result], progress: [progress] },
- });
- }
+ // Also schedule an update on data received (handles streaming output)
+ scheduleUpdate();
  });
  proc.stderr.on("data", (d) => {
  stderrBuf += d.toString();
  });
  proc.on("close", (code) => {
+ processClosed = true;
+ if (pendingTimer) {
+ clearTimeout(pendingTimer);
+ pendingTimer = null;
+ }
  if (buf.trim()) processLine(buf);
  if (code !== 0 && stderrBuf.trim() && !result.error) {
  result.error = stderrBuf.trim();
package/index.ts CHANGED
@@ -73,6 +73,7 @@ export default function registerSubagentExtension(pi: ExtensionAPI): void {
  let baseCwd = process.cwd();
  let currentSessionId: string | null = null;
  const asyncJobs = new Map<string, AsyncJobState>();
+ const cleanupTimers = new Map<string, ReturnType<typeof setTimeout>>(); // Track cleanup timeouts
  let lastUiContext: ExtensionContext | null = null;
  let poller: NodeJS.Timeout | null = null;
 
@@ -88,6 +89,10 @@ export default function registerSubagentExtension(pi: ExtensionAPI): void {
  }
 
  for (const job of asyncJobs.values()) {
+ // Skip status reads for finished jobs - they won't change
+ if (job.status === "complete" || job.status === "failed") {
+ continue;
+ }
  const status = readStatus(job.asyncDir);
  if (status) {
  job.status = status.state;
@@ -589,10 +594,13 @@ For "scout → planner" or multi-step flows, use chain (not multiple single call
  if (lastUiContext) {
  renderWidget(lastUiContext, Array.from(asyncJobs.values()));
  }
- setTimeout(() => {
+ // Schedule cleanup after 10 seconds (track timer for cleanup on shutdown)
+ const timer = setTimeout(() => {
+ cleanupTimers.delete(asyncId);
  asyncJobs.delete(asyncId);
  if (lastUiContext) renderWidget(lastUiContext, Array.from(asyncJobs.values()));
  }, 10000);
+ cleanupTimers.set(asyncId, timer);
  });
 
  pi.on("tool_result", (event, ctx) => {
@@ -608,6 +616,8 @@ For "scout → planner" or multi-step flows, use chain (not multiple single call
  pi.on("session_start", (_event, ctx) => {
  baseCwd = ctx.cwd;
  currentSessionId = ctx.sessionManager.getSessionFile() ?? `session-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
+ for (const timer of cleanupTimers.values()) clearTimeout(timer);
+ cleanupTimers.clear();
  asyncJobs.clear();
  if (ctx.hasUI) {
  lastUiContext = ctx;
@@ -617,6 +627,8 @@ For "scout → planner" or multi-step flows, use chain (not multiple single call
  pi.on("session_switch", (_event, ctx) => {
  baseCwd = ctx.cwd;
  currentSessionId = ctx.sessionManager.getSessionFile() ?? `session-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
+ for (const timer of cleanupTimers.values()) clearTimeout(timer);
+ cleanupTimers.clear();
  asyncJobs.clear();
  if (ctx.hasUI) {
  lastUiContext = ctx;
@@ -626,6 +638,8 @@ For "scout → planner" or multi-step flows, use chain (not multiple single call
  pi.on("session_branch", (_event, ctx) => {
  baseCwd = ctx.cwd;
  currentSessionId = ctx.sessionManager.getSessionFile() ?? `session-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
+ for (const timer of cleanupTimers.values()) clearTimeout(timer);
+ cleanupTimers.clear();
  asyncJobs.clear();
  if (ctx.hasUI) {
  lastUiContext = ctx;
@@ -636,6 +650,11 @@ For "scout → planner" or multi-step flows, use chain (not multiple single call
  watcher.close();
  if (poller) clearInterval(poller);
  poller = null;
+ // Clear all pending cleanup timers
+ for (const timer of cleanupTimers.values()) {
+ clearTimeout(timer);
+ }
+ cleanupTimers.clear();
  asyncJobs.clear();
  if (lastUiContext?.hasUI) {
  lastUiContext.ui.setWidget(WIDGET_KEY, undefined);
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "pi-subagents",
- "version": "0.3.1",
+ "version": "0.3.2",
  "description": "Pi extension for delegating tasks to subagents with chains, parallel execution, and TUI clarification",
  "author": "Nico Bailon",
  "license": "MIT",
package/render.ts CHANGED
@@ -16,21 +16,46 @@ import { getFinalOutput, getDisplayItems, getOutputTail, getLastActivity } from
 
  type Theme = ExtensionContext["ui"]["theme"];
 
+ // Track last rendered widget state to avoid no-op re-renders
+ let lastWidgetHash = "";
+
+ /**
+ * Compute a simple hash of job states for change detection
+ */
+ function computeWidgetHash(jobs: AsyncJobState[]): string {
+ return jobs.slice(0, MAX_WIDGET_JOBS).map(job =>
+ `${job.asyncId}:${job.status}:${job.currentStep}:${job.updatedAt}:${job.totalTokens?.total ?? 0}`
+ ).join("|");
+ }
+
  /**
  * Render the async jobs widget
  */
  export function renderWidget(ctx: ExtensionContext, jobs: AsyncJobState[]): void {
  if (!ctx.hasUI) return;
  if (jobs.length === 0) {
- ctx.ui.setWidget(WIDGET_KEY, undefined);
+ if (lastWidgetHash !== "") {
+ lastWidgetHash = "";
+ ctx.ui.setWidget(WIDGET_KEY, undefined);
+ }
  return;
  }
 
+ // Check if anything changed since last render
+ // Always re-render if any displayed job is running (output tail updates constantly)
+ const displayedJobs = jobs.slice(0, MAX_WIDGET_JOBS);
+ const hasRunningJobs = displayedJobs.some(job => job.status === "running");
+ const newHash = computeWidgetHash(jobs);
+ if (!hasRunningJobs && newHash === lastWidgetHash) {
+ return; // Skip re-render, nothing changed
+ }
+ lastWidgetHash = newHash;
+
  const theme = ctx.ui.theme;
  const lines: string[] = [];
  lines.push(theme.fg("accent", "Async subagents"));
 
- for (const job of jobs.slice(0, MAX_WIDGET_JOBS)) {
+ for (const job of displayedJobs) {
  const id = job.asyncId.slice(0, 6);
  const status =
  job.status === "complete"
package/types.ts CHANGED
@@ -222,7 +222,7 @@ export const MAX_CONCURRENCY = 4;
  export const RESULTS_DIR = "/tmp/pi-async-subagent-results";
  export const ASYNC_DIR = "/tmp/pi-async-subagent-runs";
  export const WIDGET_KEY = "subagent-async";
- export const POLL_INTERVAL_MS = 1000;
+ export const POLL_INTERVAL_MS = 250;
  export const MAX_WIDGET_JOBS = 4;
 
  // ============================================================================
package/utils.ts CHANGED
@@ -12,37 +12,71 @@ import type { AsyncStatus, DisplayItem, ErrorInfo } from "./types.js";
  // File System Utilities
  // ============================================================================
 
+ // Cache for status file reads - avoid re-reading unchanged files
+ const statusCache = new Map<string, { mtime: number; status: AsyncStatus }>();
+
  /**
- * Read async job status from disk
+ * Read async job status from disk (with mtime-based caching)
  */
  export function readStatus(asyncDir: string): AsyncStatus | null {
  const statusPath = path.join(asyncDir, "status.json");
- if (!fs.existsSync(statusPath)) return null;
  try {
+ const stat = fs.statSync(statusPath);
+ const cached = statusCache.get(statusPath);
+ if (cached && cached.mtime === stat.mtimeMs) {
+ return cached.status;
+ }
  const content = fs.readFileSync(statusPath, "utf-8");
- return JSON.parse(content) as AsyncStatus;
+ const status = JSON.parse(content) as AsyncStatus;
+ statusCache.set(statusPath, { mtime: stat.mtimeMs, status });
+ // Limit cache size to prevent memory leaks
+ if (statusCache.size > 50) {
+ const firstKey = statusCache.keys().next().value;
+ if (firstKey) statusCache.delete(firstKey);
+ }
+ return status;
  } catch {
  return null;
  }
  }
 
+ // Cache for output tail reads - avoid re-reading unchanged files
+ const outputTailCache = new Map<string, { mtime: number; size: number; lines: string[] }>();
+
  /**
- * Get the last N lines from an output file
+ * Get the last N lines from an output file (with mtime/size-based caching)
  */
  export function getOutputTail(outputFile: string | undefined, maxLines: number = 3): string[] {
- if (!outputFile || !fs.existsSync(outputFile)) return [];
+ if (!outputFile) return [];
  let fd: number | null = null;
  try {
  const stat = fs.statSync(outputFile);
  if (stat.size === 0) return [];
+
+ // Check cache using both mtime and size (size changes more frequently during writes)
+ const cached = outputTailCache.get(outputFile);
+ if (cached && cached.mtime === stat.mtimeMs && cached.size === stat.size) {
+ return cached.lines;
+ }
+
  const tailBytes = 4096;
  const start = Math.max(0, stat.size - tailBytes);
  fd = fs.openSync(outputFile, "r");
  const buffer = Buffer.alloc(Math.min(tailBytes, stat.size));
  fs.readSync(fd, buffer, 0, buffer.length, start);
  const content = buffer.toString("utf-8");
- const lines = content.split("\n").filter((l) => l.trim());
- return lines.slice(-maxLines).map((l) => l.slice(0, 120) + (l.length > 120 ? "..." : ""));
+ const allLines = content.split("\n").filter((l) => l.trim());
+ const lines = allLines.slice(-maxLines).map((l) => l.slice(0, 120) + (l.length > 120 ? "..." : ""));
+
+ // Cache the result
+ outputTailCache.set(outputFile, { mtime: stat.mtimeMs, size: stat.size, lines });
+ // Limit cache size
+ if (outputTailCache.size > 20) {
+ const firstKey = outputTailCache.keys().next().value;
+ if (firstKey) outputTailCache.delete(firstKey);
+ }
+
+ return lines;
  } catch {
  return [];
  } finally {
@@ -58,8 +92,9 @@ export function getOutputTail(outputFile: string | undefined, maxLines: number =
  * Get human-readable last activity time for a file
  */
  export function getLastActivity(outputFile: string | undefined): string {
- if (!outputFile || !fs.existsSync(outputFile)) return "";
+ if (!outputFile) return "";
  try {
+ // Single stat call - throws if file doesn't exist
  const stat = fs.statSync(outputFile);
  const ago = Date.now() - stat.mtimeMs;
  if (ago < 1000) return "active now";
@@ -91,11 +126,14 @@ export function findLatestSessionFile(sessionDir: string): string | null {
  if (!fs.existsSync(sessionDir)) return null;
  const files = fs.readdirSync(sessionDir)
  .filter((f) => f.endsWith(".jsonl"))
- .map((f) => ({
- name: f,
- path: path.join(sessionDir, f),
- mtime: fs.statSync(path.join(sessionDir, f)).mtimeMs,
- }))
+ .map((f) => {
+ const filePath = path.join(sessionDir, f);
+ return {
+ name: f,
+ path: filePath,
+ mtime: fs.statSync(filePath).mtimeMs,
+ };
+ })
  .sort((a, b) => b.mtime - a.mtime);
  return files.length > 0 ? files[0].path : null;
  }