omni-pi 0.11.0 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,43 @@
1
1
  # Changelog
2
2
 
3
+ ## 0.12.0 - 2026-04-27
4
+
5
+ ### Security and robustness
6
+
7
+ - Sanitized untrusted text before it reaches brain prompts and `DECISIONS.md`
8
+ - Sanitized generated `SKILL.md` files and fixed `Set` mutation in loop
9
+ - Added atomic writes for `.omni/` and `.pi/` state files
10
+ - Hardened version parsing in the self-updater
11
+
12
+ ### Bug fixes
13
+
14
+ - Fixed `isRequestRelated` to actually use the overlap ratio for planning continuity
15
+ - Fixed repo-map indexing, fingerprints, and dirty-path tracking
16
+ - Fixed backtick code span tracking in the task table parser
17
+ - Fixed prerelease ordering so pre-releases sort below their matching release in the updater
18
+ - Used `os.homedir()` for reliable home directory resolution in the updater
19
+ - Cached package version at module load to avoid repeated filesystem reads in the header
20
+
21
+ ### Dependencies
22
+
23
+ - Upgraded `@mariozechner/pi-coding-agent` to `0.70.2`
24
+ - Upgraded `@anthropic-ai/claude-agent-sdk` to `0.2.119`
25
+ - Upgraded `@juanibiapina/pi-powerbar` to `0.9.1`
26
+ - Upgraded `pi-interview` to `0.8.6`
27
+ - Upgraded `pi-prompt-template-model` to `0.9.1`
28
+ - Upgraded `glimpseui` to `0.8.0`
29
+ - Upgraded `@biomejs/biome` to `2.4.13`
30
+ - Upgraded `typescript` to `6.0.3`
31
+ - Upgraded `vitest` to `4.1.5`
32
+ - Upgraded `@types/node` to `25.6.0`
33
+
34
+ ### Housekeeping
35
+
36
+ - Hoisted control-char regexes and applied Biome formatting
37
+ - Included tests in `tsc` check and added launcher type declarations
38
+ - Added `tsbuildinfo` and coverage artifacts to `.gitignore`
39
+ - Enforced commit-after-every-task rule in `AGENTS.md`
40
+
3
41
  ## 0.11.0 - 2026-04-24
4
42
 
5
43
  ### Removed
package/bin/omni.d.ts ADDED
@@ -0,0 +1,22 @@
1
+ export function getOmniPackageDir(): string;
2
+ export function resolvePiCliPath(): string;
3
+ export function buildOmniEnvironment(
4
+ baseEnv?: NodeJS.ProcessEnv,
5
+ ): NodeJS.ProcessEnv;
6
+ export function ensureQuietStartupDefault(baseEnv?: NodeJS.ProcessEnv): void;
7
+ export function buildPiProcessSpec(
8
+ argv?: string[],
9
+ baseEnv?: NodeJS.ProcessEnv,
10
+ ): {
11
+ command: string;
12
+ args: string[];
13
+ env: NodeJS.ProcessEnv;
14
+ };
15
+ export function runOmni(
16
+ argv?: string[],
17
+ options?: { cwd?: string; env?: NodeJS.ProcessEnv },
18
+ ): Promise<number>;
19
+ export function isOmniEntrypointInvocation(
20
+ argvPath?: string,
21
+ moduleUrl?: string,
22
+ ): boolean;
package/bin/omni.js CHANGED
@@ -1,11 +1,45 @@
1
1
  #!/usr/bin/env node
2
2
 
3
3
  import { spawn } from "node:child_process";
4
- import { mkdirSync, readFileSync, realpathSync, writeFileSync } from "node:fs";
4
+ import { randomBytes } from "node:crypto";
5
+ import {
6
+ chmodSync,
7
+ mkdirSync,
8
+ readFileSync,
9
+ realpathSync,
10
+ renameSync,
11
+ statSync,
12
+ unlinkSync,
13
+ writeFileSync,
14
+ } from "node:fs";
5
15
  import os from "node:os";
6
16
  import path from "node:path";
7
17
  import { fileURLToPath } from "node:url";
8
18
 
19
+ function writeFileAtomicSync(filePath, content) {
20
+ const tempPath = `${filePath}.${randomBytes(6).toString("hex")}.tmp`;
21
+ let mode;
22
+ try {
23
+ mode = statSync(filePath).mode & 0o777;
24
+ } catch {
25
+ // New file: keep Node's default creation mode.
26
+ }
27
+ try {
28
+ writeFileSync(tempPath, content, "utf8");
29
+ if (mode !== undefined) {
30
+ chmodSync(tempPath, mode);
31
+ }
32
+ renameSync(tempPath, filePath);
33
+ } catch (error) {
34
+ try {
35
+ unlinkSync(tempPath);
36
+ } catch {
37
+ // temp may not exist
38
+ }
39
+ throw error;
40
+ }
41
+ }
42
+
9
43
  export function getOmniPackageDir() {
10
44
  return path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
11
45
  }
@@ -22,8 +56,12 @@ export function resolvePiCliPath() {
22
56
  }
23
57
 
24
58
  export function buildOmniEnvironment(baseEnv = process.env) {
59
+ // Pi has its own update prompt; Omni-Pi runs its own (registerUpdater).
60
+ // Suppress Pi's check at the launcher boundary so the Omni updater is
61
+ // the only one that surfaces upgrade prompts.
25
62
  return {
26
63
  ...baseEnv,
64
+ PI_SKIP_VERSION_CHECK: "1",
27
65
  };
28
66
  }
29
67
 
@@ -56,10 +94,9 @@ export function ensureQuietStartupDefault(baseEnv = process.env) {
56
94
  typeof parsed === "object" &&
57
95
  parsed.quietStartup === undefined
58
96
  ) {
59
- writeFileSync(
97
+ writeFileAtomicSync(
60
98
  settingsFile,
61
99
  `${JSON.stringify({ ...parsed, quietStartup: true }, null, 2)}\n`,
62
- "utf8",
63
100
  );
64
101
  }
65
102
  } catch (error) {
@@ -73,10 +110,9 @@ export function ensureQuietStartupDefault(baseEnv = process.env) {
73
110
  }
74
111
 
75
112
  mkdirSync(agentDir, { recursive: true });
76
- writeFileSync(
113
+ writeFileAtomicSync(
77
114
  settingsFile,
78
115
  `${JSON.stringify({ quietStartup: true }, null, 2)}\n`,
79
- "utf8",
80
116
  );
81
117
  }
82
118
  }
@@ -96,7 +132,7 @@ export async function runOmni(argv = process.argv.slice(2), options = {}) {
96
132
  ensureQuietStartupDefault(options.env);
97
133
  const spec = buildPiProcessSpec(argv, options.env);
98
134
 
99
- await new Promise((resolve, reject) => {
135
+ return await new Promise((resolve, reject) => {
100
136
  const child = spawn(spec.command, spec.args, {
101
137
  cwd: options.cwd ?? process.cwd(),
102
138
  env: spec.env,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "omni-pi",
3
- "version": "0.11.0",
3
+ "version": "0.12.0",
4
4
  "description": "Single-agent Pi package that interviews the user, documents the spec, and implements work in bounded slices.",
5
5
  "type": "module",
6
6
  "license": "MIT",
@@ -51,10 +51,10 @@
51
51
  "prepublishOnly": "npm run verify"
52
52
  },
53
53
  "devDependencies": {
54
- "@biomejs/biome": "2.4.9",
55
- "@types/node": "^24.12.0",
56
- "typescript": "^5.9.3",
57
- "vitest": "^3.2.4"
54
+ "@biomejs/biome": "2.4.13",
55
+ "@types/node": "^25.6.0",
56
+ "typescript": "^6.0.3",
57
+ "vitest": "^4.1.5"
58
58
  },
59
59
  "pi": {
60
60
  "extensions": [
@@ -79,14 +79,14 @@
79
79
  ]
80
80
  },
81
81
  "dependencies": {
82
- "@anthropic-ai/claude-agent-sdk": "0.2.84",
82
+ "@anthropic-ai/claude-agent-sdk": "^0.2.119",
83
83
  "@juanibiapina/pi-extension-settings": "^0.6.1",
84
- "@juanibiapina/pi-powerbar": "^0.8.0",
85
- "@mariozechner/pi-coding-agent": "^0.70.0",
86
- "glimpseui": "^0.7.0",
84
+ "@juanibiapina/pi-powerbar": "^0.9.1",
85
+ "@mariozechner/pi-coding-agent": "^0.70.2",
86
+ "glimpseui": "^0.8.0",
87
87
  "pi-diff-review": "file:./vendor/pi-diff-review",
88
- "pi-interview": "^0.6.2",
89
- "pi-prompt-template-model": "^0.8.2",
88
+ "pi-interview": "^0.8.6",
89
+ "pi-prompt-template-model": "^0.9.1",
90
90
  "pi-web-access": "^0.10.6",
91
91
  "zod": "^4.3.6"
92
92
  },
package/src/atomic.ts ADDED
@@ -0,0 +1,94 @@
1
+ import { randomBytes } from "node:crypto";
2
+ import {
3
+ chmodSync,
4
+ renameSync,
5
+ statSync,
6
+ unlinkSync,
7
+ writeFileSync,
8
+ } from "node:fs";
9
+ import {
10
+ chmod,
11
+ mkdir,
12
+ rename,
13
+ stat,
14
+ unlink,
15
+ writeFile,
16
+ } from "node:fs/promises";
17
+ import path from "node:path";
18
+
19
+ // Atomic write helpers. Writes to a sibling temp file in the same
20
+ // directory, then renames over the destination. POSIX rename(2) is
21
+ // atomic when the source and destination are on the same filesystem,
22
+ // so a reader (or a concurrent writer) never observes a partially
23
+ // written state file. On Windows the rename is also atomic when the
24
+ // destination exists.
25
+ //
26
+ // The temp suffix includes a random nonce so two concurrent writers
27
+ // to the same path don't collide on their temp files.
28
+
29
+ function tempPathFor(filePath: string): string {
30
+ return `${filePath}.${randomBytes(6).toString("hex")}.tmp`;
31
+ }
32
+
33
+ async function existingMode(filePath: string): Promise<number | undefined> {
34
+ try {
35
+ return (await stat(filePath)).mode & 0o777;
36
+ } catch {
37
+ return undefined;
38
+ }
39
+ }
40
+
41
+ function existingModeSync(filePath: string): number | undefined {
42
+ try {
43
+ return statSync(filePath).mode & 0o777;
44
+ } catch {
45
+ return undefined;
46
+ }
47
+ }
48
+
49
+ export async function writeFileAtomic(
50
+ filePath: string,
51
+ content: string | Uint8Array,
52
+ ): Promise<void> {
53
+ await mkdir(path.dirname(filePath), { recursive: true });
54
+ const tempPath = tempPathFor(filePath);
55
+ const mode = await existingMode(filePath);
56
+ try {
57
+ await writeFile(tempPath, content, "utf8");
58
+ if (mode !== undefined) {
59
+ await chmod(tempPath, mode);
60
+ }
61
+ await rename(tempPath, filePath);
62
+ } catch (error) {
63
+ // Best-effort cleanup of the orphaned temp file; ignore failure
64
+ // since the original error is what matters.
65
+ try {
66
+ await unlink(tempPath);
67
+ } catch {
68
+ /* temp may not exist */
69
+ }
70
+ throw error;
71
+ }
72
+ }
73
+
74
+ export function writeFileAtomicSync(
75
+ filePath: string,
76
+ content: string | Uint8Array,
77
+ ): void {
78
+ const tempPath = tempPathFor(filePath);
79
+ const mode = existingModeSync(filePath);
80
+ try {
81
+ writeFileSync(tempPath, content, "utf8");
82
+ if (mode !== undefined) {
83
+ chmodSync(tempPath, mode);
84
+ }
85
+ renameSync(tempPath, filePath);
86
+ } catch (error) {
87
+ try {
88
+ unlinkSync(tempPath);
89
+ } catch {
90
+ /* temp may not exist */
91
+ }
92
+ throw error;
93
+ }
94
+ }
package/src/config.ts CHANGED
@@ -1,6 +1,7 @@
1
- import { mkdir, readFile, writeFile } from "node:fs/promises";
1
+ import { mkdir, readFile } from "node:fs/promises";
2
2
  import path from "node:path";
3
3
 
4
+ import { writeFileAtomic } from "./atomic.js";
4
5
  import type { OmniConfig } from "./contracts.js";
5
6
  import { AVAILABLE_MODELS } from "./providers.js";
6
7
 
@@ -84,7 +85,7 @@ export async function writeConfig(
84
85
  ): Promise<void> {
85
86
  const configPath = path.join(rootDir, CONFIG_PATH);
86
87
  await mkdir(path.dirname(configPath), { recursive: true });
87
- await writeFile(configPath, renderConfigContent(config), "utf8");
88
+ await writeFileAtomic(configPath, renderConfigContent(config));
88
89
  }
89
90
 
90
91
  export async function updateModelConfig(
package/src/header.ts CHANGED
@@ -40,7 +40,7 @@ export function centerIn(text: string, width: number): string {
40
40
  return " ".repeat(pad) + text;
41
41
  }
42
42
 
43
- function readVersion(): string {
43
+ function loadVersion(): string {
44
44
  try {
45
45
  const pkgPath = path.resolve(
46
46
  path.dirname(fileURLToPath(import.meta.url)),
@@ -56,16 +56,19 @@ function readVersion(): string {
56
56
  }
57
57
  }
58
58
 
59
+ // Read once at module load — package.json doesn't change while the
60
+ // process is running, so re-parsing on every render is wasted work.
61
+ const VERSION = loadVersion();
62
+
59
63
  export function pickWelcome(): string {
60
64
  return WELCOME_MESSAGES[Math.floor(Math.random() * WELCOME_MESSAGES.length)];
61
65
  }
62
66
 
63
67
  export function renderHeader(theme: Theme): Text {
64
- const version = readVersion();
65
68
  const welcome = pickWelcome();
66
69
 
67
70
  const logo = ASCII_LOGO.map((line) => brand(line)).join("\n");
68
- const subtitleText = `— P I v${version} —`;
71
+ const subtitleText = `— P I v${VERSION} —`;
69
72
  const subtitle = theme.fg("muted", centerIn(subtitleText, LOGO_WIDTH));
70
73
  const taglineText = "plan · build · verify";
71
74
  const tagline = theme.fg("muted", centerIn(taglineText, LOGO_WIDTH));
package/src/planning.ts CHANGED
@@ -162,6 +162,9 @@ function buildBootstrapTasks(repoSignals: RepoSignals): TaskBrief[] {
162
162
  return tasks;
163
163
  }
164
164
 
165
+ const RELATION_OVERLAP_THRESHOLD = 0.34;
166
+ const RELATION_SMALL_SET_LIMIT = 3;
167
+
165
168
  const RELATION_STOPWORDS = new Set([
166
169
  "a",
167
170
  "an",
@@ -245,13 +248,20 @@ export function isRequestRelated(
245
248
  const overlap = [...currentTokens].filter((token) =>
246
249
  previousTokens.has(token),
247
250
  );
248
- if (overlap.length >= 1) {
249
- return true;
251
+ if (overlap.length === 0) {
252
+ return false;
250
253
  }
251
254
 
252
- return (
253
- overlap.length / Math.min(previousTokens.size, currentTokens.size) >= 0.34
254
- );
255
+ // Compare against the smaller token set: short follow-up summaries
256
+ // ("auth bug fix") have very few tokens, so even one match is meaningful
257
+ // there. With more text on either side, demand a real overlap ratio so a
258
+ // single incidental shared word ("users", "config") doesn't keep an
259
+ // unrelated plan alive.
260
+ const smaller = Math.min(previousTokens.size, currentTokens.size);
261
+ if (smaller <= RELATION_SMALL_SET_LIMIT) {
262
+ return true;
263
+ }
264
+ return overlap.length / smaller >= RELATION_OVERLAP_THRESHOLD;
255
265
  }
256
266
 
257
267
  export function createInitialSpec(
package/src/plans.ts CHANGED
@@ -1,6 +1,7 @@
1
- import { mkdir, readFile, unlink, writeFile } from "node:fs/promises";
1
+ import { mkdir, readFile, unlink } from "node:fs/promises";
2
2
  import path from "node:path";
3
3
 
4
+ import { writeFileAtomic } from "./atomic.js";
4
5
  import type { PlanEntry, PlanStatus } from "./contracts.js";
5
6
  import { OMNI_DIR } from "./contracts.js";
6
7
 
@@ -109,7 +110,7 @@ async function writeIndex(
109
110
  entries: PlanEntry[],
110
111
  ): Promise<void> {
111
112
  await ensurePlansDir(rootDir);
112
- await writeFile(indexPath(rootDir), renderIndex(entries), "utf8");
113
+ await writeFileAtomic(indexPath(rootDir), renderIndex(entries));
113
114
  }
114
115
 
115
116
  export async function createPlan(
@@ -123,10 +124,9 @@ export async function createPlan(
123
124
  const entry: PlanEntry = { id, title, status: "active", createdAt };
124
125
 
125
126
  await ensurePlansDir(rootDir);
126
- await writeFile(
127
+ await writeFileAtomic(
127
128
  planFilePath(rootDir, id),
128
129
  renderPlanFile(entry, description, tasks),
129
- "utf8",
130
130
  );
131
131
 
132
132
  const entries = await readPlanIndex(rootDir);
@@ -157,7 +157,7 @@ export async function updatePlanStatus(
157
157
  const filePath = planFilePath(rootDir, planId);
158
158
  const content = await readFile(filePath, "utf8");
159
159
  const updated = content.replace(/^Status:\s*.+$/mu, `Status: ${status}`);
160
- await writeFile(filePath, updated, "utf8");
160
+ await writeFileAtomic(filePath, updated);
161
161
  } catch {
162
162
  // file may have been cleaned up already
163
163
  }
@@ -198,13 +198,12 @@ export async function appendProgress(
198
198
 
199
199
  try {
200
200
  const content = await readFile(filePath, "utf8");
201
- await writeFile(filePath, `${content.trimEnd()}\n${bullet}\n`, "utf8");
201
+ await writeFileAtomic(filePath, `${content.trimEnd()}\n${bullet}\n`);
202
202
  } catch {
203
203
  await mkdir(path.dirname(filePath), { recursive: true });
204
- await writeFile(
204
+ await writeFileAtomic(
205
205
  filePath,
206
206
  `# Progress\n\nOngoing log of project progress.\n\n${bullet}\n`,
207
- "utf8",
208
207
  );
209
208
  }
210
209
  }
@@ -1,13 +1,9 @@
1
- import {
2
- existsSync,
3
- mkdirSync,
4
- readFileSync,
5
- rmSync,
6
- writeFileSync,
7
- } from "node:fs";
1
+ import { existsSync, mkdirSync, readFileSync, rmSync } from "node:fs";
8
2
  import os from "node:os";
9
3
  import path from "node:path";
10
4
 
5
+ import { writeFileAtomicSync } from "./atomic.js";
6
+
11
7
  const MANAGED_PROMPT_FILES = ["commit.md", "push.md"] as const;
12
8
  const MANAGED_SUBDIR = "omni-pi";
13
9
  const LEGACY_MANAGED_SUBDIRS = ["zz-omni-pi"] as const;
@@ -55,7 +51,7 @@ export function ensureBundledPromptTemplates(
55
51
  continue;
56
52
  }
57
53
 
58
- writeFileSync(targetPath, nextContent, "utf8");
54
+ writeFileAtomicSync(targetPath, nextContent);
59
55
  written.push(targetPath);
60
56
  }
61
57
 
@@ -2,7 +2,7 @@ import path from "node:path";
2
2
 
3
3
  export const REPO_MAP_DIR = path.join(".pi", "repo-map");
4
4
  export const REPO_MAP_STATE_FILE = path.join(REPO_MAP_DIR, "state.json");
5
- export const REPO_MAP_SCHEMA_VERSION = 1;
5
+ export const REPO_MAP_SCHEMA_VERSION = 2;
6
6
 
7
7
  export type RepoMapParserStatus =
8
8
  | "indexed"
@@ -32,13 +32,17 @@ export interface RepoMapImport {
32
32
  resolvedPath?: string;
33
33
  }
34
34
 
35
+ export type RepoMapFingerprint =
36
+ | { kind: "hash"; value: string }
37
+ | { kind: "stat"; value: string };
38
+
35
39
  export interface RepoMapFileRecord {
36
40
  path: string;
37
41
  language: string;
38
42
  parserStatus: RepoMapParserStatus;
39
43
  size: number;
40
44
  mtimeMs: number;
41
- fingerprint: string;
45
+ fingerprint: RepoMapFingerprint;
42
46
  indexedAt: string;
43
47
  firstIndexedAt: string;
44
48
  symbols: RepoMapSymbol[];
@@ -71,7 +75,7 @@ export interface RepoMapSignal {
71
75
 
72
76
  export interface RepoMapSessionState {
73
77
  signals: RepoMapSignal[];
74
- dirtyPaths: Set<string>;
78
+ dirtyPaths: Map<string, number>;
75
79
  }
76
80
 
77
81
  export interface RepoMapRenderOptions {
@@ -108,7 +108,9 @@ function isIgnored(
108
108
  return false;
109
109
  }
110
110
  if (rule.pattern.includes("/")) {
111
- const target = rule.anchored ? normalized : normalized;
111
+ // Both anchored and non-anchored rules match against the full
112
+ // normalized path; anchoring is expressed in the regex / string
113
+ // comparison shape below, not in the target.
112
114
  if (rule.pattern.includes("*")) {
113
115
  const escaped = rule.pattern
114
116
  .replace(/[.+^${}()|[\]\\]/gu, "\\$&")
@@ -116,13 +118,14 @@ function isIgnored(
116
118
  const regex = rule.anchored
117
119
  ? new RegExp(`^${escaped}(?:/.*)?$`, "u")
118
120
  : new RegExp(`(?:^|/)${escaped}(?:/.*)?$`, "u");
119
- return regex.test(target);
121
+ return regex.test(normalized);
120
122
  }
121
123
  return rule.anchored
122
- ? target === rule.pattern || target.startsWith(`${rule.pattern}/`)
123
- : target === rule.pattern ||
124
- target.includes(`/${rule.pattern}`) ||
125
- target.startsWith(`${rule.pattern}/`);
124
+ ? normalized === rule.pattern ||
125
+ normalized.startsWith(`${rule.pattern}/`)
126
+ : normalized === rule.pattern ||
127
+ normalized.includes(`/${rule.pattern}`) ||
128
+ normalized.startsWith(`${rule.pattern}/`);
126
129
  }
127
130
  return segments.some((segment) =>
128
131
  matchSingleSegment(rule.pattern, segment),
@@ -264,8 +267,12 @@ function parseModuleFile(
264
267
  }
265
268
 
266
269
  const imports: RepoMapImport[] = [];
270
+ // Use [^;]*? rather than [\s\S]*? so a missing `from` on one statement
271
+ // can't lazily expand into the next one and merge two imports into a
272
+ // single match. Multi-line imports are still allowed because they don't
273
+ // contain semicolons until the terminating one.
267
274
  const importRegex =
268
- /^(?:import\s+[\s\S]*?\s+from\s+|export\s+[\s\S]*?\s+from\s+)["']([^"']+)["'];?/gmu;
275
+ /^(?:import\s+[^;]*?\s+from\s+|export\s+[^;]*?\s+from\s+)["']([^"']+)["'];?/gmu;
269
276
  for (const match of content.matchAll(importRegex)) {
270
277
  const specifier = match[1]?.trim();
271
278
  if (!specifier) continue;
@@ -354,7 +361,7 @@ export async function indexRepoMapFile(
354
361
  parserStatus: parsed.parserStatus,
355
362
  size: stats.size,
356
363
  mtimeMs: stats.mtimeMs,
357
- fingerprint: hashContent(content),
364
+ fingerprint: { kind: "hash", value: hashContent(content) },
358
365
  indexedAt: now,
359
366
  firstIndexedAt: previous?.firstIndexedAt ?? now,
360
367
  symbols: parsed.symbols,
@@ -384,6 +391,34 @@ function rebuildIncomingPaths(files: Record<string, RepoMapFileRecord>): void {
384
391
  }
385
392
  }
386
393
 
394
+ const STAT_CONCURRENCY = 32;
395
+
396
+ async function statInPool(
397
+ rootDir: string,
398
+ filePaths: readonly string[],
399
+ ): Promise<Map<string, { size: number; mtimeMs: number } | null>> {
400
+ const results = new Map<string, { size: number; mtimeMs: number } | null>();
401
+ let cursor = 0;
402
+
403
+ async function worker(): Promise<void> {
404
+ while (cursor < filePaths.length) {
405
+ const index = cursor;
406
+ cursor += 1;
407
+ const filePath = filePaths[index];
408
+ try {
409
+ const stats = await stat(path.join(rootDir, filePath));
410
+ results.set(filePath, { size: stats.size, mtimeMs: stats.mtimeMs });
411
+ } catch {
412
+ results.set(filePath, null);
413
+ }
414
+ }
415
+ }
416
+
417
+ const workerCount = Math.min(STAT_CONCURRENCY, filePaths.length);
418
+ await Promise.all(Array.from({ length: workerCount }, () => worker()));
419
+ return results;
420
+ }
421
+
387
422
  export async function refreshRepoMapState(
388
423
  rootDir: string,
389
424
  dirtyPaths: Iterable<string> = [],
@@ -395,11 +430,32 @@ export async function refreshRepoMapState(
395
430
  const nextFiles: Record<string, RepoMapFileRecord> = {};
396
431
  const indexedPaths: string[] = [];
397
432
  const reusedPaths: string[] = [];
433
+ const statResults = await statInPool(rootDir, discovered);
398
434
 
399
435
  for (const filePath of discovered) {
400
436
  const previousRecord = previous.files[filePath];
401
- const absolutePath = path.join(rootDir, filePath);
402
- const stats = await stat(absolutePath);
437
+ let stats = statResults.get(filePath);
438
+ if (!stats) {
439
+ // File disappeared between discovery and stat — treat as removed.
440
+ continue;
441
+ }
442
+ if (
443
+ previousRecord &&
444
+ previous.schemaVersion === REPO_MAP_SCHEMA_VERSION &&
445
+ !dirtySet.has(filePath) &&
446
+ previousRecord.mtimeMs === stats.mtimeMs &&
447
+ previousRecord.size === stats.size
448
+ ) {
449
+ try {
450
+ // The pooled stat pass is intentionally early for throughput. Re-stat
451
+ // cache-hit candidates immediately before reuse so edits that land
452
+ // during the pool window don't keep stale repo-map records alive.
453
+ const freshStats = await stat(path.join(rootDir, filePath));
454
+ stats = { size: freshStats.size, mtimeMs: freshStats.mtimeMs };
455
+ } catch {
456
+ continue;
457
+ }
458
+ }
403
459
  const unchanged =
404
460
  previousRecord &&
405
461
  previousRecord.mtimeMs === stats.mtimeMs &&
@@ -433,7 +489,10 @@ export async function refreshRepoMapState(
433
489
  parserStatus: "parse-fallback",
434
490
  size: stats.size,
435
491
  mtimeMs: stats.mtimeMs,
436
- fingerprint: `${stats.size}:${stats.mtimeMs}`,
492
+ fingerprint: {
493
+ kind: "stat",
494
+ value: `${stats.size}:${stats.mtimeMs}`,
495
+ },
437
496
  indexedAt: new Date().toISOString(),
438
497
  firstIndexedAt:
439
498
  previousRecord?.firstIndexedAt ?? new Date().toISOString(),
@@ -20,7 +20,7 @@ const sessionState = new Map<string, RepoMapSessionState>();
20
20
  function getSessionState(rootDir: string): RepoMapSessionState {
21
21
  let state = sessionState.get(rootDir);
22
22
  if (!state) {
23
- state = { signals: [], dirtyPaths: new Set<string>() };
23
+ state = { signals: [], dirtyPaths: new Map<string, number>() };
24
24
  sessionState.set(rootDir, state);
25
25
  }
26
26
  return state;
@@ -40,7 +40,10 @@ export function recordRepoMapSignal(
40
40
  state.signals.unshift({ type, path: normalized, timestamp: Date.now() });
41
41
  state.signals = state.signals.slice(0, SESSION_RETENTION);
42
42
  if (type === "edit" || type === "write") {
43
- state.dirtyPaths.add(normalized);
43
+ state.dirtyPaths.set(
44
+ normalized,
45
+ (state.dirtyPaths.get(normalized) ?? 0) + 1,
46
+ );
44
47
  }
45
48
  }
46
49
 
@@ -49,11 +52,17 @@ export function warmRepoMap(rootDir: string): Promise<RepoMapRefreshResult> {
49
52
  if (existing) {
50
53
  return existing;
51
54
  }
52
- const task = refreshRepoMapState(
53
- rootDir,
54
- getSessionState(rootDir).dirtyPaths,
55
- ).finally(() => {
56
- getSessionState(rootDir).dirtyPaths.clear();
55
+ // Snapshot dirty paths up front. New edits arriving during the refresh
56
+ // stay in the live Map so they kick off the next refresh — without the
57
+ // snapshot, .finally().clear() would also drop those concurrent edits.
58
+ const snapshot = new Map(getSessionState(rootDir).dirtyPaths);
59
+ const task = refreshRepoMapState(rootDir, snapshot.keys()).finally(() => {
60
+ const live = getSessionState(rootDir).dirtyPaths;
61
+ for (const [dirty, generation] of snapshot) {
62
+ if (live.get(dirty) === generation) {
63
+ live.delete(dirty);
64
+ }
65
+ }
57
66
  warmups.delete(rootDir);
58
67
  });
59
68
  warmups.set(rootDir, task);