@oh-my-pi/pi-coding-agent 14.7.7 → 14.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,25 @@
2
2
 
3
3
  ## [Unreleased]
4
4
 
5
+ ## [14.8.0] - 2026-05-09
6
+ ### Added
7
+
8
+ - Added hashline stale-anchor recovery: when anchors no longer match the on-disk file, edits are replayed against a session-scoped `read`/`search` snapshot and the result is 3-way-merged onto the current file
9
+
10
+ ### Fixed
11
+
12
+ - Fixed legacy pi extensions failing to import their own bare-specifier dependencies (e.g. `import x from "pkg"`): files loaded via the `omp-legacy-pi-file:` namespace now pre-resolve bare imports against the extension's directory so the extension's own `node_modules` is honored.
13
+
14
+ ### Changed
15
+
16
+ - Changed hashline success output to include a warning when stale-anchor recovery is used
17
+
18
+ ## [14.7.8] - 2026-05-08
19
+
20
+ ### Fixed
21
+
22
+ - Fixed indefinite startup hang on large repos introduced in 14.7.6 ([#975](https://github.com/can1357/oh-my-pi/issues/975)) on two fronts: (1) `createAgentSession` was awaiting `buildAgentsMdSearch` and `buildWorkspaceTree` directly in its blocking `Promise.all`, bypassing the existing 5s preparation deadline that previously protected startup — both scans are now raced against a 5s deadline and fall back to the system-prompt fallback path on timeout; (2) `buildWorkspaceTree` now derives its listing from `git ls-files --cached --others --exclude-standard` when the workspace is a git worktree, which is O(index size) and avoids the per-call full-tree gitignore-aware native scan that the previous implementation triggered. Repos without git, or where the call fails / times out, transparently fall back to the previous native-glob path.
23
+
5
24
  ## [14.7.6] - 2026-05-07
6
25
  ### Changed
7
26
 
package/README.md CHANGED
@@ -12,6 +12,7 @@ Package-specific references:
12
12
  - [MCP runtime lifecycle](../../docs/mcp-runtime-lifecycle.md)
13
13
  - [MCP server/tool authoring](../../docs/mcp-server-tool-authoring.md)
14
14
  - [DEVELOPMENT](./DEVELOPMENT.md)
15
+ - [RenderMermaid guide](../../docs/render-mermaid.md)
15
16
 
16
17
  ## Memory backends
17
18
 
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "type": "module",
3
3
  "name": "@oh-my-pi/pi-coding-agent",
4
- "version": "14.7.7",
4
+ "version": "14.8.0",
5
5
  "description": "Coding agent CLI with read, bash, edit, write tools and session management",
6
6
  "homepage": "https://github.com/can1357/oh-my-pi",
7
7
  "author": "Can Boluk",
@@ -46,12 +46,12 @@
46
46
  "dependencies": {
47
47
  "@agentclientprotocol/sdk": "0.21.0",
48
48
  "@mozilla/readability": "^0.6.0",
49
- "@oh-my-pi/omp-stats": "14.7.7",
50
- "@oh-my-pi/pi-agent-core": "14.7.7",
51
- "@oh-my-pi/pi-ai": "14.7.7",
52
- "@oh-my-pi/pi-natives": "14.7.7",
53
- "@oh-my-pi/pi-tui": "14.7.7",
54
- "@oh-my-pi/pi-utils": "14.7.7",
49
+ "@oh-my-pi/omp-stats": "14.8.0",
50
+ "@oh-my-pi/pi-agent-core": "14.8.0",
51
+ "@oh-my-pi/pi-ai": "14.8.0",
52
+ "@oh-my-pi/pi-natives": "14.8.0",
53
+ "@oh-my-pi/pi-tui": "14.8.0",
54
+ "@oh-my-pi/pi-utils": "14.8.0",
55
55
  "@puppeteer/browsers": "^2.13.0",
56
56
  "@sinclair/typebox": "^0.34.49",
57
57
  "@types/turndown": "5.0.6",
@@ -266,10 +266,11 @@ async function runSplitCommit(
266
266
  throw new Error(order.error);
267
267
  }
268
268
 
269
+ const stagedDiff = await git.diff(ctx.cwd, { cached: true });
269
270
  await git.stage.reset(ctx.cwd);
270
271
  for (const commitIndex of order) {
271
272
  const commit = plan.commits[commitIndex];
272
- await git.stage.hunks(ctx.cwd, commit.changes);
273
+ await git.stage.hunks(ctx.cwd, commit.changes, { rawDiff: stagedDiff, diffCached: true });
273
274
  const analysis: ConventionalAnalysis = {
274
275
  type: commit.type,
275
276
  scope: commit.scope,
@@ -257,7 +257,12 @@ const ModelOverrideSchema = Type.Object({
257
257
  type ModelOverride = Static<typeof ModelOverrideSchema>;
258
258
 
259
259
  const ProviderDiscoverySchema = Type.Object({
260
- type: Type.Union([Type.Literal("ollama"), Type.Literal("llama.cpp"), Type.Literal("lm-studio")]),
260
+ type: Type.Union([
261
+ Type.Literal("ollama"),
262
+ Type.Literal("llama.cpp"),
263
+ Type.Literal("lm-studio"),
264
+ Type.Literal("openai-models-list"),
265
+ ]),
261
266
  });
262
267
 
263
268
  const ProviderAuthSchema = Type.Union([Type.Literal("apiKey"), Type.Literal("none"), Type.Literal("oauth")]);
@@ -435,7 +440,7 @@ interface DiscoveryProviderConfig {
435
440
  optional?: boolean;
436
441
  }
437
442
 
438
- export type ProviderDiscoveryStatus = "idle" | "ok" | "cached" | "unavailable" | "unauthenticated";
443
+ export type ProviderDiscoveryStatus = "idle" | "ok" | "empty" | "cached" | "unavailable" | "unauthenticated";
439
444
 
440
445
  export interface ProviderDiscoveryState {
441
446
  provider: string;
@@ -1378,11 +1383,13 @@ export class ModelRegistry {
1378
1383
  ? result.models.length > 0
1379
1384
  ? "cached"
1380
1385
  : "unavailable"
1381
- : result.models.length > 0 && strategy !== "offline"
1382
- ? "ok"
1383
- : cached
1386
+ : strategy === "offline"
1387
+ ? cached
1384
1388
  ? "cached"
1385
- : "idle";
1389
+ : "idle"
1390
+ : result.models.length > 0
1391
+ ? "ok"
1392
+ : "empty";
1386
1393
  this.#providerDiscoveryStates.set(providerId, {
1387
1394
  provider: providerId,
1388
1395
  status,
@@ -1411,7 +1418,8 @@ export class ModelRegistry {
1411
1418
  case "llama.cpp":
1412
1419
  return this.#discoverLlamaCppModels(providerConfig);
1413
1420
  case "lm-studio":
1414
- return this.#discoverLmStudioModels(providerConfig);
1421
+ case "openai-models-list":
1422
+ return this.#discoverOpenAIModelsList(providerConfig);
1415
1423
  }
1416
1424
  }
1417
1425
 
@@ -1706,8 +1714,8 @@ export class ModelRegistry {
1706
1714
  return this.#applyProviderModelOverrides(providerConfig.provider, discovered);
1707
1715
  }
1708
1716
 
1709
- async #discoverLmStudioModels(providerConfig: DiscoveryProviderConfig): Promise<Model<Api>[]> {
1710
- const baseUrl = this.#normalizeLmStudioBaseUrl(providerConfig.baseUrl);
1717
+ async #discoverOpenAIModelsList(providerConfig: DiscoveryProviderConfig): Promise<Model<Api>[]> {
1718
+ const baseUrl = this.#normalizeOpenAIModelsListBaseUrl(providerConfig.baseUrl);
1711
1719
  const modelsUrl = `${baseUrl}/models`;
1712
1720
 
1713
1721
  const headers: Record<string, string> = { ...(providerConfig.headers ?? {}) };
@@ -1777,7 +1785,7 @@ export class ModelRegistry {
1777
1785
  }
1778
1786
  }
1779
1787
 
1780
- #normalizeLmStudioBaseUrl(baseUrl?: string): string {
1788
+ #normalizeOpenAIModelsListBaseUrl(baseUrl?: string): string {
1781
1789
  const defaultBaseUrl = "http://127.0.0.1:1234/v1";
1782
1790
  const raw = baseUrl || defaultBaseUrl;
1783
1791
  try {
@@ -0,0 +1,95 @@
1
+ /**
2
+ * Per-session cache of file contents as they were rendered to the model by
3
+ * the `read` and `search` tools in the current agent session.
4
+ *
5
+ * Used by hashline-mode anchor-stale recovery: if the model authored anchors
6
+ * against a version of the file that no longer matches what is on disk —
7
+ * because a subagent, the user, a linter, or a formatter modified the file
8
+ * between the read and the edit — we replay the edits against the cached
9
+ * pre-edit snapshot and 3-way-merge the result onto the live file.
10
+ *
11
+ * Scoped per `ToolSession`: the cache lives on the session object itself, so
12
+ * different sessions never share snapshots and entries get reclaimed when
13
+ * the session goes out of scope. Each session keeps a small LRU window of
14
+ * paths; the cache always reflects what *this* session most recently saw,
15
+ * so it stays correct by construction even when this session writes the
16
+ * file itself — the next read after the write refreshes the entry.
17
+ */
18
+ import { LRUCache } from "lru-cache/raw";
19
+ import type { ToolSession } from "../tools";
20
+
21
+ const MAX_PATHS_PER_SESSION = 30;
22
+
23
+ export interface FileReadSnapshot {
24
+ /** 1-indexed line number → exact line content as observed by `read`/`search`. */
25
+ lines: Map<number, string>;
26
+ recordedAt: number;
27
+ }
28
+
29
+ export class FileReadCache {
30
+ #snapshots = new LRUCache<string, FileReadSnapshot>({ max: MAX_PATHS_PER_SESSION });
31
+
32
+ /** Look up the most recent snapshot for `absPath`, or `null` if absent. */
33
+ get(absPath: string): FileReadSnapshot | null {
34
+ return this.#snapshots.get(absPath) ?? null;
35
+ }
36
+
37
+ /** Record a contiguous run of lines (e.g. from a `read` tool). `startLine` is 1-indexed. */
38
+ recordContiguous(absPath: string, startLine: number, lines: readonly string[]): void {
39
+ if (lines.length === 0) return;
40
+ const entries: Array<readonly [number, string]> = lines.map((line, idx) => [startLine + idx, line] as const);
41
+ this.#record(absPath, entries);
42
+ }
43
+
44
+ /** Record sparse `(lineNumber, content)` pairs (e.g. `search` matches plus context). */
45
+ recordSparse(absPath: string, entries: Iterable<readonly [number, string]>): void {
46
+ const arr = Array.from(entries);
47
+ if (arr.length === 0) return;
48
+ this.#record(absPath, arr);
49
+ }
50
+
51
+ /** Drop the snapshot for a single path. */
52
+ invalidate(absPath: string): void {
53
+ this.#snapshots.delete(absPath);
54
+ }
55
+
56
+ /** Drop every snapshot. */
57
+ clear(): void {
58
+ this.#snapshots.clear();
59
+ }
60
+
61
+ #record(absPath: string, entries: ReadonlyArray<readonly [number, string]>): void {
62
+ const existing = this.#snapshots.get(absPath);
63
+ if (existing && hasConflict(existing.lines, entries)) {
64
+ // File content has changed since we last recorded. Drop the stale
65
+ // snapshot and start fresh with whatever we just observed.
66
+ this.#snapshots.set(absPath, { lines: new Map(entries), recordedAt: Date.now() });
67
+ return;
68
+ }
69
+ if (existing) {
70
+ for (const [lineNum, content] of entries) existing.lines.set(lineNum, content);
71
+ existing.recordedAt = Date.now();
72
+ // `get` above already touched LRU recency for this key.
73
+ return;
74
+ }
75
+ this.#snapshots.set(absPath, { lines: new Map(entries), recordedAt: Date.now() });
76
+ }
77
+ }
78
+
79
+ function hasConflict(existing: Map<number, string>, incoming: ReadonlyArray<readonly [number, string]>): boolean {
80
+ for (const [lineNum, content] of incoming) {
81
+ const prior = existing.get(lineNum);
82
+ if (prior !== undefined && prior !== content) return true;
83
+ }
84
+ return false;
85
+ }
86
+
87
+ /**
88
+ * Look up (or lazily create) the file-read cache attached to a session. The
89
+ * cache is stored as `session.fileReadCache` so it lives exactly as long as
90
+ * the session itself.
91
+ */
92
+ export function getFileReadCache(session: ToolSession): FileReadCache {
93
+ if (!session.fileReadCache) session.fileReadCache = new FileReadCache();
94
+ return session.fileReadCache;
95
+ }
package/src/edit/index.ts CHANGED
@@ -33,6 +33,7 @@ import { type EditToolDetails, type EditToolPerFileResult, getLspBatchRequest, t
33
33
  export { DEFAULT_EDIT_MODE, type EditMode, normalizeEditMode } from "../utils/edit-mode";
34
34
  export * from "./apply-patch";
35
35
  export * from "./diff";
36
+ export * from "./file-read-cache";
36
37
  export * from "./line-hash";
37
38
 
38
39
  // Resolve the `$HFMT$` and `$HSEP$` placeholders in the hashline Lark grammar.
@@ -31,6 +31,7 @@ import * as path from "node:path";
31
31
  import type { AgentToolResult } from "@oh-my-pi/pi-agent-core";
32
32
  import { isEnoent } from "@oh-my-pi/pi-utils";
33
33
  import { type Static, Type } from "@sinclair/typebox";
34
+ import * as Diff from "diff";
34
35
  import type { WritethroughCallback, WritethroughDeferredHandle } from "../../lsp";
35
36
  import type { ToolSession } from "../../tools";
36
37
  import { assertEditableFileContent } from "../../tools/auto-generated-guard";
@@ -40,6 +41,7 @@ import { resolveToCwd } from "../../tools/path-utils";
40
41
  import { enforcePlanModeWrite, resolvePlanPath } from "../../tools/plan-mode-guard";
41
42
  import { formatCodeFrameLine } from "../../tools/render-utils";
42
43
  import { generateDiffString } from "../diff";
44
+ import { type FileReadCache, getFileReadCache } from "../file-read-cache";
43
45
  import {
44
46
  computeLineHash,
45
47
  describeAnchorExamples,
@@ -1540,6 +1542,89 @@ export function applyHashlineEdits(
1540
1542
  };
1541
1543
  }
1542
1544
 
1545
+ // ───────────────────────────────────────────────────────────────────────────
1546
+ // 11b. Anchor-stale recovery via cached read snapshots
1547
+ //
1548
+ // When `applyHashlineEdits` rejects because some anchors no longer match the
1549
+ // current on-disk content, the model may still have authored those anchors
1550
+ // against a real, valid version of the file — one that was just rendered to
1551
+ // it by the `read` or `search` tool, before something else (a subagent, a
1552
+ // linter, the user) modified the file out-of-band.
1553
+ //
1554
+ // The cache in `file-read-cache.ts` keeps a small LRU snapshot of those
1555
+ // rendered lines. We use it to reconstruct that "previous version", re-apply
1556
+ // the edits against it, and then 3-way-merge the resulting diff back onto
1557
+ // the live file. If the merge cleanly lands, that becomes our output. If
1558
+ // it doesn't (or the cache doesn't even cover the failing anchors), we
1559
+ // surface the original mismatch error so the model sees the truth.
1560
+ // ───────────────────────────────────────────────────────────────────────────
1561
+
1562
+ export interface HashlineRecoveryArgs {
1563
+ cache: FileReadCache;
1564
+ absolutePath: string;
1565
+ currentText: string;
1566
+ edits: HashlineEdit[];
1567
+ options: HashlineApplyOptions;
1568
+ }
1569
+
1570
+ export interface HashlineRecoveryResult {
1571
+ lines: string;
1572
+ firstChangedLine: number | undefined;
1573
+ warnings: string[];
1574
+ }
1575
+
1576
+ const HASHLINE_RECOVERY_FUZZ_FACTOR = 3;
1577
+
1578
+ const HASHLINE_RECOVERY_WARNING =
1579
+ "Recovered from stale anchors using a previous read snapshot (file changed externally between read and edit).";
1580
+
1581
+ /**
1582
+ * Attempt to recover from a `HashlineMismatchError` by replaying the edits
1583
+ * against a cached pre-edit snapshot of the file and 3-way-merging the result
1584
+ * onto the current on-disk content. Returns `null` when no recovery is
1585
+ * possible — callers should propagate the original mismatch error in that
1586
+ * case.
1587
+ */
1588
+ export function tryRecoverHashlineWithCache(args: HashlineRecoveryArgs): HashlineRecoveryResult | null {
1589
+ const { cache, absolutePath, currentText, edits, options } = args;
1590
+ const snapshot = cache.get(absolutePath);
1591
+ if (!snapshot || snapshot.lines.size === 0) return null;
1592
+
1593
+ const overlaid = currentText.split("\n");
1594
+ let maxCachedLine = 0;
1595
+ for (const lineNum of snapshot.lines.keys()) {
1596
+ if (lineNum > maxCachedLine) maxCachedLine = lineNum;
1597
+ }
1598
+ while (overlaid.length < maxCachedLine) overlaid.push("");
1599
+ for (const [lineNum, content] of snapshot.lines) {
1600
+ overlaid[lineNum - 1] = content;
1601
+ }
1602
+ const previousText = overlaid.join("\n");
1603
+ if (previousText === currentText) return null;
1604
+
1605
+ let applied: HashlineApplyResult;
1606
+ try {
1607
+ applied = applyHashlineEdits(previousText, edits, options);
1608
+ } catch (err) {
1609
+ if (err instanceof HashlineMismatchError) return null;
1610
+ throw err;
1611
+ }
1612
+ if (applied.lines === previousText) return null;
1613
+
1614
+ const patch = Diff.structuredPatch("file", "file", previousText, applied.lines, "", "", { context: 3 });
1615
+ const merged = Diff.applyPatch(currentText, patch, { fuzzFactor: HASHLINE_RECOVERY_FUZZ_FACTOR });
1616
+ if (typeof merged !== "string" || merged === currentText) return null;
1617
+
1618
+ const mergedDiff = generateDiffString(currentText, merged);
1619
+ const recoveryWarnings = [HASHLINE_RECOVERY_WARNING, ...(applied.warnings ?? [])];
1620
+
1621
+ return {
1622
+ lines: merged,
1623
+ firstChangedLine: mergedDiff.firstChangedLine ?? applied.firstChangedLine,
1624
+ warnings: recoveryWarnings,
1625
+ };
1626
+ }
1627
+
1543
1628
  // ───────────────────────────────────────────────────────────────────────────
1544
1629
  // 12. Input splitting
1545
1630
  //
@@ -1751,6 +1836,39 @@ function getEditDetails(result: AgentToolResult<EditToolDetails>): EditToolDetai
1751
1836
  return result.details ?? { diff: "" };
1752
1837
  }
1753
1838
 
1839
+ /**
1840
+ * Apply hashline edits with anchor-stale recovery: on `HashlineMismatchError`,
1841
+ * consult the read-snapshot cache for the file and 3-way-merge the edits onto
1842
+ * the current text. If recovery succeeds, return the merged result with a
1843
+ * synthetic warning. Otherwise re-throw the original mismatch error.
1844
+ */
1845
+ function applyHashlineEditsWithRecovery(
1846
+ session: ToolSession,
1847
+ absolutePath: string,
1848
+ text: string,
1849
+ edits: HashlineEdit[],
1850
+ options: HashlineApplyOptions,
1851
+ ): HashlineApplyResult {
1852
+ try {
1853
+ return applyHashlineEdits(text, edits, options);
1854
+ } catch (err) {
1855
+ if (!(err instanceof HashlineMismatchError)) throw err;
1856
+ const recovered = tryRecoverHashlineWithCache({
1857
+ cache: getFileReadCache(session),
1858
+ absolutePath,
1859
+ currentText: text,
1860
+ edits,
1861
+ options,
1862
+ });
1863
+ if (!recovered) throw err;
1864
+ return {
1865
+ lines: recovered.lines,
1866
+ firstChangedLine: recovered.firstChangedLine,
1867
+ warnings: recovered.warnings,
1868
+ };
1869
+ }
1870
+ }
1871
+
1754
1872
  /**
1755
1873
  * Run all the front-end checks (notebook guard, parse, plan-mode check, file
1756
1874
  * load, edit application) without writing. Used to fail fast before applying
@@ -1769,7 +1887,13 @@ async function preflightHashlineSection(options: ExecuteHashlineSingleOptions &
1769
1887
 
1770
1888
  const { text } = stripBom(source.rawContent);
1771
1889
  const normalized = normalizeToLF(text);
1772
- const result = applyHashlineEdits(normalized, edits, getHashlineApplyOptions(session));
1890
+ const result = applyHashlineEditsWithRecovery(
1891
+ session,
1892
+ absolutePath,
1893
+ normalized,
1894
+ edits,
1895
+ getHashlineApplyOptions(session),
1896
+ );
1773
1897
  if (normalized === result.lines) throw new Error(formatNoChangeDiagnostic(sectionPath));
1774
1898
  }
1775
1899
 
@@ -1797,7 +1921,13 @@ async function executeHashlineSection(
1797
1921
  const { bom, text } = stripBom(source.rawContent);
1798
1922
  const originalEnding = detectLineEnding(text);
1799
1923
  const originalNormalized = normalizeToLF(text);
1800
- const result = applyHashlineEdits(originalNormalized, edits, getHashlineApplyOptions(session));
1924
+ const result = applyHashlineEditsWithRecovery(
1925
+ session,
1926
+ absolutePath,
1927
+ originalNormalized,
1928
+ edits,
1929
+ getHashlineApplyOptions(session),
1930
+ );
1801
1931
 
1802
1932
  if (originalNormalized === result.lines) {
1803
1933
  return {
@@ -1820,6 +1950,11 @@ async function executeHashlineSection(
1820
1950
  dst => (dst === absolutePath ? beginDeferredDiagnosticsForPath(absolutePath) : undefined),
1821
1951
  );
1822
1952
  invalidateFsScanAfterWrite(absolutePath);
1953
+ // The post-edit content is the freshest, most authoritative "model view"
1954
+ // of the file: the model just received it back as the diff/preview. Cache
1955
+ // it so a follow-up edit anchored against this state can still recover
1956
+ // if the file is touched out-of-band before the next edit lands.
1957
+ getFileReadCache(session).recordContiguous(absolutePath, 1, result.lines.split("\n"));
1823
1958
 
1824
1959
  const diffResult = generateDiffString(originalNormalized, result.lines);
1825
1960
  const meta = outputMeta()
@@ -17,7 +17,9 @@ import type { ExecOptions } from "../../exec/exec";
17
17
  import { execCommand } from "../../exec/exec";
18
18
  import type { CustomMessage } from "../../session/messages";
19
19
  import { EventBus } from "../../utils/event-bus";
20
+ import { installLegacyPiSpecifierShim } from "../plugins/legacy-pi-compat";
20
21
  import { getAllPluginExtensionPaths } from "../plugins/loader";
22
+
21
23
  import { resolvePath } from "../utils";
22
24
  import type {
23
25
  Extension,
@@ -31,6 +33,8 @@ import type {
31
33
  ToolDefinition,
32
34
  } from "./types";
33
35
 
36
+ installLegacyPiSpecifierShim();
37
+
34
38
  type HandlerFn = (...args: unknown[]) => Promise<unknown>;
35
39
 
36
40
  export class ExtensionRuntimeNotInitializedError extends Error {
@@ -267,9 +271,8 @@ async function loadExtension(
267
271
  runtime: IExtensionRuntime,
268
272
  ): Promise<{ extension: Extension | null; error: string | null }> {
269
273
  const resolvedPath = resolvePath(extensionPath, cwd);
270
-
271
274
  try {
272
- const module = await import(resolvedPath);
275
+ const module = await import(`omp-legacy-pi-file:${resolvedPath}`);
273
276
  const factory = (module.default ?? module) as ExtensionFactory;
274
277
 
275
278
  if (typeof factory !== "function") {
@@ -0,0 +1,166 @@
1
+ import * as path from "node:path";
2
+
3
+ const LEGACY_PI_PACKAGE_MAP = {
4
+ "@mariozechner/pi-agent-core": "@oh-my-pi/pi-agent-core",
5
+ "@mariozechner/pi-ai": "@oh-my-pi/pi-ai",
6
+ "@mariozechner/pi-coding-agent": "@oh-my-pi/pi-coding-agent",
7
+ "@mariozechner/pi-tui": "@oh-my-pi/pi-tui",
8
+ } as const;
9
+
10
+ const LEGACY_PI_CODING_AGENT_SUBPATH_MAP = {
11
+ "extensibility/extensions": "@oh-my-pi/pi-coding-agent/extensibility/extensions",
12
+ "extensibility/hooks": "@oh-my-pi/pi-coding-agent/extensibility/hooks",
13
+ } as const;
14
+
15
+ const LEGACY_PI_SPECIFIER_FILTER = /^@mariozechner\/pi-(agent-core|ai|coding-agent|tui)(\/.*)?$/;
16
+ const LEGACY_PI_IMPORT_SPECIFIER_REGEX =
17
+ /((?:from\s+|import\s*\(\s*)["'])(@mariozechner\/pi-(?:agent-core|ai|coding-agent|tui)(?:\/[^"'()\s]+)?)(["'])/g;
18
+ const LEGACY_PI_FILE_PREFIX = "omp-legacy-pi-file:";
19
+ const LEGACY_PI_FILE_NAMESPACE = "omp-legacy-pi-file";
20
+ const resolvedSpecifierFallbacks = new Map<string, string>();
21
+
22
+ let isLegacyPiSpecifierShimInstalled = false;
23
+
24
+ function remapLegacyPiSpecifier(specifier: string): string | null {
25
+ const [legacyScope, packageName, ...subpathParts] = specifier.split("/");
26
+ const legacyPackageName = `${legacyScope}/${packageName}`;
27
+ const mappedPackageName = LEGACY_PI_PACKAGE_MAP[legacyPackageName as keyof typeof LEGACY_PI_PACKAGE_MAP];
28
+ if (!mappedPackageName) {
29
+ return null;
30
+ }
31
+ if (subpathParts.length === 0) {
32
+ return mappedPackageName;
33
+ }
34
+
35
+ const subpath = subpathParts.join("/");
36
+ if (legacyPackageName === "@mariozechner/pi-coding-agent") {
37
+ return (
38
+ LEGACY_PI_CODING_AGENT_SUBPATH_MAP[subpath as keyof typeof LEGACY_PI_CODING_AGENT_SUBPATH_MAP] ??
39
+ `${mappedPackageName}/${subpath}`
40
+ );
41
+ }
42
+
43
+ return `${mappedPackageName}/${subpath}`;
44
+ }
45
+
46
+ function getResolvedSpecifier(specifier: string): string {
47
+ const cached = resolvedSpecifierFallbacks.get(specifier);
48
+ if (cached) {
49
+ return cached;
50
+ }
51
+
52
+ const resolved = Bun.resolveSync(specifier, import.meta.dir);
53
+ resolvedSpecifierFallbacks.set(specifier, resolved);
54
+ return resolved;
55
+ }
56
+
57
+ function rewriteLegacyPiImports(source: string): string {
58
+ return source.replace(
59
+ LEGACY_PI_IMPORT_SPECIFIER_REGEX,
60
+ (match, prefix: string, specifier: string, suffix: string) => {
61
+ const remappedSpecifier = remapLegacyPiSpecifier(specifier);
62
+ if (!remappedSpecifier) {
63
+ return match;
64
+ }
65
+
66
+ return `${prefix}${getResolvedSpecifier(remappedSpecifier)}${suffix}`;
67
+ },
68
+ );
69
+ }
70
+
71
+ // Match `from "..."`, `from '...'`, `import("...")`, `import('...')` import specifiers.
72
+ const ANY_IMPORT_SPECIFIER_REGEX = /((?:from\s+|import\s*\(\s*)["'])([^"']+)(["'])/g;
73
+
74
+ /**
75
+ * Resolves bare module specifiers in a legacy-namespaced extension source file
76
+ * to absolute paths anchored at the extension's own directory. Without this,
77
+ * imports inside files loaded via the `omp-legacy-pi-file:` namespace bypass
78
+ * Node-style node_modules lookup, so an extension cannot use its own deps.
79
+ * Relative paths and already-resolved absolute paths are left untouched.
80
+ */
81
+ function rewriteBareImportsForLegacyExtension(source: string, importerPath: string): string {
82
+ const importerDir = path.dirname(importerPath);
83
+ return source.replace(ANY_IMPORT_SPECIFIER_REGEX, (match, prefix: string, specifier: string, suffix: string) => {
84
+ // Skip relative, absolute, URL-style, and already-resolved Node specifiers.
85
+ if (
86
+ specifier.startsWith(".") ||
87
+ specifier.startsWith("/") ||
88
+ specifier.startsWith("node:") ||
89
+ specifier.includes("://")
90
+ ) {
91
+ return match;
92
+ }
93
+ try {
94
+ const resolved = Bun.resolveSync(specifier, importerDir);
95
+ return `${prefix}${resolved}${suffix}`;
96
+ } catch {
97
+ return match;
98
+ }
99
+ });
100
+ }
101
+
102
+ function getLoader(path: string): "js" | "jsx" | "ts" | "tsx" {
103
+ if (path.endsWith(".tsx")) {
104
+ return "tsx";
105
+ }
106
+ if (path.endsWith(".jsx")) {
107
+ return "jsx";
108
+ }
109
+ if (path.endsWith(".ts") || path.endsWith(".mts") || path.endsWith(".cts")) {
110
+ return "ts";
111
+ }
112
+ return "js";
113
+ }
114
+
115
+ function resolveLegacyPiSpecifier(args: { path: string }): { path: string } | undefined {
116
+ const remappedSpecifier = remapLegacyPiSpecifier(args.path);
117
+ if (!remappedSpecifier) {
118
+ return undefined;
119
+ }
120
+
121
+ return {
122
+ path: getResolvedSpecifier(remappedSpecifier),
123
+ };
124
+ }
125
+
126
+ export function installLegacyPiSpecifierShim(): void {
127
+ if (isLegacyPiSpecifierShimInstalled) {
128
+ return;
129
+ }
130
+ isLegacyPiSpecifierShimInstalled = true;
131
+
132
+ Bun.plugin({
133
+ name: "omp:legacy-pi-shim",
134
+ setup(build) {
135
+ build.onResolve({ filter: LEGACY_PI_SPECIFIER_FILTER, namespace: "file" }, resolveLegacyPiSpecifier);
136
+ build.onResolve(
137
+ { filter: LEGACY_PI_SPECIFIER_FILTER, namespace: LEGACY_PI_FILE_NAMESPACE },
138
+ resolveLegacyPiSpecifier,
139
+ );
140
+
141
+ build.onResolve({ filter: /^omp-legacy-pi-file:/, namespace: "file" }, args => ({
142
+ path: args.path.slice(LEGACY_PI_FILE_PREFIX.length),
143
+ namespace: LEGACY_PI_FILE_NAMESPACE,
144
+ }));
145
+
146
+ build.onResolve({ filter: /^(?:\.{1,2}\/|\/)/, namespace: LEGACY_PI_FILE_NAMESPACE }, args => ({
147
+ path: args.path.startsWith("/") ? args.path : Bun.resolveSync(args.path, path.dirname(args.importer)),
148
+ namespace: LEGACY_PI_FILE_NAMESPACE,
149
+ }));
150
+
151
+ build.onLoad({ filter: /\.[cm]?[jt]sx?$/, namespace: LEGACY_PI_FILE_NAMESPACE }, async args => {
152
+ const raw = await Bun.file(args.path).text();
153
+ // Bare specifiers (e.g. "lodash", "@scope/pkg/sub") imported from a legacy-namespaced
154
+ // extension file would otherwise bypass Node-style node_modules lookup because the
155
+ // importer lives in a custom namespace. Pre-resolve them to absolute paths so the
156
+ // extension's own node_modules are honored.
157
+ const withLegacyRemap = rewriteLegacyPiImports(raw);
158
+ const withBareResolved = rewriteBareImportsForLegacyExtension(withLegacyRemap, args.path);
159
+ return {
160
+ contents: withBareResolved,
161
+ loader: getLoader(args.path),
162
+ };
163
+ });
164
+ },
165
+ });
166
+ }
@@ -8,8 +8,11 @@ import * as fs from "node:fs";
8
8
  import * as path from "node:path";
9
9
  import { getPluginsLockfile, getPluginsNodeModules, getPluginsPackageJson, isEnoent } from "@oh-my-pi/pi-utils";
10
10
  import { getConfigDirPaths } from "../../config";
11
+ import { installLegacyPiSpecifierShim } from "./legacy-pi-compat";
11
12
  import type { InstalledPlugin, PluginManifest, PluginRuntimeConfig, ProjectPluginOverrides } from "./types";
12
13
 
14
+ installLegacyPiSpecifierShim();
15
+
13
16
  // =============================================================================
14
17
  // Runtime Config Loading
15
18
  // =============================================================================
@@ -41,11 +44,6 @@ async function loadProjectOverrides(cwd: string): Promise<ProjectPluginOverrides
41
44
  }
42
45
  return {};
43
46
  }
44
-
45
- // =============================================================================
46
- // Plugin Discovery
47
- // =============================================================================
48
-
49
47
  /**
50
48
  * Get list of enabled plugins with their resolved configurations.
51
49
  * Respects both global runtime config and project overrides.
@@ -69,7 +67,6 @@ export async function getEnabledPlugins(cwd: string): Promise<InstalledPlugin[]>
69
67
  const runtimeConfig = await loadRuntimeConfig();
70
68
  const projectOverrides = await loadProjectOverrides(cwd);
71
69
  const plugins: InstalledPlugin[] = [];
72
-
73
70
  for (const [name] of Object.entries(deps)) {
74
71
  const pluginPkgPath = path.join(nodeModulesPath, name, "package.json");
75
72
  let pluginPkg: { version: string; omp?: PluginManifest; pi?: PluginManifest };
@@ -103,7 +100,6 @@ export async function getEnabledPlugins(cwd: string): Promise<InstalledPlugin[]>
103
100
 
104
101
  // Resolve enabled features (project overrides take precedence)
105
102
  const enabledFeatures = projectOverrides.features?.[name] ?? runtimeState?.enabledFeatures ?? null;
106
-
107
103
  plugins.push({
108
104
  name,
109
105
  version: pluginPkg.version,