@morphllm/morphsdk 0.2.80 → 0.2.81

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/dist/{chunk-DCIUCDWJ.js → chunk-467MCW5R.js} +38 -10
  2. package/dist/chunk-467MCW5R.js.map +1 -0
  3. package/dist/{chunk-HBIDSKIQ.js → chunk-4NI25AKV.js} +14 -6
  4. package/dist/chunk-4NI25AKV.js.map +1 -0
  5. package/dist/{chunk-5PGRBWJX.js → chunk-4VMIPD4G.js} +4 -4
  6. package/dist/{chunk-6OII5QOW.js → chunk-B3UC7UVA.js} +3 -3
  7. package/dist/{chunk-6OII5QOW.js.map → chunk-B3UC7UVA.js.map} +1 -1
  8. package/dist/{chunk-QL5SMQ73.js → chunk-NTTQJM6O.js} +2 -2
  9. package/dist/{chunk-ND7IFSMR.js → chunk-OFMDENAQ.js} +2 -2
  10. package/dist/{chunk-26QXQFLZ.js → chunk-PHJQ4N3T.js} +8 -3
  11. package/dist/chunk-PHJQ4N3T.js.map +1 -0
  12. package/dist/{chunk-LM62QCGS.js → chunk-X6A64F2F.js} +5 -5
  13. package/dist/{chunk-4R3ALD5C.js → chunk-Z2HDXUH7.js} +2 -2
  14. package/dist/client.cjs +54 -12
  15. package/dist/client.cjs.map +1 -1
  16. package/dist/client.js +9 -9
  17. package/dist/index.cjs +54 -12
  18. package/dist/index.cjs.map +1 -1
  19. package/dist/index.js +9 -9
  20. package/dist/tools/warp_grep/agent/runner.cjs +34 -5
  21. package/dist/tools/warp_grep/agent/runner.cjs.map +1 -1
  22. package/dist/tools/warp_grep/agent/runner.js +2 -2
  23. package/dist/tools/warp_grep/anthropic.cjs +54 -12
  24. package/dist/tools/warp_grep/anthropic.cjs.map +1 -1
  25. package/dist/tools/warp_grep/anthropic.js +6 -6
  26. package/dist/tools/warp_grep/client.cjs +54 -12
  27. package/dist/tools/warp_grep/client.cjs.map +1 -1
  28. package/dist/tools/warp_grep/client.js +5 -5
  29. package/dist/tools/warp_grep/gemini.cjs +54 -12
  30. package/dist/tools/warp_grep/gemini.cjs.map +1 -1
  31. package/dist/tools/warp_grep/gemini.js +5 -5
  32. package/dist/tools/warp_grep/harness.cjs +50 -14
  33. package/dist/tools/warp_grep/harness.cjs.map +1 -1
  34. package/dist/tools/warp_grep/harness.js +2 -2
  35. package/dist/tools/warp_grep/index.cjs +58 -17
  36. package/dist/tools/warp_grep/index.cjs.map +1 -1
  37. package/dist/tools/warp_grep/index.js +5 -5
  38. package/dist/tools/warp_grep/openai.cjs +54 -12
  39. package/dist/tools/warp_grep/openai.cjs.map +1 -1
  40. package/dist/tools/warp_grep/openai.js +6 -6
  41. package/dist/tools/warp_grep/providers/local.cjs +13 -5
  42. package/dist/tools/warp_grep/providers/local.cjs.map +1 -1
  43. package/dist/tools/warp_grep/providers/local.js +1 -1
  44. package/dist/tools/warp_grep/providers/remote.cjs +7 -2
  45. package/dist/tools/warp_grep/providers/remote.cjs.map +1 -1
  46. package/dist/tools/warp_grep/providers/remote.js +1 -1
  47. package/dist/tools/warp_grep/vercel.cjs +54 -12
  48. package/dist/tools/warp_grep/vercel.cjs.map +1 -1
  49. package/dist/tools/warp_grep/vercel.js +6 -6
  50. package/package.json +1 -1
  51. package/dist/chunk-26QXQFLZ.js.map +0 -1
  52. package/dist/chunk-DCIUCDWJ.js.map +0 -1
  53. package/dist/chunk-HBIDSKIQ.js.map +0 -1
  54. /package/dist/{chunk-5PGRBWJX.js.map → chunk-4VMIPD4G.js.map} +0 -0
  55. /package/dist/{chunk-QL5SMQ73.js.map → chunk-NTTQJM6O.js.map} +0 -0
  56. /package/dist/{chunk-ND7IFSMR.js.map → chunk-OFMDENAQ.js.map} +0 -0
  57. /package/dist/{chunk-LM62QCGS.js.map → chunk-X6A64F2F.js.map} +0 -0
  58. /package/dist/{chunk-4R3ALD5C.js.map → chunk-Z2HDXUH7.js.map} +0 -0
@@ -15,10 +15,25 @@ async function toolGrep(provider, args) {
 }
 
 // tools/warp_grep/agent/tools/read.ts
+function isValidRange(start, end) {
+  return typeof start === "number" && typeof end === "number" && Number.isFinite(start) && Number.isFinite(end) && start > 0 && end >= start;
+}
 async function toolRead(provider, args) {
-  if (args.lines && args.lines.length > 0) {
+  if (args.lines && Array.isArray(args.lines) && args.lines.length > 0) {
+    const validRanges = [];
+    for (const range of args.lines) {
+      if (Array.isArray(range) && range.length >= 2 && isValidRange(range[0], range[1])) {
+        validRanges.push([range[0], range[1]]);
+      }
+    }
+    if (validRanges.length === 0) {
+      const res2 = await provider.read({ path: args.path });
+      if (res2.error) return res2.error;
+      if (!res2.lines.length) return "(empty file)";
+      return res2.lines.join("\n");
+    }
     const chunks = [];
-    for (const [start, end] of args.lines) {
+    for (const [start, end] of validRanges) {
       const res2 = await provider.read({ path: args.path, start, end });
       if (res2.error) return res2.error;
       chunks.push(res2.lines.join("\n"));
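
Note: the hunk above hardens toolRead against malformed `lines` arguments: each candidate range must be a pair of finite numbers with start > 0 and end >= start, and if nothing survives the filter the tool falls back to an unbounded read of the file. A minimal standalone TypeScript sketch of that filtering step (the filterRanges helper name is illustrative, not part of the package):

type Range = [number, number];

function isValidRange(start: unknown, end: unknown): boolean {
  // Mirrors the check added in read.ts: finite numbers, 1-based, non-inverted.
  return typeof start === "number" && typeof end === "number" &&
    Number.isFinite(start) && Number.isFinite(end) &&
    start > 0 && end >= start;
}

function filterRanges(lines: unknown[]): Range[] {
  const valid: Range[] = [];
  for (const r of lines) {
    if (Array.isArray(r) && r.length >= 2 && isValidRange(r[0], r[1])) {
      valid.push([r[0], r[1]]);
    }
  }
  return valid;
}

// filterRanges([[10, 20], [0, 5], [30, 25]]) -> [[10, 20]]
// An all-invalid input yields [], and toolRead then reads the whole file instead.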
@@ -54,23 +69,36 @@ async function toolListDirectory(provider, args) {
 }
 
 // tools/warp_grep/agent/tools/finish.ts
+function isValidRange2(range) {
+  return Array.isArray(range) && range.length >= 2 && typeof range[0] === "number" && typeof range[1] === "number" && Number.isFinite(range[0]) && Number.isFinite(range[1]) && range[0] > 0 && range[1] >= range[0];
+}
+function extractValidRanges(lines) {
+  if (!Array.isArray(lines)) return null;
+  const valid = [];
+  for (const range of lines) {
+    if (isValidRange2(range)) {
+      valid.push([range[0], range[1]]);
+    }
+  }
+  return valid.length > 0 ? valid : null;
+}
 function normalizeFinishFiles(files) {
   return files.map((f) => {
-    if (f.lines === "*") {
-      return { path: f.path, lines: "*" };
-    }
-    const merged = mergeRanges(f.lines);
-    return { path: f.path, lines: merged };
+    if (f.lines === "*") return { path: f.path, lines: "*" };
+    const validRanges = extractValidRanges(f.lines);
+    if (!validRanges) return { path: f.path, lines: "*" };
+    return { path: f.path, lines: mergeRanges(validRanges) };
   });
 }
 async function readFinishFiles(repoRoot, files, reader) {
   const out = [];
   for (const f of files) {
-    if (f.lines === "*") {
+    const validRanges = f.lines === "*" ? null : extractValidRanges(f.lines);
+    if (f.lines === "*" || !validRanges) {
       const lines = await reader(f.path);
       out.push({ path: f.path, ranges: "*", content: lines.join("\n") });
     } else {
-      const ranges = mergeRanges(f.lines);
+      const ranges = mergeRanges(validRanges);
       const chunks = [];
       for (const [s, e] of ranges) {
         const lines = await reader(f.path, s, e);
@@ -200,4 +228,4 @@ export {
   formatListDirectoryTree,
   enforceContextLimit
 };
-//# sourceMappingURL=chunk-DCIUCDWJ.js.map
+//# sourceMappingURL=chunk-467MCW5R.js.map
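
Note: the finish.ts changes apply the same defense at the finish step: a file spec whose `lines` contain no valid ranges now widens to "*" (whole file) instead of passing garbage into mergeRanges. A rough TypeScript sketch of the normalize behavior, with merging elided (normalizeExample is a made-up name for illustration):

type FinishLines = "*" | Array<[number, number]>;

function normalizeExample(spec: { path: string; lines: unknown }): { path: string; lines: FinishLines } {
  if (spec.lines === "*") return { path: spec.path, lines: "*" };
  const valid: Array<[number, number]> = [];
  if (Array.isArray(spec.lines)) {
    for (const r of spec.lines) {
      if (Array.isArray(r) && r.length >= 2 &&
          typeof r[0] === "number" && typeof r[1] === "number" &&
          Number.isFinite(r[0]) && Number.isFinite(r[1]) &&
          r[0] > 0 && r[1] >= r[0]) {
        valid.push([r[0], r[1]]);
      }
    }
  }
  // The shipped code additionally merges overlapping/adjacent ranges via mergeRanges.
  return valid.length > 0
    ? { path: spec.path, lines: valid }
    : { path: spec.path, lines: "*" }; // nothing usable -> read the whole file

}

// normalizeExample({ path: "src/a.ts", lines: [[NaN, 4], [-1, 2]] })
//   -> { path: "src/a.ts", lines: "*" }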
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/agent/tools/grep.ts","../tools/warp_grep/agent/tools/read.ts","../tools/warp_grep/agent/tools/list_directory.ts","../tools/warp_grep/agent/tools/finish.ts","../tools/warp_grep/agent/helpers.ts"],"sourcesContent":["import type { WarpGrepProvider } from '../../providers/types.js';\n\nexport async function toolGrep(\n provider: WarpGrepProvider,\n args: { pattern: string; path: string; glob?: string }\n): Promise<{ output: string }> {\n const res = await provider.grep(args);\n \n // Return errors as output - let the model see and handle them\n if (res.error) {\n return { output: res.error };\n }\n \n if (!res.lines.length) {\n return { output: 'no matches' };\n }\n \n return { output: res.lines.join('\\n') };\n}\n\n","import type { WarpGrepProvider } from '../../providers/types.js';\n\nfunction isValidRange(start: unknown, end: unknown): boolean {\n return (\n typeof start === 'number' &&\n typeof end === 'number' &&\n Number.isFinite(start) &&\n Number.isFinite(end) &&\n start > 0 &&\n end >= start\n );\n}\n\nexport async function toolRead(\n provider: WarpGrepProvider,\n args: { path: string; start?: number; end?: number; lines?: Array<[number, number]> }\n): Promise<string> {\n if (args.lines && Array.isArray(args.lines) && args.lines.length > 0) {\n const validRanges: Array<[number, number]> = [];\n for (const range of args.lines) {\n if (Array.isArray(range) && range.length >= 2 && isValidRange(range[0], range[1])) {\n validRanges.push([range[0], range[1]]);\n }\n }\n \n if (validRanges.length === 0) {\n const res = await provider.read({ path: args.path });\n if (res.error) return res.error;\n if (!res.lines.length) return '(empty file)';\n return res.lines.join('\\n');\n }\n \n const chunks: string[] = [];\n for (const [start, end] of validRanges) {\n const res = await provider.read({ path: args.path, start, end });\n if (res.error) return res.error;\n chunks.push(res.lines.join('\\n'));\n }\n if (chunks.every(c => c === '')) return '(empty file)';\n return chunks.join('\\n...\\n');\n }\n\n const res = await provider.read({ path: args.path, start: args.start, end: args.end });\n if (res.error) {\n return res.error;\n }\n if (!res.lines.length) return '(empty file)';\n return res.lines.join('\\n');\n}\n\n","import type { WarpGrepProvider } from '../../providers/types.js';\nimport { AGENT_CONFIG } from '../config.js';\n\nexport async function toolListDirectory(\n provider: WarpGrepProvider,\n args: { path: string; pattern?: string | null; maxResults?: number; maxDepth?: number }\n): Promise<string> {\n const list = await provider.listDirectory({\n path: args.path,\n pattern: args.pattern ?? null,\n maxResults: args.maxResults ?? AGENT_CONFIG.MAX_OUTPUT_LINES,\n maxDepth: args.maxDepth ?? AGENT_CONFIG.MAX_LIST_DEPTH,\n });\n \n if (!list.length) return 'empty';\n if (list.length >= AGENT_CONFIG.MAX_OUTPUT_LINES) {\n return 'query not specific enough, tool called tried to return too much context and failed';\n }\n return list\n .map((e) => {\n const indent = ' '.repeat(e.depth);\n const name = e.type === 'dir' ? 
`${e.name}/` : e.name;\n return `${indent}${name}`;\n })\n .join('\\n');\n}\n\n","import type { FinishFileSpec } from '../types.js';\n\nfunction isValidRange(range: unknown): range is [number, number] {\n return (\n Array.isArray(range) &&\n range.length >= 2 &&\n typeof range[0] === 'number' &&\n typeof range[1] === 'number' &&\n Number.isFinite(range[0]) &&\n Number.isFinite(range[1]) &&\n range[0] > 0 &&\n range[1] >= range[0]\n );\n}\n\nfunction extractValidRanges(lines: unknown): Array<[number, number]> | null {\n if (!Array.isArray(lines)) return null;\n const valid: Array<[number, number]> = [];\n for (const range of lines) {\n if (isValidRange(range)) {\n valid.push([range[0], range[1]]);\n }\n }\n return valid.length > 0 ? valid : null;\n}\n\nexport function normalizeFinishFiles(files: FinishFileSpec[]): FinishFileSpec[] {\n return files.map((f) => {\n if (f.lines === '*') return { path: f.path, lines: '*' };\n const validRanges = extractValidRanges(f.lines);\n if (!validRanges) return { path: f.path, lines: '*' };\n return { path: f.path, lines: mergeRanges(validRanges) };\n });\n}\n\nexport async function readFinishFiles(\n repoRoot: string,\n files: FinishFileSpec[],\n reader: (path: string, start?: number, end?: number) => Promise<string[]>\n): Promise<{ path: string; ranges: '*' | Array<[number, number]>; content: string }[]> {\n const out: { path: string; ranges: '*' | Array<[number, number]>; content: string }[] = [];\n for (const f of files) {\n const validRanges = f.lines === '*' ? null : extractValidRanges(f.lines);\n if (f.lines === '*' || !validRanges) {\n const lines = await reader(f.path);\n out.push({ path: f.path, ranges: '*', content: lines.join('\\n') });\n } else {\n const ranges = mergeRanges(validRanges);\n const chunks: string[] = [];\n for (const [s, e] of ranges) {\n const lines = await reader(f.path, s, e);\n chunks.push(lines.join('\\n'));\n }\n out.push({ path: f.path, ranges, content: chunks.join('\\n') });\n }\n }\n return out;\n}\n\nfunction mergeRanges(ranges: Array<[number, number]>): Array<[number, number]> {\n if (!ranges.length) return [];\n const sorted = [...ranges].sort((a, b) => a[0] - b[0]);\n const merged: Array<[number, number]> = [];\n let [cs, ce] = sorted[0];\n for (let i = 1; i < sorted.length; i++) {\n const [s, e] = sorted[i];\n if (s <= ce + 1) {\n ce = Math.max(ce, e);\n } else {\n merged.push([cs, ce]);\n cs = s;\n ce = e;\n }\n }\n merged.push([cs, ce]);\n return merged;\n}\n\n","/**\n * Shared helper functions for warp-grep agent loop.\n * Used by both the internal runner and exported for harness users.\n */\n\nimport path from 'path';\nimport { AGENT_CONFIG } from './config.js';\nimport type { ChatMessage } from './types.js';\nimport type { WarpGrepProvider, ListDirectoryEntry } from '../providers/types.js';\n\nconst TRUNCATED_MARKER = '[truncated for context limit]';\n\n/**\n * Format the turn counter message shown to the model.\n */\nexport function formatTurnMessage(turnsUsed: number, maxTurns: number): string {\n const turnsRemaining = maxTurns - turnsUsed;\n if (turnsRemaining === 1) {\n return `\\nYou have used ${turnsUsed} turns, you only have 1 turn remaining. You have run out of turns to explore the code base and MUST call the finish tool now`;\n }\n return `\\nYou have used ${turnsUsed} turn${turnsUsed === 1 ? 
'' : 's'} and have ${turnsRemaining} remaining`;\n}\n\n/**\n * Calculate and format the context budget indicator.\n */\nexport function calculateContextBudget(messages: ChatMessage[]): string {\n const totalChars = messages.reduce((sum, m) => sum + m.content.length, 0);\n const maxChars = AGENT_CONFIG.MAX_CONTEXT_CHARS;\n const percent = Math.round((totalChars / maxChars) * 100);\n const usedK = Math.round(totalChars / 1000);\n const maxK = Math.round(maxChars / 1000);\n return `<context_budget>${percent}% (${usedK}K/${maxK}K chars)</context_budget>`;\n}\n\n/**\n * Build the initial state message containing repo structure and query.\n */\nexport async function buildInitialState(\n repoRoot: string,\n query: string,\n provider: WarpGrepProvider\n): Promise<string> {\n try {\n const entries = await provider.listDirectory({\n path: '.',\n maxResults: AGENT_CONFIG.MAX_OUTPUT_LINES,\n maxDepth: 2,\n });\n const treeLines = entries.map((e) => {\n const indent = ' '.repeat(e.depth);\n const name = e.type === 'dir' ? `${e.name}/` : e.name;\n return `${indent}${name}`;\n });\n\n const repoName = path.basename(repoRoot);\n const treeOutput =\n treeLines.length > 0 ? `${repoName}/\\n${treeLines.join('\\n')}` : `${repoName}/`;\n\n return `<repo_structure>\\n${treeOutput}\\n</repo_structure>\\n\\n<search_string>\\n${query}\\n</search_string>`;\n } catch {\n const repoName = path.basename(repoRoot);\n return `<repo_structure>\\n${repoName}/\\n</repo_structure>\\n\\n<search_string>\\n${query}\\n</search_string>`;\n }\n}\n\n/**\n * Format directory entries as an indented tree string.\n */\nexport function formatListDirectoryTree(entries: ListDirectoryEntry[]): string {\n if (!entries.length) return 'empty';\n return entries\n .map((e) => {\n const indent = ' '.repeat(e.depth);\n const name = e.type === 'dir' ? 
`${e.name}/` : e.name;\n return `${indent}${name}`;\n })\n .join('\\n');\n}\n\n/**\n * Enforce hard context limit by truncating user messages.\n * Preserves system prompt, first user message (initial query), and all assistant messages.\n * Removes tool result user messages from oldest to newest until under the threshold.\n * \n * @param messages - The conversation messages array (mutated in place)\n * @param maxChars - Maximum total character count (defaults to AGENT_CONFIG.MAX_CONTEXT_CHARS)\n * @returns The same messages array, truncated if necessary\n */\nexport function enforceContextLimit(\n messages: ChatMessage[],\n maxChars: number = AGENT_CONFIG.MAX_CONTEXT_CHARS\n): ChatMessage[] {\n const getTotalChars = () => messages.reduce((sum, m) => sum + m.content.length, 0);\n \n if (getTotalChars() <= maxChars) {\n return messages;\n }\n\n // Find indices of user messages, skipping the first one (initial query with repo structure)\n // We truncate tool result messages from oldest to newest\n const userIndices: number[] = [];\n let firstUserSkipped = false;\n for (let i = 0; i < messages.length; i++) {\n if (messages[i].role === 'user') {\n if (!firstUserSkipped) {\n firstUserSkipped = true;\n continue; // Skip first user message (contains the search query)\n }\n userIndices.push(i);\n }\n }\n\n // Truncate user messages from oldest to newest until under limit\n for (const idx of userIndices) {\n if (getTotalChars() <= maxChars) {\n break;\n }\n // Only truncate if not already truncated\n if (messages[idx].content !== TRUNCATED_MARKER) {\n messages[idx] = { role: 'user', content: TRUNCATED_MARKER };\n }\n }\n\n return messages;\n}\n\n"],"mappings":";;;;;AAEA,eAAsB,SACpB,UACA,MAC6B;AAC7B,QAAM,MAAM,MAAM,SAAS,KAAK,IAAI;AAGpC,MAAI,IAAI,OAAO;AACb,WAAO,EAAE,QAAQ,IAAI,MAAM;AAAA,EAC7B;AAEA,MAAI,CAAC,IAAI,MAAM,QAAQ;AACrB,WAAO,EAAE,QAAQ,aAAa;AAAA,EAChC;AAEA,SAAO,EAAE,QAAQ,IAAI,MAAM,KAAK,IAAI,EAAE;AACxC;;;AChBA,SAAS,aAAa,OAAgB,KAAuB;AAC3D,SACE,OAAO,UAAU,YACjB,OAAO,QAAQ,YACf,OAAO,SAAS,KAAK,KACrB,OAAO,SAAS,GAAG,KACnB,QAAQ,KACR,OAAO;AAEX;AAEA,eAAsB,SACpB,UACA,MACiB;AACjB,MAAI,KAAK,SAAS,MAAM,QAAQ,KAAK,KAAK,KAAK,KAAK,MAAM,SAAS,GAAG;AACpE,UAAM,cAAuC,CAAC;AAC9C,eAAW,SAAS,KAAK,OAAO;AAC9B,UAAI,MAAM,QAAQ,KAAK,KAAK,MAAM,UAAU,KAAK,aAAa,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,GAAG;AACjF,oBAAY,KAAK,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AAAA,MACvC;AAAA,IACF;AAEA,QAAI,YAAY,WAAW,GAAG;AAC5B,YAAMA,OAAM,MAAM,SAAS,KAAK,EAAE,MAAM,KAAK,KAAK,CAAC;AACnD,UAAIA,KAAI,MAAO,QAAOA,KAAI;AAC1B,UAAI,CAACA,KAAI,MAAM,OAAQ,QAAO;AAC9B,aAAOA,KAAI,MAAM,KAAK,IAAI;AAAA,IAC5B;AAEA,UAAM,SAAmB,CAAC;AAC1B,eAAW,CAAC,OAAO,GAAG,KAAK,aAAa;AACtC,YAAMA,OAAM,MAAM,SAAS,KAAK,EAAE,MAAM,KAAK,MAAM,OAAO,IAAI,CAAC;AAC/D,UAAIA,KAAI,MAAO,QAAOA,KAAI;AAC1B,aAAO,KAAKA,KAAI,MAAM,KAAK,IAAI,CAAC;AAAA,IAClC;AACA,QAAI,OAAO,MAAM,OAAK,MAAM,EAAE,EAAG,QAAO;AACxC,WAAO,OAAO,KAAK,SAAS;AAAA,EAC9B;AAEA,QAAM,MAAM,MAAM,SAAS,KAAK,EAAE,MAAM,KAAK,MAAM,OAAO,KAAK,OAAO,KAAK,KAAK,IAAI,CAAC;AACrF,MAAI,IAAI,OAAO;AACb,WAAO,IAAI;AAAA,EACb;AACA,MAAI,CAAC,IAAI,MAAM,OAAQ,QAAO;AAC9B,SAAO,IAAI,MAAM,KAAK,IAAI;AAC5B;;;AC7CA,eAAsB,kBACpB,UACA,MACiB;AACjB,QAAM,OAAO,MAAM,SAAS,cAAc;AAAA,IACxC,MAAM,KAAK;AAAA,IACX,SAAS,KAAK,WAAW;AAAA,IACzB,YAAY,KAAK,cAAc,aAAa;AAAA,IAC5C,UAAU,KAAK,YAAY,aAAa;AAAA,EAC1C,CAAC;AAED,MAAI,CAAC,KAAK,OAAQ,QAAO;AACzB,MAAI,KAAK,UAAU,aAAa,kBAAkB;AAChD,WAAO;AAAA,EACT;AACA,SAAO,KACJ,IAAI,CAAC,MAAM;AACV,UAAM,SAAS,KAAK,OAAO,EAAE,KAAK;AAClC,UAAM,OAAO,EAAE,SAAS,QAAQ,GAAG,EAAE,IAAI,MAAM,EAAE;AACjD,WAAO,GAAG,MAAM,GAAG,IAAI;AAAA,EACzB,CAAC,EACA,KAAK,IAAI;AACd;;;ACvBA,SAASC,cAAa,OAA2C;AAC/D,SACE,MAA
M,QAAQ,KAAK,KACnB,MAAM,UAAU,KAChB,OAAO,MAAM,CAAC,MAAM,YACpB,OAAO,MAAM,CAAC,MAAM,YACpB,OAAO,SAAS,MAAM,CAAC,CAAC,KACxB,OAAO,SAAS,MAAM,CAAC,CAAC,KACxB,MAAM,CAAC,IAAI,KACX,MAAM,CAAC,KAAK,MAAM,CAAC;AAEvB;AAEA,SAAS,mBAAmB,OAAgD;AAC1E,MAAI,CAAC,MAAM,QAAQ,KAAK,EAAG,QAAO;AAClC,QAAM,QAAiC,CAAC;AACxC,aAAW,SAAS,OAAO;AACzB,QAAIA,cAAa,KAAK,GAAG;AACvB,YAAM,KAAK,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC;AAAA,IACjC;AAAA,EACF;AACA,SAAO,MAAM,SAAS,IAAI,QAAQ;AACpC;AAEO,SAAS,qBAAqB,OAA2C;AAC9E,SAAO,MAAM,IAAI,CAAC,MAAM;AACtB,QAAI,EAAE,UAAU,IAAK,QAAO,EAAE,MAAM,EAAE,MAAM,OAAO,IAAI;AACvD,UAAM,cAAc,mBAAmB,EAAE,KAAK;AAC9C,QAAI,CAAC,YAAa,QAAO,EAAE,MAAM,EAAE,MAAM,OAAO,IAAI;AACpD,WAAO,EAAE,MAAM,EAAE,MAAM,OAAO,YAAY,WAAW,EAAE;AAAA,EACzD,CAAC;AACH;AAEA,eAAsB,gBACpB,UACA,OACA,QACqF;AACrF,QAAM,MAAkF,CAAC;AACzF,aAAW,KAAK,OAAO;AACrB,UAAM,cAAc,EAAE,UAAU,MAAM,OAAO,mBAAmB,EAAE,KAAK;AACvE,QAAI,EAAE,UAAU,OAAO,CAAC,aAAa;AACnC,YAAM,QAAQ,MAAM,OAAO,EAAE,IAAI;AACjC,UAAI,KAAK,EAAE,MAAM,EAAE,MAAM,QAAQ,KAAK,SAAS,MAAM,KAAK,IAAI,EAAE,CAAC;AAAA,IACnE,OAAO;AACL,YAAM,SAAS,YAAY,WAAW;AACtC,YAAM,SAAmB,CAAC;AAC1B,iBAAW,CAAC,GAAG,CAAC,KAAK,QAAQ;AAC3B,cAAM,QAAQ,MAAM,OAAO,EAAE,MAAM,GAAG,CAAC;AACvC,eAAO,KAAK,MAAM,KAAK,IAAI,CAAC;AAAA,MAC9B;AACA,UAAI,KAAK,EAAE,MAAM,EAAE,MAAM,QAAQ,SAAS,OAAO,KAAK,IAAI,EAAE,CAAC;AAAA,IAC/D;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,YAAY,QAA0D;AAC7E,MAAI,CAAC,OAAO,OAAQ,QAAO,CAAC;AAC5B,QAAM,SAAS,CAAC,GAAG,MAAM,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;AACrD,QAAM,SAAkC,CAAC;AACzC,MAAI,CAAC,IAAI,EAAE,IAAI,OAAO,CAAC;AACvB,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,CAAC,GAAG,CAAC,IAAI,OAAO,CAAC;AACvB,QAAI,KAAK,KAAK,GAAG;AACf,WAAK,KAAK,IAAI,IAAI,CAAC;AAAA,IACrB,OAAO;AACL,aAAO,KAAK,CAAC,IAAI,EAAE,CAAC;AACpB,WAAK;AACL,WAAK;AAAA,IACP;AAAA,EACF;AACA,SAAO,KAAK,CAAC,IAAI,EAAE,CAAC;AACpB,SAAO;AACT;;;ACvEA,OAAO,UAAU;AAKjB,IAAM,mBAAmB;AAKlB,SAAS,kBAAkB,WAAmB,UAA0B;AAC7E,QAAM,iBAAiB,WAAW;AAClC,MAAI,mBAAmB,GAAG;AACxB,WAAO;AAAA,gBAAmB,SAAS;AAAA,EACrC;AACA,SAAO;AAAA,gBAAmB,SAAS,QAAQ,cAAc,IAAI,KAAK,GAAG,aAAa,cAAc;AAClG;AAKO,SAAS,uBAAuB,UAAiC;AACtE,QAAM,aAAa,SAAS,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,QAAQ,QAAQ,CAAC;AACxE,QAAM,WAAW,aAAa;AAC9B,QAAM,UAAU,KAAK,MAAO,aAAa,WAAY,GAAG;AACxD,QAAM,QAAQ,KAAK,MAAM,aAAa,GAAI;AAC1C,QAAM,OAAO,KAAK,MAAM,WAAW,GAAI;AACvC,SAAO,mBAAmB,OAAO,MAAM,KAAK,KAAK,IAAI;AACvD;AAKA,eAAsB,kBACpB,UACA,OACA,UACiB;AACjB,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,cAAc;AAAA,MAC3C,MAAM;AAAA,MACN,YAAY,aAAa;AAAA,MACzB,UAAU;AAAA,IACZ,CAAC;AACD,UAAM,YAAY,QAAQ,IAAI,CAAC,MAAM;AACnC,YAAM,SAAS,KAAK,OAAO,EAAE,KAAK;AAClC,YAAM,OAAO,EAAE,SAAS,QAAQ,GAAG,EAAE,IAAI,MAAM,EAAE;AACjD,aAAO,GAAG,MAAM,GAAG,IAAI;AAAA,IACzB,CAAC;AAED,UAAM,WAAW,KAAK,SAAS,QAAQ;AACvC,UAAM,aACJ,UAAU,SAAS,IAAI,GAAG,QAAQ;AAAA,EAAM,UAAU,KAAK,IAAI,CAAC,KAAK,GAAG,QAAQ;AAE9E,WAAO;AAAA,EAAqB,UAAU;AAAA;AAAA;AAAA;AAAA,EAA2C,KAAK;AAAA;AAAA,EACxF,QAAQ;AACN,UAAM,WAAW,KAAK,SAAS,QAAQ;AACvC,WAAO;AAAA,EAAqB,QAAQ;AAAA;AAAA;AAAA;AAAA,EAA4C,KAAK;AAAA;AAAA,EACvF;AACF;AAKO,SAAS,wBAAwB,SAAuC;AAC7E,MAAI,CAAC,QAAQ,OAAQ,QAAO;AAC5B,SAAO,QACJ,IAAI,CAAC,MAAM;AACV,UAAM,SAAS,KAAK,OAAO,EAAE,KAAK;AAClC,UAAM,OAAO,EAAE,SAAS,QAAQ,GAAG,EAAE,IAAI,MAAM,EAAE;AACjD,WAAO,GAAG,MAAM,GAAG,IAAI;AAAA,EACzB,CAAC,EACA,KAAK,IAAI;AACd;AAWO,SAAS,oBACd,UACA,WAAmB,aAAa,mBACjB;AACf,QAAM,gBAAgB,MAAM,SAAS,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,QAAQ,QAAQ,CAAC;AAEjF,MAAI,cAAc,KAAK,UAAU;AAC/B,WAAO;AAAA,EACT;AAIA,QAAM,cAAwB,CAAC;AAC/B,MAAI,mBAAmB;AACvB,WAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,QAAI,SAAS,CAAC,EAAE,SAAS,QAAQ;AAC/B,UAAI,CAAC,kBAAkB;AACrB,2BAAmB;AACnB;AAAA,MACF;AACA,kBAAY,KAAK,CAAC;AAAA,IACpB;AAAA,EACF;AAGA,aAAW,OAAO,aAAa;AAC7B,QAAI,cAAc,KAAK,UAAU;A
AC/B;AAAA,IACF;AAEA,QAAI,SAAS,GAAG,EAAE,YAAY,kBAAkB;AAC9C,eAAS,GAAG,IAAI,EAAE,MAAM,QAAQ,SAAS,iBAAiB;AAAA,IAC5D;AAAA,EACF;AAEA,SAAO;AACT;","names":["res","isValidRange"]}
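
Note: the new source map above embeds helpers.ts, including enforceContextLimit, which keeps the conversation under a character budget by preserving the system prompt, the first user message, and all assistant turns while replacing older tool-result user messages with a marker. A condensed TypeScript sketch of that strategy (names simplified; the AGENT_CONFIG default budget is omitted):

type ChatMessage = { role: "system" | "user" | "assistant"; content: string };
const TRUNCATED_MARKER = "[truncated for context limit]";

function enforceContextLimitSketch(messages: ChatMessage[], maxChars: number): ChatMessage[] {
  const total = () => messages.reduce((sum, m) => sum + m.content.length, 0);
  if (total() <= maxChars) return messages;
  let firstUserSkipped = false;
  // Walk oldest-to-newest, truncating tool-result user messages until under budget.
  for (let i = 0; i < messages.length && total() > maxChars; i++) {
    if (messages[i].role !== "user") continue;
    if (!firstUserSkipped) { firstUserSkipped = true; continue; } // keep the initial query
    if (messages[i].content !== TRUNCATED_MARKER) {
      messages[i] = { role: "user", content: TRUNCATED_MARKER };
    }
  }
  return messages;
}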
@@ -213,11 +213,19 @@ Details: ${res.stderr}` : ""}`
   };
 }
 const total = lines.length;
-let s = params.start ?? 1;
-let e = Math.min(params.end ?? total, total);
-if (s > total && total > 0) {
-  s = 1;
-  e = total;
+const rawStart = params.start;
+const rawEnd = params.end;
+let s = 1;
+let e = total;
+const startValid = rawStart === void 0 || Number.isFinite(rawStart) && rawStart > 0;
+const endValid = rawEnd === void 0 || Number.isFinite(rawEnd) && rawEnd > 0;
+if (startValid && endValid) {
+  s = rawStart ?? 1;
+  e = Math.min(rawEnd ?? total, total);
+  if (s > total && total > 0 || s > e) {
+    s = 1;
+    e = total;
+  }
 }
 const out = [];
 for (let i = s; i <= e; i += 1) {
@@ -281,4 +289,4 @@ Details: ${res.stderr}` : ""}`
 export {
   LocalRipgrepProvider
 };
-//# sourceMappingURL=chunk-HBIDSKIQ.js.map
+//# sourceMappingURL=chunk-4NI25AKV.js.map
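
Note: for LocalRipgrepProvider.read, the change above means a non-finite or non-positive start/end (or an inverted range) no longer produces a bogus slice; the read falls back to the full file. A small sketch of the new bounds logic, pulled out into a hypothetical clampRange helper for clarity:

function clampRange(total: number, start?: number, end?: number): [number, number] {
  let s = 1;
  let e = total;
  const startValid = start === undefined || (Number.isFinite(start) && start > 0);
  const endValid = end === undefined || (Number.isFinite(end) && end > 0);
  if (startValid && endValid) {
    s = start ?? 1;
    e = Math.min(end ?? total, total);
    // Out-of-bounds or inverted ranges reset to the whole file.
    if ((s > total && total > 0) || s > e) {
      s = 1;
      e = total;
    }
  }
  return [s, e];
}

// clampRange(100, 50, 60) -> [50, 60]
// clampRange(100, NaN, 60) -> [1, 100]   // non-finite start rejected
// clampRange(100, 90, 10)  -> [1, 100]   // inverted range rejected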
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/providers/local.ts"],"sourcesContent":["import fs from 'fs/promises';\nimport fssync from 'fs';\nimport path from 'path';\nimport { runRipgrep } from '../utils/ripgrep.js';\nimport { ensureWithinRepo, resolveUnderRepo, toRepoRelative, isSymlink, isTextualFile, fixPathRepetition } from '../utils/paths.js';\nimport type { WarpGrepProvider, GrepResult, ReadResult, ListDirectoryEntry } from './types.js';\nimport { readAllLines } from '../utils/files.js';\nimport { DEFAULT_EXCLUDES, AGENT_CONFIG } from '../agent/config.js';\n\n/**\n * Directories/files to always skip (exact name match)\n */\nconst SKIP_NAMES = new Set([\n // Version control\n '.git', '.svn', '.hg', '.bzr',\n // Dependencies\n 'node_modules', 'bower_components', '.pnpm', '.yarn',\n 'vendor', 'Pods', '.bundle',\n // Python\n '__pycache__', '.pytest_cache', '.mypy_cache', '.ruff_cache',\n '.venv', 'venv', '.tox', '.nox', '.eggs',\n // Build outputs\n 'dist', 'build', 'out', 'output', 'target', '_build',\n '.next', '.nuxt', '.output', '.vercel', '.netlify',\n // Cache\n '.cache', '.parcel-cache', '.turbo', '.nx', '.gradle',\n // IDE\n '.idea', '.vscode', '.vs',\n // Coverage\n 'coverage', '.coverage', 'htmlcov', '.nyc_output',\n // Temp\n 'tmp', 'temp', '.tmp', '.temp',\n // Lock files\n 'package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb',\n 'Cargo.lock', 'Gemfile.lock', 'poetry.lock',\n]);\n\n/**\n * File extensions to skip\n */\nconst SKIP_EXTENSIONS = new Set([\n '.min.js', '.min.css', '.bundle.js',\n '.wasm', '.so', '.dll', '.pyc',\n '.map', '.js.map',\n]);\n\n/**\n * Check if a filename should be skipped\n */\nfunction shouldSkip(name: string): boolean {\n // Skip exact name matches\n if (SKIP_NAMES.has(name)) return true;\n \n // Skip hidden files/directories (start with .)\n if (name.startsWith('.')) return true;\n \n // Skip files with certain extensions\n for (const ext of SKIP_EXTENSIONS) {\n if (name.endsWith(ext)) return true;\n }\n \n return false;\n}\n\nexport class LocalRipgrepProvider implements WarpGrepProvider {\n constructor(private readonly repoRoot: string, private readonly excludes: string[] = DEFAULT_EXCLUDES) {}\n\n async grep(params: { pattern: string; path: string; glob?: string }): Promise<GrepResult> {\n let abs: string;\n try {\n abs = resolveUnderRepo(this.repoRoot, params.path);\n } catch (err) {\n return {\n lines: [],\n error: `[PATH ERROR] ${err instanceof Error ? err.message : String(err)}`,\n };\n }\n const stat = await fs.stat(abs).catch(() => null);\n if (!stat) return { lines: [] };\n const targetArg = abs === path.resolve(this.repoRoot) ? '.' : toRepoRelative(this.repoRoot, abs);\n const args = [\n '--no-config',\n '--no-heading',\n '--with-filename',\n '--line-number',\n '--color=never',\n '--trim',\n '--max-columns=400',\n '-C', '1',\n ...(params.glob ? ['--glob', params.glob] : []),\n ...this.excludes.flatMap((e) => ['-g', `!${e}`]),\n params.pattern,\n targetArg || '.',\n ];\n const res = await runRipgrep(args, { cwd: this.repoRoot });\n \n // Gracefully handle ripgrep not being available\n if (res.exitCode === -1) {\n return {\n lines: [],\n error: `[RIPGREP NOT AVAILABLE] ripgrep (rg) is required but failed to execute. Please install it:\\n` +\n ` • macOS: brew install ripgrep\\n` +\n ` • Ubuntu/Debian: apt install ripgrep\\n` +\n ` • Windows: choco install ripgrep\\n` +\n ` • Or visit: https://github.com/BurntSushi/ripgrep#installation\\n` +\n `Exit code: ${res.exitCode}${res.stderr ? 
`\\nDetails: ${res.stderr}` : ''}`,\n };\n }\n \n // Handle other ripgrep errors gracefully\n if (res.exitCode !== 0 && res.exitCode !== 1) {\n return {\n lines: [],\n error: `[RIPGREP ERROR] grep failed with exit code ${res.exitCode}${res.stderr ? `: ${res.stderr}` : ''}`,\n };\n }\n \n const lines = (res.stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((l) => l.length > 0);\n if (lines.length > AGENT_CONFIG.MAX_OUTPUT_LINES) {\n return {\n lines: [],\n error: 'query not specific enough, tool tried to return too much context and failed',\n };\n }\n \n return { lines };\n }\n\n async read(params: { path: string; start?: number; end?: number }): Promise<ReadResult> {\n let abs: string;\n try {\n abs = resolveUnderRepo(this.repoRoot, params.path);\n } catch (err) {\n return {\n lines: [],\n error: `[PATH ERROR] ${err instanceof Error ? err.message : String(err)}`,\n };\n }\n let stat = await fs.stat(abs).catch(() => null);\n\n // Handle duplicate path segments from model predictions\n if (!stat || !stat.isFile()) {\n const fixedPath = fixPathRepetition(abs);\n if (fixedPath) {\n const fixedStat = await fs.stat(fixedPath).catch(() => null);\n if (fixedStat?.isFile()) {\n abs = fixedPath;\n stat = fixedStat;\n }\n }\n }\n\n // Gracefully handle file not found / not a file\n if (!stat || !stat.isFile()) {\n return {\n lines: [],\n error: `[FILE NOT FOUND] You tried to read \"${params.path}\" but there is no file at this path. ` +\n `Double-check the path exists and is spelled correctly.`,\n };\n }\n \n // Gracefully handle symlinks\n if (isSymlink(abs)) {\n return {\n lines: [],\n error: `[SYMLINK] You tried to read \"${params.path}\" but this is a symlink. ` +\n `Try reading the actual file it points to instead.`,\n };\n }\n \n // Gracefully handle non-text or too-large files\n if (!isTextualFile(abs)) {\n return {\n lines: [],\n error: `[UNREADABLE FILE] You tried to read \"${params.path}\" but this file is either too large ` +\n `or not a text file, so it cannot be read. Try a different file.`,\n };\n }\n \n let lines: string[];\n try {\n lines = await readAllLines(abs);\n } catch (err) {\n return {\n lines: [],\n error: `[READ ERROR] Failed to read \"${params.path}\": ${err instanceof Error ? err.message : String(err)}`,\n };\n }\n const total = lines.length;\n const rawStart = params.start;\n const rawEnd = params.end;\n \n let s = 1;\n let e = total;\n \n const startValid = rawStart === undefined || (Number.isFinite(rawStart) && rawStart > 0);\n const endValid = rawEnd === undefined || (Number.isFinite(rawEnd) && rawEnd > 0);\n \n if (startValid && endValid) {\n s = rawStart ?? 1;\n e = Math.min(rawEnd ?? total, total);\n if ((s > total && total > 0) || (s > e)) {\n s = 1;\n e = total;\n }\n }\n const out: string[] = [];\n for (let i = s; i <= e; i += 1) {\n const content = lines[i - 1] ?? '';\n out.push(`${i}|${content}`);\n }\n if (out.length > AGENT_CONFIG.MAX_READ_LINES) {\n const truncated = out.slice(0, AGENT_CONFIG.MAX_READ_LINES);\n truncated.push(`... 
[truncated: showing ${AGENT_CONFIG.MAX_READ_LINES} of ${out.length} lines]`);\n return { lines: truncated };\n }\n \n return { lines: out };\n }\n\n async listDirectory(params: { path: string; pattern?: string | null; maxResults?: number; maxDepth?: number }): Promise<ListDirectoryEntry[]> {\n let abs: string;\n try {\n abs = resolveUnderRepo(this.repoRoot, params.path);\n } catch {\n // Path outside repo - return empty (graceful failure)\n return [];\n }\n const stat = await fs.stat(abs).catch(() => null);\n if (!stat || !stat.isDirectory()) {\n return [];\n }\n const maxResults = params.maxResults ?? AGENT_CONFIG.MAX_OUTPUT_LINES;\n const maxDepth = params.maxDepth ?? AGENT_CONFIG.MAX_LIST_DEPTH;\n const regex = params.pattern ? new RegExp(params.pattern) : null;\n const repoRoot = this.repoRoot;\n\n const results: ListDirectoryEntry[] = [];\n let timedOut = false;\n const startTime = Date.now();\n \n async function walk(dir: string, depth: number) {\n if (Date.now() - startTime > AGENT_CONFIG.LIST_TIMEOUT_MS) {\n timedOut = true;\n return;\n }\n if (depth > maxDepth || results.length >= maxResults) return;\n const entries = await fs.readdir(dir, { withFileTypes: true });\n for (const entry of entries) {\n if (timedOut || results.length >= maxResults) break;\n \n // Simple name-based filtering - skip junk dirs/files\n if (shouldSkip(entry.name)) continue;\n \n // Apply user-provided pattern filter\n if (regex && !regex.test(entry.name)) continue;\n \n const full = path.join(dir, entry.name);\n const isDir = entry.isDirectory();\n \n results.push({\n name: entry.name,\n path: toRepoRelative(repoRoot, full),\n type: isDir ? 'dir' : 'file',\n depth,\n });\n if (isDir) {\n await walk(full, depth + 1);\n }\n }\n }\n await walk(abs, 0);\n return results;\n 
}\n}\n\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;AAAA,OAAO,QAAQ;AAEf,OAAO,UAAU;AAUjB,IAAM,aAAa,oBAAI,IAAI;AAAA;AAAA,EAEzB;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAO;AAAA;AAAA,EAEvB;AAAA,EAAgB;AAAA,EAAoB;AAAA,EAAS;AAAA,EAC7C;AAAA,EAAU;AAAA,EAAQ;AAAA;AAAA,EAElB;AAAA,EAAe;AAAA,EAAiB;AAAA,EAAe;AAAA,EAC/C;AAAA,EAAS;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA;AAAA,EAEjC;AAAA,EAAQ;AAAA,EAAS;AAAA,EAAO;AAAA,EAAU;AAAA,EAAU;AAAA,EAC5C;AAAA,EAAS;AAAA,EAAS;AAAA,EAAW;AAAA,EAAW;AAAA;AAAA,EAExC;AAAA,EAAU;AAAA,EAAiB;AAAA,EAAU;AAAA,EAAO;AAAA;AAAA,EAE5C;AAAA,EAAS;AAAA,EAAW;AAAA;AAAA,EAEpB;AAAA,EAAY;AAAA,EAAa;AAAA,EAAW;AAAA;AAAA,EAEpC;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAQ;AAAA;AAAA,EAEvB;AAAA,EAAqB;AAAA,EAAa;AAAA,EAAkB;AAAA,EACpD;AAAA,EAAc;AAAA,EAAgB;AAChC,CAAC;AAKD,IAAM,kBAAkB,oBAAI,IAAI;AAAA,EAC9B;AAAA,EAAW;AAAA,EAAY;AAAA,EACvB;AAAA,EAAS;AAAA,EAAO;AAAA,EAAQ;AAAA,EACxB;AAAA,EAAQ;AACV,CAAC;AAKD,SAAS,WAAW,MAAuB;AAEzC,MAAI,WAAW,IAAI,IAAI,EAAG,QAAO;AAGjC,MAAI,KAAK,WAAW,GAAG,EAAG,QAAO;AAGjC,aAAW,OAAO,iBAAiB;AACjC,QAAI,KAAK,SAAS,GAAG,EAAG,QAAO;AAAA,EACjC;AAEA,SAAO;AACT;AAEO,IAAM,uBAAN,MAAuD;AAAA,EAC5D,YAA6B,UAAmC,WAAqB,kBAAkB;AAA1E;AAAmC;AAAA,EAAwC;AAAA,EAExG,MAAM,KAAK,QAA+E;AACxF,QAAI;AACJ,QAAI;AACF,YAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AAAA,IACnD,SAAS,KAAK;AACZ,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACzE;AAAA,IACF;AACA,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAChD,QAAI,CAAC,KAAM,QAAO,EAAE,OAAO,CAAC,EAAE;AAC9B,UAAM,YAAY,QAAQ,KAAK,QAAQ,KAAK,QAAQ,IAAI,MAAM,eAAe,KAAK,UAAU,GAAG;AAC/F,UAAM,OAAO;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAAM;AAAA,MACN,GAAI,OAAO,OAAO,CAAC,UAAU,OAAO,IAAI,IAAI,CAAC;AAAA,MAC7C,GAAG,KAAK,SAAS,QAAQ,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC;AAAA,MAC/C,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AACA,UAAM,MAAM,MAAM,WAAW,MAAM,EAAE,KAAK,KAAK,SAAS,CAAC;AAGzD,QAAI,IAAI,aAAa,IAAI;AACvB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,aAKc,IAAI,QAAQ,GAAG,IAAI,SAAS;AAAA,WAAc,IAAI,MAAM,KAAK,EAAE;AAAA,MAClF;AAAA,IACF;AAGA,QAAI,IAAI,aAAa,KAAK,IAAI,aAAa,GAAG;AAC5C,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,8CAA8C,IAAI,QAAQ,GAAG,IAAI,SAAS,KAAK,IAAI,MAAM,KAAK,EAAE;AAAA,MACzG;AAAA,IACF;AAEA,UAAM,SAAS,IAAI,UAAU,IAC1B,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAC7B,QAAI,MAAM,SAAS,aAAa,kBAAkB;AAChD,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO,EAAE,MAAM;AAAA,EACjB;AAAA,EAEA,MAAM,KAAK,QAA6E;AACtF,QAAI;AACJ,QAAI;AACF,YAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AAAA,IACnD,SAAS,KAAK;AACZ,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACzE;AAAA,IACF;AACA,QAAI,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAG9C,QAAI,CAAC,QAAQ,CAAC,KAAK,OAAO,GAAG;AAC3B,YAAM,YAAY,kBAAkB,GAAG;AACvC,UAAI,WAAW;AACb,cAAM,YAAY,MAAM,GAAG,KAAK,SAAS,EAAE,MAAM,MAAM,IAAI;AAC3D,YAAI,WAAW,OAAO,GAAG;AACvB,gBAAM;AACN,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAGA,QAAI,CAAC,QAAQ,CAAC,KAAK,OAAO,GAAG;AAC3B,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,uCAAuC,OAAO,IAAI;AAAA,MAE3D;AAAA,IACF;AAGA,QAAI,UAAU,GAAG,GAAG;AAClB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gCAAgC,OAAO,IAAI;AAAA,MAEpD;AAAA,IACF;AAGA,QAAI,CAAC,cAAc,GAAG,GAAG;AACvB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,wCAAwC,OAAO,IAAI;AAAA,MAE5D;AAAA,IACF;AAEA,QAAI;AACJ,QAAI;AACF,cAAQ,MAAM,aAAa,GAAG;AAAA,IAChC,SAAS,KAAK;AACZ,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gCAAgC,OAAO,IAAI,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC1G;AAAA,IACF;AACA,UAAM,QAAQ,MAAM;AACpB,UAAM,WAAW,OAAO;AACxB,UAAM,SAAS,OAAO;AAEtB,QAAI,IAAI;AACR,QAAI,IAAI;AAER,UAAM,aAAa,aAAa,UAAc,OAAO,SAAS,QAAQ,KAAK,WAAW;AACtF,UAAM
,WAAW,WAAW,UAAc,OAAO,SAAS,MAAM,KAAK,SAAS;AAE9E,QAAI,cAAc,UAAU;AAC1B,UAAI,YAAY;AAChB,UAAI,KAAK,IAAI,UAAU,OAAO,KAAK;AACnC,UAAK,IAAI,SAAS,QAAQ,KAAO,IAAI,GAAI;AACvC,YAAI;AACJ,YAAI;AAAA,MACN;AAAA,IACF;AACA,UAAM,MAAgB,CAAC;AACvB,aAAS,IAAI,GAAG,KAAK,GAAG,KAAK,GAAG;AAC9B,YAAM,UAAU,MAAM,IAAI,CAAC,KAAK;AAChC,UAAI,KAAK,GAAG,CAAC,IAAI,OAAO,EAAE;AAAA,IAC5B;AACA,QAAI,IAAI,SAAS,aAAa,gBAAgB;AAC5C,YAAM,YAAY,IAAI,MAAM,GAAG,aAAa,cAAc;AAC1D,gBAAU,KAAK,2BAA2B,aAAa,cAAc,OAAO,IAAI,MAAM,SAAS;AAC/F,aAAO,EAAE,OAAO,UAAU;AAAA,IAC5B;AAEA,WAAO,EAAE,OAAO,IAAI;AAAA,EACtB;AAAA,EAEA,MAAM,cAAc,QAA0H;AAC5I,QAAI;AACJ,QAAI;AACF,YAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AAAA,IACnD,QAAQ;AAEN,aAAO,CAAC;AAAA,IACV;AACA,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAChD,QAAI,CAAC,QAAQ,CAAC,KAAK,YAAY,GAAG;AAChC,aAAO,CAAC;AAAA,IACV;AACA,UAAM,aAAa,OAAO,cAAc,aAAa;AACrD,UAAM,WAAW,OAAO,YAAY,aAAa;AACjD,UAAM,QAAQ,OAAO,UAAU,IAAI,OAAO,OAAO,OAAO,IAAI;AAC5D,UAAM,WAAW,KAAK;AAEtB,UAAM,UAAgC,CAAC;AACvC,QAAI,WAAW;AACf,UAAM,YAAY,KAAK,IAAI;AAE3B,mBAAe,KAAK,KAAa,OAAe;AAC9C,UAAI,KAAK,IAAI,IAAI,YAAY,aAAa,iBAAiB;AACzD,mBAAW;AACX;AAAA,MACF;AACA,UAAI,QAAQ,YAAY,QAAQ,UAAU,WAAY;AACtD,YAAM,UAAU,MAAM,GAAG,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC7D,iBAAW,SAAS,SAAS;AAC3B,YAAI,YAAY,QAAQ,UAAU,WAAY;AAG9C,YAAI,WAAW,MAAM,IAAI,EAAG;AAG5B,YAAI,SAAS,CAAC,MAAM,KAAK,MAAM,IAAI,EAAG;AAEtC,cAAM,OAAO,KAAK,KAAK,KAAK,MAAM,IAAI;AACtC,cAAM,QAAQ,MAAM,YAAY;AAEhC,gBAAQ,KAAK;AAAA,UACX,MAAM,MAAM;AAAA,UACZ,MAAM,eAAe,UAAU,IAAI;AAAA,UACnC,MAAM,QAAQ,QAAQ;AAAA,UACtB;AAAA,QACF,CAAC;AACD,YAAI,OAAO;AACT,gBAAM,KAAK,MAAM,QAAQ,CAAC;AAAA,QAC5B;AAAA,MACF;AAAA,IACF;AACA,UAAM,KAAK,KAAK,CAAC;AACjB,WAAO;AAAA,EACT;AACF;","names":[]}
@@ -1,12 +1,12 @@
 import {
   runWarpGrep
-} from "./chunk-6OII5QOW.js";
+} from "./chunk-B3UC7UVA.js";
 import {
   RemoteCommandsProvider
-} from "./chunk-26QXQFLZ.js";
+} from "./chunk-PHJQ4N3T.js";
 import {
   LocalRipgrepProvider
-} from "./chunk-HBIDSKIQ.js";
+} from "./chunk-4NI25AKV.js";
 
 // tools/warp_grep/client.ts
 var WarpGrepClient = class {
@@ -115,4 +115,4 @@ export {
   executeToolCall,
   formatResult
 };
-//# sourceMappingURL=chunk-5PGRBWJX.js.map
+//# sourceMappingURL=chunk-4VMIPD4G.js.map
@@ -7,7 +7,7 @@ import {
   toolGrep,
   toolListDirectory,
   toolRead
-} from "./chunk-DCIUCDWJ.js";
+} from "./chunk-467MCW5R.js";
 import {
   formatAgentToolOutput
 } from "./chunk-APP75CBN.js";
@@ -157,7 +157,7 @@ async function runWarpGrep(config) {
   }
   const parts = ["Relevant context found:"];
   for (const f of finishMeta.files) {
-    const ranges = f.lines === "*" ? "*" : f.lines.map(([s, e]) => `${s}-${e}`).join(", ");
+    const ranges = f.lines === "*" ? "*" : Array.isArray(f.lines) ? f.lines.map(([s, e]) => `${s}-${e}`).join(", ") : "*";
     parts.push(`- ${f.path}: ${ranges}`);
   }
   const payload = parts.join("\n");
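
Note: in the runner, the finish summary no longer assumes f.lines is an array; anything that is neither "*" nor an array now degrades to "*" rather than throwing on .map(). A sketch of the resulting behavior (formatRanges is an illustrative name, not an exported function):

function formatRanges(lines: unknown): string {
  if (lines === "*") return "*";
  if (Array.isArray(lines)) {
    // Each entry is expected to be a [start, end] pair at this point.
    return lines.map(([s, e]) => `${s}-${e}`).join(", ");
  }
  return "*"; // unexpected shape -> report the whole file
}

// formatRanges([[3, 9], [20, 40]]) -> "3-9, 20-40"
// formatRanges(undefined)          -> "*"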
@@ -193,4 +193,4 @@ async function runWarpGrep(config) {
 export {
   runWarpGrep
 };
-//# sourceMappingURL=chunk-6OII5QOW.js.map
+//# sourceMappingURL=chunk-B3UC7UVA.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../tools/warp_grep/agent/runner.ts"],"sourcesContent":["import { AGENT_CONFIG, DEFAULT_MODEL } from './config.js';\nimport { getSystemPrompt } from './prompt.js';\nimport type { AgentRunResult, ChatMessage, SessionConfig, AgentFinish } from './types.js';\nimport { LLMResponseParser } from './parser.js';\nimport type { WarpGrepProvider } from '../providers/types.js';\nimport { toolGrep } from './tools/grep.js';\nimport { toolRead } from './tools/read.js';\nimport { toolListDirectory } from './tools/list_directory.js';\nimport { readFinishFiles } from './tools/finish.js';\nimport { fetchWithRetry, withTimeout, type RetryConfig } from '../../utils/resilience.js';\nimport { formatAgentToolOutput } from './formatter.js';\nimport { formatTurnMessage, calculateContextBudget, buildInitialState, enforceContextLimit } from './helpers.js';\nimport path from 'path';\n\ntype EventName =\n | 'initial_state'\n | 'round_start'\n | 'round_end'\n | 'finish'\n | 'error';\n\nexport type EventCallback = (name: EventName, payload: Record<string, unknown>) => void;\n\nconst parser = new LLMResponseParser();\n\nconst DEFAULT_API_URL = 'https://api.morphllm.com';\n\ninterface CallModelOptions {\n morphApiKey?: string;\n morphApiUrl?: string;\n retryConfig?: RetryConfig;\n}\n\nasync function callModel(\n messages: ChatMessage[],\n model: string,\n options: CallModelOptions = {}\n): Promise<string> {\n const baseUrl = options.morphApiUrl || DEFAULT_API_URL;\n const apiKey = options.morphApiKey || process.env.MORPH_API_KEY || '';\n \n const fetchPromise = fetchWithRetry(\n `${baseUrl}/v1/chat/completions`,\n {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey}`,\n },\n body: JSON.stringify({\n model,\n temperature: 0.0,\n max_tokens: 1024,\n messages,\n }),\n },\n options.retryConfig\n );\n const resp = await withTimeout(fetchPromise, AGENT_CONFIG.TIMEOUT_MS, 'morph-warp-grep request timed out');\n if (!resp.ok) {\n if (resp.status === 404) {\n throw new Error(\n 'The endpoint you are trying to call is likely deprecated. 
Please update with: npm cache clean --force && npx -y @morphllm/morphmcp@latest or visit: https://morphllm.com/mcp'\n );\n }\n // keeping these cases are real throws, if this happens retry will likely not help, so best we just throw here, notice the error and fix\n const t = await resp.text();\n throw new Error(`morph-warp-grep error ${resp.status}: ${t}`);\n }\n const data = await resp.json();\n const content = data?.choices?.[0]?.message?.content;\n if (!content || typeof content !== 'string') {\n throw new Error('Invalid response from model');\n }\n return content;\n}\n\nexport async function runWarpGrep(config: SessionConfig & { provider: WarpGrepProvider }): Promise<AgentRunResult> {\n const repoRoot = path.resolve(config.repoRoot || process.cwd());\n const messages: ChatMessage[] = [];\n\n messages.push({ role: 'system' as const, content: getSystemPrompt() });\n const initialState = await buildInitialState(repoRoot, config.query, config.provider);\n messages.push({ role: 'user', content: initialState });\n\n const maxTurns = AGENT_CONFIG.MAX_TURNS;\n const model = config.model || DEFAULT_MODEL;\n const provider = config.provider;\n const errors: Array<{ message: string }> = [];\n\n let finishMeta: AgentFinish | undefined;\n let terminationReason: AgentRunResult['terminationReason'] = 'terminated';\n\n for (let turn = 1; turn <= maxTurns; turn += 1) {\n // Enforce hard context limit before calling model\n enforceContextLimit(messages);\n \n // call model\n const assistantContent = await callModel(messages, model, {\n morphApiKey: config.morphApiKey,\n morphApiUrl: config.morphApiUrl,\n retryConfig: config.retryConfig,\n }).catch((e: unknown) => {\n errors.push({ message: e instanceof Error ? e.message : String(e) });\n return '';\n });\n if (!assistantContent) break;\n messages.push({ role: 'assistant', content: assistantContent });\n\n // parse tool calls (no longer throws - returns _skip calls for malformed commands)\n const toolCalls = parser.parse(assistantContent);\n if (toolCalls.length === 0) {\n errors.push({ message: 'No tool calls produced by the model. Your MCP is likely out of date! Update it by running: rm -rf ~/.npm/_npx && npm cache clean --force && npx -y @morphllm/morphmcp@latest' });\n terminationReason = 'terminated';\n break;\n }\n\n const finishCalls = toolCalls.filter(c => c.name === 'finish');\n const grepCalls = toolCalls.filter(c => c.name === 'grep');\n const listDirCalls = toolCalls.filter(c => c.name === 'list_directory');\n const readCalls = toolCalls.filter(c => c.name === 'read');\n const skipCalls = toolCalls.filter(c => c.name === '_skip');\n\n const formatted: string[] = [];\n\n // Surface any skipped commands as feedback to the LLM\n for (const c of skipCalls) {\n const msg = (c.arguments as { message?: string })?.message || 'Command skipped due to parsing error';\n formatted.push(msg);\n }\n\n const allPromises: Array<Promise<string>> = [];\n \n for (const c of grepCalls) {\n const args = (c.arguments ?? {}) as { pattern: string; path: string; glob?: string };\n allPromises.push(\n toolGrep(provider, args).then(\n ({ output }) => formatAgentToolOutput('grep', args, output, { isError: false }),\n err => formatAgentToolOutput('grep', args, String(err), { isError: true })\n )\n );\n }\n \n for (const c of listDirCalls) {\n const args = (c.arguments ?? 
{}) as { path: string; pattern?: string | null };\n allPromises.push(\n toolListDirectory(provider, args).then(\n p => formatAgentToolOutput('list_directory', args, p, { isError: false }),\n err => formatAgentToolOutput('list_directory', args, String(err), { isError: true })\n )\n );\n }\n \n for (const c of readCalls) {\n const args = (c.arguments ?? {}) as { path: string; start?: number; end?: number; lines?: Array<[number, number]> };\n allPromises.push(\n toolRead(provider, args).then(\n p => formatAgentToolOutput('read', args, p, { isError: false }),\n err => formatAgentToolOutput('read', args, String(err), { isError: true })\n )\n );\n }\n \n const allResults = await Promise.all(allPromises);\n for (const result of allResults) {\n formatted.push(result);\n }\n\n if (formatted.length > 0) {\n const turnMessage = formatTurnMessage(turn, maxTurns);\n const contextBudget = calculateContextBudget(messages);\n messages.push({ role: 'user', content: formatted.join('\\n') + turnMessage + '\\n' + contextBudget });\n }\n\n if (finishCalls.length) {\n const fc = finishCalls[0];\n const files = ((fc.arguments as any)?.files ?? []) as AgentFinish['files'];\n finishMeta = { files };\n terminationReason = 'completed';\n break;\n }\n }\n\n if (terminationReason !== 'completed' || !finishMeta) {\n return { terminationReason, messages, errors };\n }\n\n // Build finish payload\n const parts: string[] = ['Relevant context found:'];\n for (const f of finishMeta.files) {\n const ranges = f.lines === '*' ? '*' : f.lines.map(([s, e]) => `${s}-${e}`).join(', ');\n parts.push(`- ${f.path}: ${ranges}`);\n }\n const payload = parts.join('\\n');\n\n // Resolve file contents for returned ranges\n // Wrap reader in try-catch to handle non-existent or unreadable files gracefully\n // Track files that couldn't be read for error reporting\n const fileReadErrors: Array<{ path: string; error: string }> = [];\n const resolved = await readFinishFiles(\n repoRoot,\n finishMeta.files,\n async (p: string, s?: number, e?: number) => {\n try {\n const rr = await provider.read({ path: p, start: s, end: e });\n // rr.lines are \"line|content\" → strip the \"line|\" prefix\n return rr.lines.map(l => {\n const idx = l.indexOf('|');\n return idx >= 0 ? l.slice(idx + 1) : l;\n });\n } catch (err) {\n // File doesn't exist or can't be read - log error but don't throw\n // This handles cases where the agent hallucinated a path or the file was deleted\n const errorMsg = err instanceof Error ? 
err.message : String(err);\n fileReadErrors.push({ path: p, error: errorMsg });\n console.error(`[warp_grep] Failed to read file: ${p} - ${errorMsg}`);\n return [`[couldn't find: ${p}]`];\n }\n }\n );\n\n // Add file read errors to the result so MCP can report them\n if (fileReadErrors.length > 0) {\n errors.push(...fileReadErrors.map(e => ({ message: `File read error: ${e.path} - ${e.error}` })));\n }\n\n return {\n terminationReason: 'completed',\n messages,\n finish: { payload, metadata: finishMeta, resolved },\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAYA,OAAO,UAAU;AAWjB,IAAM,SAAS,IAAI,kBAAkB;AAErC,IAAM,kBAAkB;AAQxB,eAAe,UACb,UACA,OACA,UAA4B,CAAC,GACZ;AACjB,QAAM,UAAU,QAAQ,eAAe;AACvC,QAAM,SAAS,QAAQ,eAAe,QAAQ,IAAI,iBAAiB;AAEnE,QAAM,eAAe;AAAA,IACnB,GAAG,OAAO;AAAA,IACV;AAAA,MACE,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,eAAe,UAAU,MAAM;AAAA,MACjC;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA,aAAa;AAAA,QACb,YAAY;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IACA,QAAQ;AAAA,EACV;AACA,QAAM,OAAO,MAAM,YAAY,cAAc,aAAa,YAAY,mCAAmC;AACzG,MAAI,CAAC,KAAK,IAAI;AACZ,QAAI,KAAK,WAAW,KAAK;AACvB,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,IAAI,MAAM,KAAK,KAAK;AAC1B,UAAM,IAAI,MAAM,yBAAyB,KAAK,MAAM,KAAK,CAAC,EAAE;AAAA,EAC9D;AACA,QAAM,OAAO,MAAM,KAAK,KAAK;AAC7B,QAAM,UAAU,MAAM,UAAU,CAAC,GAAG,SAAS;AAC7C,MAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AAC3C,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC/C;AACA,SAAO;AACT;AAEA,eAAsB,YAAY,QAAiF;AACjH,QAAM,WAAW,KAAK,QAAQ,OAAO,YAAY,QAAQ,IAAI,CAAC;AAC9D,QAAM,WAA0B,CAAC;AAEjC,WAAS,KAAK,EAAE,MAAM,UAAmB,SAAS,gBAAgB,EAAE,CAAC;AACrE,QAAM,eAAe,MAAM,kBAAkB,UAAU,OAAO,OAAO,OAAO,QAAQ;AACpF,WAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,aAAa,CAAC;AAErD,QAAM,WAAW,aAAa;AAC9B,QAAM,QAAQ,OAAO,SAAS;AAC9B,QAAM,WAAW,OAAO;AACxB,QAAM,SAAqC,CAAC;AAE5C,MAAI;AACJ,MAAI,oBAAyD;AAE7D,WAAS,OAAO,GAAG,QAAQ,UAAU,QAAQ,GAAG;AAE9C,wBAAoB,QAAQ;AAG5B,UAAM,mBAAmB,MAAM,UAAU,UAAU,OAAO;AAAA,MACxD,aAAa,OAAO;AAAA,MACpB,aAAa,OAAO;AAAA,MACpB,aAAa,OAAO;AAAA,IACtB,CAAC,EAAE,MAAM,CAAC,MAAe;AACvB,aAAO,KAAK,EAAE,SAAS,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,EAAE,CAAC;AACnE,aAAO;AAAA,IACT,CAAC;AACD,QAAI,CAAC,iBAAkB;AACvB,aAAS,KAAK,EAAE,MAAM,aAAa,SAAS,iBAAiB,CAAC;AAG9D,UAAM,YAAY,OAAO,MAAM,gBAAgB;AAC/C,QAAI,UAAU,WAAW,GAAG;AAC1B,aAAO,KAAK,EAAE,SAAS,+KAA+K,CAAC;AACvM,0BAAoB;AACpB;AAAA,IACF;AAEA,UAAM,cAAc,UAAU,OAAO,OAAK,EAAE,SAAS,QAAQ;AAC7D,UAAM,YAAY,UAAU,OAAO,OAAK,EAAE,SAAS,MAAM;AACzD,UAAM,eAAe,UAAU,OAAO,OAAK,EAAE,SAAS,gBAAgB;AACtE,UAAM,YAAY,UAAU,OAAO,OAAK,EAAE,SAAS,MAAM;AACzD,UAAM,YAAY,UAAU,OAAO,OAAK,EAAE,SAAS,OAAO;AAE1D,UAAM,YAAsB,CAAC;AAG7B,eAAW,KAAK,WAAW;AACzB,YAAM,MAAO,EAAE,WAAoC,WAAW;AAC9D,gBAAU,KAAK,GAAG;AAAA,IACpB;AAEA,UAAM,cAAsC,CAAC;AAE7C,eAAW,KAAK,WAAW;AACzB,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,kBAAY;AAAA,QACV,SAAS,UAAU,IAAI,EAAE;AAAA,UACvB,CAAC,EAAE,OAAO,MAAM,sBAAsB,QAAQ,MAAM,QAAQ,EAAE,SAAS,MAAM,CAAC;AAAA,UAC9E,SAAO,sBAAsB,QAAQ,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC;AAAA,QAC3E;AAAA,MACF;AAAA,IACF;AAEA,eAAW,KAAK,cAAc;AAC5B,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,kBAAY;AAAA,QACV,kBAAkB,UAAU,IAAI,EAAE;AAAA,UAChC,OAAK,sBAAsB,kBAAkB,MAAM,GAAG,EAAE,SAAS,MAAM,CAAC;AAAA,UACxE,SAAO,sBAAsB,kBAAkB,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC;AAAA,QACrF;AAAA,MACF;AAAA,IACF;AAEA,eAAW,KAAK,WAAW;AACzB,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,kBAAY;AAAA,QACV,SAAS,UAAU,IAAI,EAAE;AAAA,UACvB,OAAK,sBAAsB,QAAQ,MAAM,GAAG,EAAE,SAAS,MAAM,CAAC;AAAA,UAC9D,SAAO,sBAAsB,QAAQ,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC;AAAA,QAC3E;AAAA,MACF;AAAA,IACF;AAEA,UAAM,aAAa,MAAM,QAAQ,IAAI,WAAW;AAChD,eAAW,UAAU,YAAY;AAC/B,gBAAU,KAAK,MAAM;AAAA,IACvB;AAEA,QAAI,UAAU,SAAS,GAAG;AACxB,YAAM,cAAc,kBAAkB,MAAM,QAAQ;AACpD,YAAM,gBAAgB,uBAAuB,
QAAQ;AACrD,eAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,UAAU,KAAK,IAAI,IAAI,cAAc,OAAO,cAAc,CAAC;AAAA,IACpG;AAEA,QAAI,YAAY,QAAQ;AACtB,YAAM,KAAK,YAAY,CAAC;AACxB,YAAM,QAAU,GAAG,WAAmB,SAAS,CAAC;AAChD,mBAAa,EAAE,MAAM;AACrB,0BAAoB;AACpB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,sBAAsB,eAAe,CAAC,YAAY;AACpD,WAAO,EAAE,mBAAmB,UAAU,OAAO;AAAA,EAC/C;AAGA,QAAM,QAAkB,CAAC,yBAAyB;AAClD,aAAW,KAAK,WAAW,OAAO;AAChC,UAAM,SAAS,EAAE,UAAU,MAAM,MAAM,EAAE,MAAM,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,IAAI;AACrF,UAAM,KAAK,KAAK,EAAE,IAAI,KAAK,MAAM,EAAE;AAAA,EACrC;AACA,QAAM,UAAU,MAAM,KAAK,IAAI;AAK/B,QAAM,iBAAyD,CAAC;AAChE,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA,WAAW;AAAA,IACX,OAAO,GAAW,GAAY,MAAe;AAC3C,UAAI;AACF,cAAM,KAAK,MAAM,SAAS,KAAK,EAAE,MAAM,GAAG,OAAO,GAAG,KAAK,EAAE,CAAC;AAE5D,eAAO,GAAG,MAAM,IAAI,OAAK;AACvB,gBAAM,MAAM,EAAE,QAAQ,GAAG;AACzB,iBAAO,OAAO,IAAI,EAAE,MAAM,MAAM,CAAC,IAAI;AAAA,QACvC,CAAC;AAAA,MACH,SAAS,KAAK;AAGZ,cAAM,WAAW,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAChE,uBAAe,KAAK,EAAE,MAAM,GAAG,OAAO,SAAS,CAAC;AAChD,gBAAQ,MAAM,oCAAoC,CAAC,MAAM,QAAQ,EAAE;AACnE,eAAO,CAAC,mBAAmB,CAAC,GAAG;AAAA,MACjC;AAAA,IACF;AAAA,EACF;AAGA,MAAI,eAAe,SAAS,GAAG;AAC7B,WAAO,KAAK,GAAG,eAAe,IAAI,QAAM,EAAE,SAAS,oBAAoB,EAAE,IAAI,MAAM,EAAE,KAAK,GAAG,EAAE,CAAC;AAAA,EAClG;AAEA,SAAO;AAAA,IACL,mBAAmB;AAAA,IACnB;AAAA,IACA,QAAQ,EAAE,SAAS,UAAU,YAAY,SAAS;AAAA,EACpD;AACF;","names":[]}
+ {"version":3,"sources":["../tools/warp_grep/agent/runner.ts"],"sourcesContent":["import { AGENT_CONFIG, DEFAULT_MODEL } from './config.js';\nimport { getSystemPrompt } from './prompt.js';\nimport type { AgentRunResult, ChatMessage, SessionConfig, AgentFinish } from './types.js';\nimport { LLMResponseParser } from './parser.js';\nimport type { WarpGrepProvider } from '../providers/types.js';\nimport { toolGrep } from './tools/grep.js';\nimport { toolRead } from './tools/read.js';\nimport { toolListDirectory } from './tools/list_directory.js';\nimport { readFinishFiles } from './tools/finish.js';\nimport { fetchWithRetry, withTimeout, type RetryConfig } from '../../utils/resilience.js';\nimport { formatAgentToolOutput } from './formatter.js';\nimport { formatTurnMessage, calculateContextBudget, buildInitialState, enforceContextLimit } from './helpers.js';\nimport path from 'path';\n\ntype EventName =\n | 'initial_state'\n | 'round_start'\n | 'round_end'\n | 'finish'\n | 'error';\n\nexport type EventCallback = (name: EventName, payload: Record<string, unknown>) => void;\n\nconst parser = new LLMResponseParser();\n\nconst DEFAULT_API_URL = 'https://api.morphllm.com';\n\ninterface CallModelOptions {\n morphApiKey?: string;\n morphApiUrl?: string;\n retryConfig?: RetryConfig;\n}\n\nasync function callModel(\n messages: ChatMessage[],\n model: string,\n options: CallModelOptions = {}\n): Promise<string> {\n const baseUrl = options.morphApiUrl || DEFAULT_API_URL;\n const apiKey = options.morphApiKey || process.env.MORPH_API_KEY || '';\n \n const fetchPromise = fetchWithRetry(\n `${baseUrl}/v1/chat/completions`,\n {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey}`,\n },\n body: JSON.stringify({\n model,\n temperature: 0.0,\n max_tokens: 1024,\n messages,\n }),\n },\n options.retryConfig\n );\n const resp = await withTimeout(fetchPromise, AGENT_CONFIG.TIMEOUT_MS, 'morph-warp-grep request timed out');\n if (!resp.ok) {\n if (resp.status === 404) {\n throw new Error(\n 'The endpoint you are trying to call is likely deprecated. 
Please update with: npm cache clean --force && npx -y @morphllm/morphmcp@latest or visit: https://morphllm.com/mcp'\n );\n }\n // keeping these cases are real throws, if this happens retry will likely not help, so best we just throw here, notice the error and fix\n const t = await resp.text();\n throw new Error(`morph-warp-grep error ${resp.status}: ${t}`);\n }\n const data = await resp.json();\n const content = data?.choices?.[0]?.message?.content;\n if (!content || typeof content !== 'string') {\n throw new Error('Invalid response from model');\n }\n return content;\n}\n\nexport async function runWarpGrep(config: SessionConfig & { provider: WarpGrepProvider }): Promise<AgentRunResult> {\n const repoRoot = path.resolve(config.repoRoot || process.cwd());\n const messages: ChatMessage[] = [];\n\n messages.push({ role: 'system' as const, content: getSystemPrompt() });\n const initialState = await buildInitialState(repoRoot, config.query, config.provider);\n messages.push({ role: 'user', content: initialState });\n\n const maxTurns = AGENT_CONFIG.MAX_TURNS;\n const model = config.model || DEFAULT_MODEL;\n const provider = config.provider;\n const errors: Array<{ message: string }> = [];\n\n let finishMeta: AgentFinish | undefined;\n let terminationReason: AgentRunResult['terminationReason'] = 'terminated';\n\n for (let turn = 1; turn <= maxTurns; turn += 1) {\n // Enforce hard context limit before calling model\n enforceContextLimit(messages);\n \n // call model\n const assistantContent = await callModel(messages, model, {\n morphApiKey: config.morphApiKey,\n morphApiUrl: config.morphApiUrl,\n retryConfig: config.retryConfig,\n }).catch((e: unknown) => {\n errors.push({ message: e instanceof Error ? e.message : String(e) });\n return '';\n });\n if (!assistantContent) break;\n messages.push({ role: 'assistant', content: assistantContent });\n\n // parse tool calls (no longer throws - returns _skip calls for malformed commands)\n const toolCalls = parser.parse(assistantContent);\n if (toolCalls.length === 0) {\n errors.push({ message: 'No tool calls produced by the model. Your MCP is likely out of date! Update it by running: rm -rf ~/.npm/_npx && npm cache clean --force && npx -y @morphllm/morphmcp@latest' });\n terminationReason = 'terminated';\n break;\n }\n\n const finishCalls = toolCalls.filter(c => c.name === 'finish');\n const grepCalls = toolCalls.filter(c => c.name === 'grep');\n const listDirCalls = toolCalls.filter(c => c.name === 'list_directory');\n const readCalls = toolCalls.filter(c => c.name === 'read');\n const skipCalls = toolCalls.filter(c => c.name === '_skip');\n\n const formatted: string[] = [];\n\n // Surface any skipped commands as feedback to the LLM\n for (const c of skipCalls) {\n const msg = (c.arguments as { message?: string })?.message || 'Command skipped due to parsing error';\n formatted.push(msg);\n }\n\n const allPromises: Array<Promise<string>> = [];\n \n for (const c of grepCalls) {\n const args = (c.arguments ?? {}) as { pattern: string; path: string; glob?: string };\n allPromises.push(\n toolGrep(provider, args).then(\n ({ output }) => formatAgentToolOutput('grep', args, output, { isError: false }),\n err => formatAgentToolOutput('grep', args, String(err), { isError: true })\n )\n );\n }\n \n for (const c of listDirCalls) {\n const args = (c.arguments ?? 
{}) as { path: string; pattern?: string | null };\n allPromises.push(\n toolListDirectory(provider, args).then(\n p => formatAgentToolOutput('list_directory', args, p, { isError: false }),\n err => formatAgentToolOutput('list_directory', args, String(err), { isError: true })\n )\n );\n }\n \n for (const c of readCalls) {\n const args = (c.arguments ?? {}) as { path: string; start?: number; end?: number; lines?: Array<[number, number]> };\n allPromises.push(\n toolRead(provider, args).then(\n p => formatAgentToolOutput('read', args, p, { isError: false }),\n err => formatAgentToolOutput('read', args, String(err), { isError: true })\n )\n );\n }\n \n const allResults = await Promise.all(allPromises);\n for (const result of allResults) {\n formatted.push(result);\n }\n\n if (formatted.length > 0) {\n const turnMessage = formatTurnMessage(turn, maxTurns);\n const contextBudget = calculateContextBudget(messages);\n messages.push({ role: 'user', content: formatted.join('\\n') + turnMessage + '\\n' + contextBudget });\n }\n\n if (finishCalls.length) {\n const fc = finishCalls[0];\n const files = ((fc.arguments as any)?.files ?? []) as AgentFinish['files'];\n finishMeta = { files };\n terminationReason = 'completed';\n break;\n }\n }\n\n if (terminationReason !== 'completed' || !finishMeta) {\n return { terminationReason, messages, errors };\n }\n\n // Build finish payload\n const parts: string[] = ['Relevant context found:'];\n for (const f of finishMeta.files) {\n const ranges = f.lines === '*' ? '*' \n : Array.isArray(f.lines) ? f.lines.map(([s, e]) => `${s}-${e}`).join(', ') \n : '*';\n parts.push(`- ${f.path}: ${ranges}`);\n }\n const payload = parts.join('\\n');\n\n // Resolve file contents for returned ranges\n // Wrap reader in try-catch to handle non-existent or unreadable files gracefully\n // Track files that couldn't be read for error reporting\n const fileReadErrors: Array<{ path: string; error: string }> = [];\n const resolved = await readFinishFiles(\n repoRoot,\n finishMeta.files,\n async (p: string, s?: number, e?: number) => {\n try {\n const rr = await provider.read({ path: p, start: s, end: e });\n // rr.lines are \"line|content\" → strip the \"line|\" prefix\n return rr.lines.map(l => {\n const idx = l.indexOf('|');\n return idx >= 0 ? l.slice(idx + 1) : l;\n });\n } catch (err) {\n // File doesn't exist or can't be read - log error but don't throw\n // This handles cases where the agent hallucinated a path or the file was deleted\n const errorMsg = err instanceof Error ? 
err.message : String(err);\n fileReadErrors.push({ path: p, error: errorMsg });\n console.error(`[warp_grep] Failed to read file: ${p} - ${errorMsg}`);\n return [`[couldn't find: ${p}]`];\n }\n }\n );\n\n // Add file read errors to the result so MCP can report them\n if (fileReadErrors.length > 0) {\n errors.push(...fileReadErrors.map(e => ({ message: `File read error: ${e.path} - ${e.error}` })));\n }\n\n return {\n terminationReason: 'completed',\n messages,\n finish: { payload, metadata: finishMeta, resolved },\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAYA,OAAO,UAAU;AAWjB,IAAM,SAAS,IAAI,kBAAkB;AAErC,IAAM,kBAAkB;AAQxB,eAAe,UACb,UACA,OACA,UAA4B,CAAC,GACZ;AACjB,QAAM,UAAU,QAAQ,eAAe;AACvC,QAAM,SAAS,QAAQ,eAAe,QAAQ,IAAI,iBAAiB;AAEnE,QAAM,eAAe;AAAA,IACnB,GAAG,OAAO;AAAA,IACV;AAAA,MACE,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,eAAe,UAAU,MAAM;AAAA,MACjC;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA,aAAa;AAAA,QACb,YAAY;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IACA,QAAQ;AAAA,EACV;AACA,QAAM,OAAO,MAAM,YAAY,cAAc,aAAa,YAAY,mCAAmC;AACzG,MAAI,CAAC,KAAK,IAAI;AACZ,QAAI,KAAK,WAAW,KAAK;AACvB,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,IAAI,MAAM,KAAK,KAAK;AAC1B,UAAM,IAAI,MAAM,yBAAyB,KAAK,MAAM,KAAK,CAAC,EAAE;AAAA,EAC9D;AACA,QAAM,OAAO,MAAM,KAAK,KAAK;AAC7B,QAAM,UAAU,MAAM,UAAU,CAAC,GAAG,SAAS;AAC7C,MAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AAC3C,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC/C;AACA,SAAO;AACT;AAEA,eAAsB,YAAY,QAAiF;AACjH,QAAM,WAAW,KAAK,QAAQ,OAAO,YAAY,QAAQ,IAAI,CAAC;AAC9D,QAAM,WAA0B,CAAC;AAEjC,WAAS,KAAK,EAAE,MAAM,UAAmB,SAAS,gBAAgB,EAAE,CAAC;AACrE,QAAM,eAAe,MAAM,kBAAkB,UAAU,OAAO,OAAO,OAAO,QAAQ;AACpF,WAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,aAAa,CAAC;AAErD,QAAM,WAAW,aAAa;AAC9B,QAAM,QAAQ,OAAO,SAAS;AAC9B,QAAM,WAAW,OAAO;AACxB,QAAM,SAAqC,CAAC;AAE5C,MAAI;AACJ,MAAI,oBAAyD;AAE7D,WAAS,OAAO,GAAG,QAAQ,UAAU,QAAQ,GAAG;AAE9C,wBAAoB,QAAQ;AAG5B,UAAM,mBAAmB,MAAM,UAAU,UAAU,OAAO;AAAA,MACxD,aAAa,OAAO;AAAA,MACpB,aAAa,OAAO;AAAA,MACpB,aAAa,OAAO;AAAA,IACtB,CAAC,EAAE,MAAM,CAAC,MAAe;AACvB,aAAO,KAAK,EAAE,SAAS,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,EAAE,CAAC;AACnE,aAAO;AAAA,IACT,CAAC;AACD,QAAI,CAAC,iBAAkB;AACvB,aAAS,KAAK,EAAE,MAAM,aAAa,SAAS,iBAAiB,CAAC;AAG9D,UAAM,YAAY,OAAO,MAAM,gBAAgB;AAC/C,QAAI,UAAU,WAAW,GAAG;AAC1B,aAAO,KAAK,EAAE,SAAS,+KAA+K,CAAC;AACvM,0BAAoB;AACpB;AAAA,IACF;AAEA,UAAM,cAAc,UAAU,OAAO,OAAK,EAAE,SAAS,QAAQ;AAC7D,UAAM,YAAY,UAAU,OAAO,OAAK,EAAE,SAAS,MAAM;AACzD,UAAM,eAAe,UAAU,OAAO,OAAK,EAAE,SAAS,gBAAgB;AACtE,UAAM,YAAY,UAAU,OAAO,OAAK,EAAE,SAAS,MAAM;AACzD,UAAM,YAAY,UAAU,OAAO,OAAK,EAAE,SAAS,OAAO;AAE1D,UAAM,YAAsB,CAAC;AAG7B,eAAW,KAAK,WAAW;AACzB,YAAM,MAAO,EAAE,WAAoC,WAAW;AAC9D,gBAAU,KAAK,GAAG;AAAA,IACpB;AAEA,UAAM,cAAsC,CAAC;AAE7C,eAAW,KAAK,WAAW;AACzB,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,kBAAY;AAAA,QACV,SAAS,UAAU,IAAI,EAAE;AAAA,UACvB,CAAC,EAAE,OAAO,MAAM,sBAAsB,QAAQ,MAAM,QAAQ,EAAE,SAAS,MAAM,CAAC;AAAA,UAC9E,SAAO,sBAAsB,QAAQ,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC;AAAA,QAC3E;AAAA,MACF;AAAA,IACF;AAEA,eAAW,KAAK,cAAc;AAC5B,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,kBAAY;AAAA,QACV,kBAAkB,UAAU,IAAI,EAAE;AAAA,UAChC,OAAK,sBAAsB,kBAAkB,MAAM,GAAG,EAAE,SAAS,MAAM,CAAC;AAAA,UACxE,SAAO,sBAAsB,kBAAkB,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC;AAAA,QACrF;AAAA,MACF;AAAA,IACF;AAEA,eAAW,KAAK,WAAW;AACzB,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,kBAAY;AAAA,QACV,SAAS,UAAU,IAAI,EAAE;AAAA,UACvB,OAAK,sBAAsB,QAAQ,MAAM,GAAG,EAAE,SAAS,MAAM,CAAC;AAAA,UAC9D,SAAO,sBAAsB,QAAQ,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC;AAAA,QAC3E;AAAA,MACF;AAAA,IACF;AAEA,UAAM,aAAa,MAAM,QAAQ,IAAI,WAAW;AAChD,eAAW,UAAU,YAAY;AAC/B,gBAAU,KAAK,MAAM;AAAA,IACvB;AAEA,QAAI,UAAU,SAAS,GAAG;AACxB,YAAM,cAAc,kBAAkB,MAAM,QAAQ;AACpD,YAAM,gBAAgB,uBAAuB,
QAAQ;AACrD,eAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,UAAU,KAAK,IAAI,IAAI,cAAc,OAAO,cAAc,CAAC;AAAA,IACpG;AAEA,QAAI,YAAY,QAAQ;AACtB,YAAM,KAAK,YAAY,CAAC;AACxB,YAAM,QAAU,GAAG,WAAmB,SAAS,CAAC;AAChD,mBAAa,EAAE,MAAM;AACrB,0BAAoB;AACpB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,sBAAsB,eAAe,CAAC,YAAY;AACpD,WAAO,EAAE,mBAAmB,UAAU,OAAO;AAAA,EAC/C;AAGA,QAAM,QAAkB,CAAC,yBAAyB;AAClD,aAAW,KAAK,WAAW,OAAO;AAChC,UAAM,SAAS,EAAE,UAAU,MAAM,MAC7B,MAAM,QAAQ,EAAE,KAAK,IAAI,EAAE,MAAM,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,IAAI,IACvE;AACJ,UAAM,KAAK,KAAK,EAAE,IAAI,KAAK,MAAM,EAAE;AAAA,EACrC;AACA,QAAM,UAAU,MAAM,KAAK,IAAI;AAK/B,QAAM,iBAAyD,CAAC;AAChE,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA,WAAW;AAAA,IACX,OAAO,GAAW,GAAY,MAAe;AAC3C,UAAI;AACF,cAAM,KAAK,MAAM,SAAS,KAAK,EAAE,MAAM,GAAG,OAAO,GAAG,KAAK,EAAE,CAAC;AAE5D,eAAO,GAAG,MAAM,IAAI,OAAK;AACvB,gBAAM,MAAM,EAAE,QAAQ,GAAG;AACzB,iBAAO,OAAO,IAAI,EAAE,MAAM,MAAM,CAAC,IAAI;AAAA,QACvC,CAAC;AAAA,MACH,SAAS,KAAK;AAGZ,cAAM,WAAW,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAChE,uBAAe,KAAK,EAAE,MAAM,GAAG,OAAO,SAAS,CAAC;AAChD,gBAAQ,MAAM,oCAAoC,CAAC,MAAM,QAAQ,EAAE;AACnE,eAAO,CAAC,mBAAmB,CAAC,GAAG;AAAA,MACjC;AAAA,IACF;AAAA,EACF;AAGA,MAAI,eAAe,SAAS,GAAG;AAC7B,WAAO,KAAK,GAAG,eAAe,IAAI,QAAM,EAAE,SAAS,oBAAoB,EAAE,IAAI,MAAM,EAAE,KAAK,GAAG,EAAE,CAAC;AAAA,EAClG;AAEA,SAAO;AAAA,IACL,mBAAmB;AAAA,IACnB;AAAA,IACA,QAAQ,EAAE,SAAS,UAAU,YAAY,SAAS;AAAA,EACpD;AACF;","names":[]}
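The runner source embedded in the source map above shows the full warp_grep agent loop: runWarpGrep posts a system prompt and initial repo state, calls the model for up to MAX_TURNS, parses tool calls, runs grep / list_directory / read in parallel, and stops at the first finish call (or when the model returns no tool calls). A hypothetical end-to-end invocation is sketched below; it assumes runWarpGrep and RemoteCommandsProvider are in scope (their export subpaths are not shown in this diff), and the shell wiring mirrors the @example in the provider source further down.

// Sketch only - names assumed importable from the package; command wiring follows the provider's own JSDoc example.
import { execFile } from 'node:child_process';
import { promisify } from 'node:util';

const sh = promisify(execFile);

// Three raw-stdout commands; the SDK parses their output into structured results.
const provider = new RemoteCommandsProvider('/repo', {
  grep: async (pattern, path) => (await sh('rg', ['-n', pattern, path])).stdout,
  read: async (path, start, end) => (await sh('sed', ['-n', `${start},${end}p`, path])).stdout,
  listDir: async (path, maxDepth) => (await sh('find', [path, '-maxdepth', String(maxDepth)])).stdout,
});

const result = await runWarpGrep({
  repoRoot: '/repo',
  query: 'where is the retry logic configured?',
  provider,
  morphApiKey: process.env.MORPH_API_KEY,
});

if (result.terminationReason === 'completed') {
  console.log(result.finish?.payload);  // "Relevant context found: ..." summary
} else {
  console.error(result.errors);         // model/tool-call failures collected per turn
}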
@@ -5,7 +5,7 @@ import {
  import {
  executeToolCall,
  formatResult
- } from "./chunk-5PGRBWJX.js";
+ } from "./chunk-4VMIPD4G.js";
  import {
  getSystemPrompt
  } from "./chunk-FMLHRJDF.js";
@@ -58,4 +58,4 @@ export {
  createWarpGrepTool,
  openai_default
  };
- //# sourceMappingURL=chunk-QL5SMQ73.js.map
+ //# sourceMappingURL=chunk-NTTQJM6O.js.map
@@ -3,7 +3,7 @@ import {
  } from "./chunk-KW7OEGZK.js";
  import {
  executeToolCall
- } from "./chunk-5PGRBWJX.js";
+ } from "./chunk-4VMIPD4G.js";
 
  // tools/warp_grep/vercel.ts
  import { tool } from "ai";
@@ -38,4 +38,4 @@ export {
  createWarpGrepTool,
  vercel_default
  };
- //# sourceMappingURL=chunk-ND7IFSMR.js.map
+ //# sourceMappingURL=chunk-OFMDENAQ.js.map
@@ -117,8 +117,13 @@ var RemoteCommandsProvider = class {
  * Read file and add line numbers
  */
  async read(params) {
- const start = params.start ?? 1;
- const end = params.end ?? 1e6;
+ const rawStart = params.start;
+ const rawEnd = params.end;
+ const startValid = rawStart === void 0 || Number.isFinite(rawStart) && rawStart > 0;
+ const endValid = rawEnd === void 0 || Number.isFinite(rawEnd) && rawEnd > 0;
+ const rangeValid = startValid && endValid && (rawStart === void 0 || rawEnd === void 0 || rawStart <= rawEnd);
+ const start = rangeValid && rawStart !== void 0 ? rawStart : 1;
+ const end = rangeValid && rawEnd !== void 0 ? rawEnd : 1e6;
  try {
  const stdout = await this.commands.read(params.path, start, end);
  const contentLines = (stdout || "").split("\n");
@@ -180,4 +185,4 @@ var RemoteCommandsProvider = class {
  export {
  RemoteCommandsProvider
  };
- //# sourceMappingURL=chunk-26QXQFLZ.js.map
+ //# sourceMappingURL=chunk-PHJQ4N3T.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/providers/remote.ts"],"sourcesContent":["/**\n * RemoteCommandsProvider - wraps simple RemoteCommands into WarpGrepProvider\n * \n * Handles parsing of raw stdout from grep/read/listDir commands.\n * Users just return stdout, SDK handles all format conversion.\n */\n\nimport type { WarpGrepProvider, GrepResult, ReadResult, ListDirectoryEntry } from './types.js';\nimport type { RemoteCommands } from '../types.js';\nimport { AGENT_CONFIG } from '../agent/config.js';\n\n/**\n * Directories/files to always skip (exact name match)\n */\nconst SKIP_NAMES = new Set([\n // Version control\n '.git', '.svn', '.hg', '.bzr',\n // Dependencies\n 'node_modules', 'bower_components', '.pnpm', '.yarn',\n 'vendor', 'Pods', '.bundle',\n // Python\n '__pycache__', '.pytest_cache', '.mypy_cache', '.ruff_cache',\n '.venv', 'venv', '.tox', '.nox', '.eggs',\n // Build outputs\n 'dist', 'build', 'out', 'output', 'target', '_build',\n '.next', '.nuxt', '.output', '.vercel', '.netlify',\n // Cache\n '.cache', '.parcel-cache', '.turbo', '.nx', '.gradle',\n // IDE\n '.idea', '.vscode', '.vs',\n // Coverage\n 'coverage', '.coverage', 'htmlcov', '.nyc_output',\n // Temp\n 'tmp', 'temp', '.tmp', '.temp',\n // Lock files\n 'package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb',\n 'Cargo.lock', 'Gemfile.lock', 'poetry.lock',\n]);\n\n/**\n * File extensions to skip\n */\nconst SKIP_EXTENSIONS = new Set([\n '.min.js', '.min.css', '.bundle.js',\n '.wasm', '.so', '.dll', '.pyc',\n '.map', '.js.map',\n]);\n\n/**\n * Check if a filename should be skipped\n */\nfunction shouldSkip(name: string): boolean {\n // Skip exact name matches\n if (SKIP_NAMES.has(name)) return true;\n \n // Skip hidden files/directories (start with .)\n if (name.startsWith('.')) return true;\n \n // Skip files with certain extensions\n for (const ext of SKIP_EXTENSIONS) {\n if (name.endsWith(ext)) return true;\n }\n \n return false;\n}\n\n/**\n * Wraps simple RemoteCommands functions into a full WarpGrepProvider.\n * \n * This allows users to provide three simple functions that return raw stdout,\n * and the SDK handles all parsing internally.\n * \n * @example\n * ```typescript\n * const provider = new RemoteCommandsProvider('/home/repo', {\n * grep: async (pattern, path) => {\n * const r = await sandbox.run(`rg '${pattern}' '${path}'`);\n * return r.stdout;\n * },\n * read: async (path, start, end) => {\n * const r = await sandbox.run(`sed -n '${start},${end}p' '${path}'`);\n * return r.stdout;\n * },\n * listDir: async (path, maxDepth) => {\n * const r = await sandbox.run(`find '${path}' -maxdepth ${maxDepth}`);\n * return r.stdout;\n * },\n * });\n * ```\n */\nexport class RemoteCommandsProvider implements WarpGrepProvider {\n constructor(\n private readonly repoRoot: string,\n private readonly commands: RemoteCommands\n ) {}\n\n /**\n * Run grep command and parse ripgrep output\n */\n async grep(params: { pattern: string; path: string; glob?: string }): Promise<GrepResult> {\n try {\n const stdout = await this.commands.grep(params.pattern, params.path, params.glob);\n \n // Parse ripgrep output: each line is \"path:line:content\" or \"path-line-content\" for context\n const lines = (stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((l) => l.length > 0);\n \n // Check if output is too large\n if (lines.length > AGENT_CONFIG.MAX_OUTPUT_LINES) {\n return {\n lines: [],\n error: 'Query not specific enough - too many results returned. 
Try a more specific pattern.',\n };\n }\n \n return { lines };\n } catch (error) {\n return {\n lines: [],\n error: `[GREP ERROR] ${error instanceof Error ? error.message : String(error)}`,\n };\n }\n }\n\n /**\n * Read file and add line numbers\n */\n async read(params: { path: string; start?: number; end?: number }): Promise<ReadResult> {\n const rawStart = params.start;\n const rawEnd = params.end;\n \n const startValid = rawStart === undefined || (Number.isFinite(rawStart) && rawStart > 0);\n const endValid = rawEnd === undefined || (Number.isFinite(rawEnd) && rawEnd > 0);\n const rangeValid = startValid && endValid && \n (rawStart === undefined || rawEnd === undefined || rawStart <= rawEnd);\n \n const start = rangeValid && rawStart !== undefined ? rawStart : 1;\n const end = rangeValid && rawEnd !== undefined ? rawEnd : 1_000_000;\n \n try {\n const stdout = await this.commands.read(params.path, start, end);\n \n // Split content into lines and add line numbers\n const contentLines = (stdout || '').split('\\n');\n \n // Remove trailing empty line if present (common with sed output)\n if (contentLines.length > 0 && contentLines[contentLines.length - 1] === '') {\n contentLines.pop();\n }\n \n // Format as \"lineNumber|content\"\n const lines = contentLines.map((content, idx) => `${start + idx}|${content}`);\n \n // Check if output is too large\n if (lines.length > AGENT_CONFIG.MAX_READ_LINES) {\n const truncated = lines.slice(0, AGENT_CONFIG.MAX_READ_LINES);\n truncated.push(`... [truncated: showing ${AGENT_CONFIG.MAX_READ_LINES} of ${lines.length} lines]`);\n return { lines: truncated };\n }\n \n return { lines };\n } catch (error) {\n return {\n lines: [],\n error: `[READ ERROR] ${error instanceof Error ? error.message : String(error)}`,\n };\n }\n }\n\n /**\n * List directory and parse find output\n */\n async listDirectory(params: { \n path: string; \n pattern?: string | null; \n maxResults?: number; \n maxDepth?: number;\n }): Promise<ListDirectoryEntry[]> {\n const maxDepth = params.maxDepth ?? AGENT_CONFIG.MAX_LIST_DEPTH;\n const maxResults = params.maxResults ?? AGENT_CONFIG.MAX_OUTPUT_LINES;\n \n try {\n const stdout = await this.commands.listDir(params.path, maxDepth);\n \n // Parse find output: one path per line\n const paths = (stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((p) => p.length > 0);\n \n const regex = params.pattern ? new RegExp(params.pattern) : null;\n const entries: ListDirectoryEntry[] = [];\n \n for (const fullPath of paths) {\n // Skip the root path itself\n if (fullPath === params.path || fullPath === this.repoRoot) continue;\n \n const name = fullPath.split('/').pop() || '';\n \n // Simple name-based filtering - skip junk dirs/files\n if (shouldSkip(name)) continue;\n \n // Apply pattern filter if provided\n if (regex && !regex.test(name)) continue;\n \n // Determine relative path\n let relativePath = fullPath;\n if (fullPath.startsWith(this.repoRoot)) {\n relativePath = fullPath.slice(this.repoRoot.length).replace(/^\\//, '');\n }\n \n // Calculate depth based on path separators\n const depth = relativePath.split('/').filter(Boolean).length - 1;\n \n // Determine if it's a directory (ends with / or infer from path structure)\n // Note: Most sandbox find commands don't indicate type, so we guess based on extension\n const hasExtension = name.includes('.') && !name.startsWith('.');\n const type: 'file' | 'dir' = hasExtension ? 
'file' : 'dir';\n \n entries.push({\n name,\n path: relativePath,\n type,\n depth: Math.max(0, depth),\n });\n \n if (entries.length >= maxResults) break;\n }\n \n return entries;\n } catch (error) {\n // Return empty array on error (consistent with LocalRipgrepProvider)\n return [];\n }\n }\n}\n\n\n\n\n"],"mappings":";;;;;AAcA,IAAM,aAAa,oBAAI,IAAI;AAAA;AAAA,EAEzB;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAO;AAAA;AAAA,EAEvB;AAAA,EAAgB;AAAA,EAAoB;AAAA,EAAS;AAAA,EAC7C;AAAA,EAAU;AAAA,EAAQ;AAAA;AAAA,EAElB;AAAA,EAAe;AAAA,EAAiB;AAAA,EAAe;AAAA,EAC/C;AAAA,EAAS;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA;AAAA,EAEjC;AAAA,EAAQ;AAAA,EAAS;AAAA,EAAO;AAAA,EAAU;AAAA,EAAU;AAAA,EAC5C;AAAA,EAAS;AAAA,EAAS;AAAA,EAAW;AAAA,EAAW;AAAA;AAAA,EAExC;AAAA,EAAU;AAAA,EAAiB;AAAA,EAAU;AAAA,EAAO;AAAA;AAAA,EAE5C;AAAA,EAAS;AAAA,EAAW;AAAA;AAAA,EAEpB;AAAA,EAAY;AAAA,EAAa;AAAA,EAAW;AAAA;AAAA,EAEpC;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAQ;AAAA;AAAA,EAEvB;AAAA,EAAqB;AAAA,EAAa;AAAA,EAAkB;AAAA,EACpD;AAAA,EAAc;AAAA,EAAgB;AAChC,CAAC;AAKD,IAAM,kBAAkB,oBAAI,IAAI;AAAA,EAC9B;AAAA,EAAW;AAAA,EAAY;AAAA,EACvB;AAAA,EAAS;AAAA,EAAO;AAAA,EAAQ;AAAA,EACxB;AAAA,EAAQ;AACV,CAAC;AAKD,SAAS,WAAW,MAAuB;AAEzC,MAAI,WAAW,IAAI,IAAI,EAAG,QAAO;AAGjC,MAAI,KAAK,WAAW,GAAG,EAAG,QAAO;AAGjC,aAAW,OAAO,iBAAiB;AACjC,QAAI,KAAK,SAAS,GAAG,EAAG,QAAO;AAAA,EACjC;AAEA,SAAO;AACT;AA0BO,IAAM,yBAAN,MAAyD;AAAA,EAC9D,YACmB,UACA,UACjB;AAFiB;AACA;AAAA,EAChB;AAAA;AAAA;AAAA;AAAA,EAKH,MAAM,KAAK,QAA+E;AACxF,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,SAAS,KAAK,OAAO,SAAS,OAAO,MAAM,OAAO,IAAI;AAGhF,YAAM,SAAS,UAAU,IACtB,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAG7B,UAAI,MAAM,SAAS,aAAa,kBAAkB;AAChD,eAAO;AAAA,UACL,OAAO,CAAC;AAAA,UACR,OAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO,EAAE,MAAM;AAAA,IACjB,SAAS,OAAO;AACd,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC/E;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,QAA6E;AACtF,UAAM,WAAW,OAAO;AACxB,UAAM,SAAS,OAAO;AAEtB,UAAM,aAAa,aAAa,UAAc,OAAO,SAAS,QAAQ,KAAK,WAAW;AACtF,UAAM,WAAW,WAAW,UAAc,OAAO,SAAS,MAAM,KAAK,SAAS;AAC9E,UAAM,aAAa,cAAc,aAC9B,aAAa,UAAa,WAAW,UAAa,YAAY;AAEjE,UAAM,QAAQ,cAAc,aAAa,SAAY,WAAW;AAChE,UAAM,MAAM,cAAc,WAAW,SAAY,SAAS;AAE1D,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,SAAS,KAAK,OAAO,MAAM,OAAO,GAAG;AAG/D,YAAM,gBAAgB,UAAU,IAAI,MAAM,IAAI;AAG9C,UAAI,aAAa,SAAS,KAAK,aAAa,aAAa,SAAS,CAAC,MAAM,IAAI;AAC3E,qBAAa,IAAI;AAAA,MACnB;AAGA,YAAM,QAAQ,aAAa,IAAI,CAAC,SAAS,QAAQ,GAAG,QAAQ,GAAG,IAAI,OAAO,EAAE;AAG5E,UAAI,MAAM,SAAS,aAAa,gBAAgB;AAC9C,cAAM,YAAY,MAAM,MAAM,GAAG,aAAa,cAAc;AAC5D,kBAAU,KAAK,2BAA2B,aAAa,cAAc,OAAO,MAAM,MAAM,SAAS;AACjG,eAAO,EAAE,OAAO,UAAU;AAAA,MAC5B;AAEA,aAAO,EAAE,MAAM;AAAA,IACjB,SAAS,OAAO;AACd,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC/E;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,QAKc;AAChC,UAAM,WAAW,OAAO,YAAY,aAAa;AACjD,UAAM,aAAa,OAAO,cAAc,aAAa;AAErD,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,SAAS,QAAQ,OAAO,MAAM,QAAQ;AAGhE,YAAM,SAAS,UAAU,IACtB,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAE7B,YAAM,QAAQ,OAAO,UAAU,IAAI,OAAO,OAAO,OAAO,IAAI;AAC5D,YAAM,UAAgC,CAAC;AAEvC,iBAAW,YAAY,OAAO;AAE5B,YAAI,aAAa,OAAO,QAAQ,aAAa,KAAK,SAAU;AAE5D,cAAM,OAAO,SAAS,MAAM,GAAG,EAAE,IAAI,KAAK;AAG1C,YAAI,WAAW,IAAI,EAAG;AAGtB,YAAI,SAAS,CAAC,MAAM,KAAK,IAAI,EAAG;AAGhC,YAAI,eAAe;AACnB,YAAI,SAAS,WAAW,KAAK,QAAQ,GAAG;AACtC,yBAAe,SAAS,MAAM,KAAK,SAAS,MAAM,EAAE,QAAQ,OAAO,EAAE;AAAA,QACvE;AAGA,cAAM,QAAQ,aAAa,MAAM,GAAG,EAAE,OAAO,OAAO,EAAE,SAAS;AAI/D,cAAM,eAAe,KAAK,SAAS,GAAG,KAAK,CAAC,KAAK,WAAW,GAAG;AAC/D,cAAM,OAAuB,eAAe,SAAS;AAErD,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA,MAAM;AAAA,UACN;AAAA,UACA,OAAO,KAAK,IAAI,GAAG,KAAK;AAAA,QAC1B,CAAC
;AAED,YAAI,QAAQ,UAAU,WAAY;AAAA,MACpC;AAEA,aAAO;AAAA,IACT,SAAS,OAAO;AAEd,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AACF;","names":[]}
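The read() range validation introduced above (and repeated in the bundled client.cjs below) falls back to the default 1..1_000_000 window whenever either bound is non-finite or non-positive, or when start exceeds end. A small behavior sketch, using stub commands in place of a real sandbox:

// Behavior sketch only; the command implementations are placeholders.
const provider = new RemoteCommandsProvider('/repo', {
  grep: async () => '',
  listDir: async () => '',
  read: async (_path, start, end) => `stub: would run sed -n '${start},${end}p'`,
});

await provider.read({ path: 'a.ts', start: 10, end: 20 });  // reads 10..20
await provider.read({ path: 'a.ts', start: 10 });           // reads 10..1_000_000
await provider.read({ path: 'a.ts', start: 0, end: 20 });   // invalid start -> 1..1_000_000
await provider.read({ path: 'a.ts', start: 30, end: 20 });  // start > end -> 1..1_000_000
await provider.read({ path: 'a.ts', start: NaN, end: 20 }); // non-finite -> 1..1_000_000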
@@ -1,15 +1,15 @@
  import {
  createWarpGrepTool as createWarpGrepTool2
- } from "./chunk-4R3ALD5C.js";
+ } from "./chunk-Z2HDXUH7.js";
  import {
  createWarpGrepTool
- } from "./chunk-QL5SMQ73.js";
+ } from "./chunk-NTTQJM6O.js";
  import {
  createWarpGrepTool as createWarpGrepTool3
- } from "./chunk-ND7IFSMR.js";
+ } from "./chunk-OFMDENAQ.js";
  import {
  WarpGrepClient
- } from "./chunk-5PGRBWJX.js";
+ } from "./chunk-4VMIPD4G.js";
  import {
  createCodebaseSearchTool as createCodebaseSearchTool3
  } from "./chunk-UBX7QYBD.js";
@@ -280,4 +280,4 @@ export {
  VercelToolFactory,
  MorphClient
  };
- //# sourceMappingURL=chunk-LM62QCGS.js.map
+ //# sourceMappingURL=chunk-X6A64F2F.js.map
@@ -5,7 +5,7 @@ import {
  import {
  executeToolCall,
  formatResult
- } from "./chunk-5PGRBWJX.js";
+ } from "./chunk-4VMIPD4G.js";
  import {
  getSystemPrompt
  } from "./chunk-FMLHRJDF.js";
@@ -50,4 +50,4 @@ export {
  execute,
  createWarpGrepTool
  };
- //# sourceMappingURL=chunk-4R3ALD5C.js.map
+ //# sourceMappingURL=chunk-Z2HDXUH7.js.map
package/dist/client.cjs CHANGED
@@ -1555,10 +1555,25 @@ async function toolGrep(provider, args) {
  }
 
  // tools/warp_grep/agent/tools/read.ts
+ function isValidRange(start, end) {
+ return typeof start === "number" && typeof end === "number" && Number.isFinite(start) && Number.isFinite(end) && start > 0 && end >= start;
+ }
  async function toolRead(provider, args) {
- if (args.lines && args.lines.length > 0) {
+ if (args.lines && Array.isArray(args.lines) && args.lines.length > 0) {
+ const validRanges = [];
+ for (const range of args.lines) {
+ if (Array.isArray(range) && range.length >= 2 && isValidRange(range[0], range[1])) {
+ validRanges.push([range[0], range[1]]);
+ }
+ }
+ if (validRanges.length === 0) {
+ const res2 = await provider.read({ path: args.path });
+ if (res2.error) return res2.error;
+ if (!res2.lines.length) return "(empty file)";
+ return res2.lines.join("\n");
+ }
  const chunks = [];
- for (const [start, end] of args.lines) {
+ for (const [start, end] of validRanges) {
  const res2 = await provider.read({ path: args.path, start, end });
  if (res2.error) return res2.error;
  chunks.push(res2.lines.join("\n"));
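The isValidRange guard added above means toolRead now filters args.lines instead of failing on malformed entries, and falls back to a whole-file read when nothing survives the filter. Restated on its own for clarity (this mirrors the logic in the hunk; the sample calls are illustrative):

function isValidRange(start: unknown, end: unknown): boolean {
  return typeof start === 'number' && typeof end === 'number' &&
    Number.isFinite(start) && Number.isFinite(end) &&
    start > 0 && end >= start;
}

isValidRange(10, 20);   // true  - kept as a read range
isValidRange(0, 5);     // false - line numbers are 1-based
isValidRange(30, 25);   // false - end precedes start
isValidRange(1, NaN);   // false - non-finite bound
// If every supplied range is rejected, toolRead reads the whole file
// via provider.read({ path }) rather than throwing.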
@@ -1594,14 +1609,28 @@ async function toolListDirectory(provider, args) {
  }
 
  // tools/warp_grep/agent/tools/finish.ts
+ function isValidRange2(range) {
+ return Array.isArray(range) && range.length >= 2 && typeof range[0] === "number" && typeof range[1] === "number" && Number.isFinite(range[0]) && Number.isFinite(range[1]) && range[0] > 0 && range[1] >= range[0];
+ }
+ function extractValidRanges(lines) {
+ if (!Array.isArray(lines)) return null;
+ const valid = [];
+ for (const range of lines) {
+ if (isValidRange2(range)) {
+ valid.push([range[0], range[1]]);
+ }
+ }
+ return valid.length > 0 ? valid : null;
+ }
  async function readFinishFiles(repoRoot, files, reader) {
  const out = [];
  for (const f of files) {
- if (f.lines === "*") {
+ const validRanges = f.lines === "*" ? null : extractValidRanges(f.lines);
+ if (f.lines === "*" || !validRanges) {
  const lines = await reader(f.path);
  out.push({ path: f.path, ranges: "*", content: lines.join("\n") });
  } else {
- const ranges = mergeRanges(f.lines);
+ const ranges = mergeRanges(validRanges);
  const chunks = [];
  for (const [s, e] of ranges) {
  const lines = await reader(f.path, s, e);
@@ -1935,7 +1964,7 @@ async function runWarpGrep(config) {
  }
  const parts = ["Relevant context found:"];
  for (const f of finishMeta.files) {
- const ranges = f.lines === "*" ? "*" : f.lines.map(([s, e]) => `${s}-${e}`).join(", ");
+ const ranges = f.lines === "*" ? "*" : Array.isArray(f.lines) ? f.lines.map(([s, e]) => `${s}-${e}`).join(", ") : "*";
  parts.push(`- ${f.path}: ${ranges}`);
  }
  const payload = parts.join("\n");
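extractValidRanges applies the same per-range rule to the finish payload, so a hallucinated or malformed lines array degrades to a whole-file read instead of an exception, and the payload formatter now tolerates a non-array lines value as well. Illustrative inputs for the internal readFinishFiles helper (the reader callback stands in for the provider-backed reader used by the runner; types are loosened since this is dist JavaScript):

const files: any[] = [
  { path: 'src/a.ts', lines: '*' },                  // whole file, ranges reported as "*"
  { path: 'src/b.ts', lines: [[5, 10], [12, 20]] },  // valid ranges, merged via mergeRanges
  { path: 'src/c.ts', lines: [['x', 'y'], [0, 3]] }, // nothing valid -> treated like "*"
];

const resolved = await readFinishFiles('/repo', files, async (path, start, end) => {
  // Stand-in reader: return the requested lines of `path` as string[].
  return [] as string[];
});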
@@ -2295,11 +2324,19 @@ Details: ${res.stderr}` : ""}`
  };
  }
  const total = lines.length;
- let s = params.start ?? 1;
- let e = Math.min(params.end ?? total, total);
- if (s > total && total > 0) {
- s = 1;
- e = total;
+ const rawStart = params.start;
+ const rawEnd = params.end;
+ let s = 1;
+ let e = total;
+ const startValid = rawStart === void 0 || Number.isFinite(rawStart) && rawStart > 0;
+ const endValid = rawEnd === void 0 || Number.isFinite(rawEnd) && rawEnd > 0;
+ if (startValid && endValid) {
+ s = rawStart ?? 1;
+ e = Math.min(rawEnd ?? total, total);
+ if (s > total && total > 0 || s > e) {
+ s = 1;
+ e = total;
+ }
  }
  const out = [];
  for (let i = s; i <= e; i += 1) {
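The local read path gets the same defensive treatment: invalid bounds keep the full-file defaults, and a start past EOF or past the end bound resets the range. Restated as a hypothetical helper (the dist code inlines this logic rather than naming it):

function clampRange(total: number, start?: number, end?: number): [number, number] {
  let s = 1;
  let e = total;
  const startValid = start === undefined || (Number.isFinite(start) && start > 0);
  const endValid = end === undefined || (Number.isFinite(end) && end > 0);
  if (startValid && endValid) {
    s = start ?? 1;
    e = Math.min(end ?? total, total);
    if ((s > total && total > 0) || s > e) {
      s = 1;
      e = total;
    }
  }
  return [s, e];
}

clampRange(100, 20, 40);   // [20, 40]
clampRange(100, 0, 40);    // [1, 100] - non-positive start falls back to the whole file
clampRange(100, 150, 200); // [1, 100] - start past EOF resets to the whole file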
@@ -2475,8 +2512,13 @@ var RemoteCommandsProvider = class {
  * Read file and add line numbers
  */
  async read(params) {
- const start = params.start ?? 1;
- const end = params.end ?? 1e6;
+ const rawStart = params.start;
+ const rawEnd = params.end;
+ const startValid = rawStart === void 0 || Number.isFinite(rawStart) && rawStart > 0;
+ const endValid = rawEnd === void 0 || Number.isFinite(rawEnd) && rawEnd > 0;
+ const rangeValid = startValid && endValid && (rawStart === void 0 || rawEnd === void 0 || rawStart <= rawEnd);
+ const start = rangeValid && rawStart !== void 0 ? rawStart : 1;
+ const end = rangeValid && rawEnd !== void 0 ? rawEnd : 1e6;
  try {
  const stdout = await this.commands.read(params.path, start, end);
  const contentLines = (stdout || "").split("\n");