@morphllm/morphsdk 0.2.62 → 0.2.64
This diff compares publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only and reflects the changes between those published versions.
- package/dist/{chunk-PKPJWARB.js → chunk-5QIWYEHJ.js} +2 -2
- package/dist/{chunk-3M4DOG6N.js → chunk-5QRN3JNB.js} +9 -9
- package/dist/chunk-5QRN3JNB.js.map +1 -0
- package/dist/{chunk-GZ7TJ7UB.js → chunk-BLKHVDYQ.js} +3 -3
- package/dist/{chunk-6Y5JB4JC.js → chunk-DCIUCDWJ.js} +15 -7
- package/dist/{chunk-6Y5JB4JC.js.map → chunk-DCIUCDWJ.js.map} +1 -1
- package/dist/{chunk-XWSKUQVQ.js → chunk-DT6MPBD4.js} +8 -8
- package/dist/{chunk-LHKFRPUO.js → chunk-IQFKDBQD.js} +3 -3
- package/dist/{chunk-BHNJEBQJ.js → chunk-IUG2FHNN.js} +2 -2
- package/dist/{chunk-Z75HCPIE.js → chunk-NTCSWQEM.js} +3 -3
- package/dist/chunk-PZ5AY32C.js +10 -0
- package/dist/{chunk-IXNSTNS7.js → chunk-QZNGKOCZ.js} +2 -2
- package/dist/{chunk-S3RCM32V.js → chunk-S6FZAPJ7.js} +3 -3
- package/dist/{chunk-Y7CPJGLU.js → chunk-SUBBMTMY.js} +5 -5
- package/dist/chunk-SUBBMTMY.js.map +1 -0
- package/dist/{chunk-563RLKFC.js → chunk-TPP2UGQP.js} +7 -21
- package/dist/chunk-TPP2UGQP.js.map +1 -0
- package/dist/{chunk-44GS4GJJ.js → chunk-V2IEG55Y.js} +3 -3
- package/dist/{chunk-JNOZPU44.js → chunk-ZO4PPFCZ.js} +2 -2
- package/dist/client.cjs +27 -48
- package/dist/client.cjs.map +1 -1
- package/dist/client.js +14 -14
- package/dist/{finish-kXAcUJyB.d.ts → finish-pPJfB0uO.d.ts} +2 -2
- package/dist/git/client.js +1 -1
- package/dist/git/config.js +1 -1
- package/dist/git/index.js +1 -1
- package/dist/index.cjs +27 -48
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +15 -15
- package/dist/modelrouter/core.js +1 -1
- package/dist/modelrouter/index.js +1 -1
- package/dist/tools/browser/anthropic.js +1 -1
- package/dist/tools/browser/core.js +1 -1
- package/dist/tools/browser/index.js +1 -1
- package/dist/tools/browser/live.js +1 -1
- package/dist/tools/browser/openai.js +1 -1
- package/dist/tools/browser/prompts.js +1 -1
- package/dist/tools/browser/vercel.js +1 -1
- package/dist/tools/codebase_search/anthropic.js +1 -1
- package/dist/tools/codebase_search/core.js +1 -1
- package/dist/tools/codebase_search/index.js +1 -1
- package/dist/tools/codebase_search/openai.js +1 -1
- package/dist/tools/codebase_search/prompts.js +1 -1
- package/dist/tools/codebase_search/vercel.js +1 -1
- package/dist/tools/fastapply/anthropic.js +2 -2
- package/dist/tools/fastapply/core.js +1 -1
- package/dist/tools/fastapply/index.js +4 -4
- package/dist/tools/fastapply/openai.js +2 -2
- package/dist/tools/fastapply/prompts.js +1 -1
- package/dist/tools/fastapply/vercel.js +2 -2
- package/dist/tools/index.js +4 -4
- package/dist/tools/utils/resilience.js +1 -1
- package/dist/tools/warp_grep/agent/config.js +1 -1
- package/dist/tools/warp_grep/agent/formatter.js +1 -1
- package/dist/tools/warp_grep/agent/parser.cjs +8 -8
- package/dist/tools/warp_grep/agent/parser.cjs.map +1 -1
- package/dist/tools/warp_grep/agent/parser.js +2 -2
- package/dist/tools/warp_grep/agent/prompt.js +1 -1
- package/dist/tools/warp_grep/agent/runner.cjs +21 -16
- package/dist/tools/warp_grep/agent/runner.cjs.map +1 -1
- package/dist/tools/warp_grep/agent/runner.js +4 -4
- package/dist/tools/warp_grep/agent/types.cjs.map +1 -1
- package/dist/tools/warp_grep/agent/types.d.ts +2 -2
- package/dist/tools/warp_grep/anthropic.cjs +27 -48
- package/dist/tools/warp_grep/anthropic.cjs.map +1 -1
- package/dist/tools/warp_grep/anthropic.js +8 -8
- package/dist/tools/warp_grep/client.cjs +27 -48
- package/dist/tools/warp_grep/client.cjs.map +1 -1
- package/dist/tools/warp_grep/client.js +7 -7
- package/dist/tools/warp_grep/gemini.cjs +27 -48
- package/dist/tools/warp_grep/gemini.cjs.map +1 -1
- package/dist/tools/warp_grep/gemini.js +8 -8
- package/dist/tools/warp_grep/harness.cjs +28 -46
- package/dist/tools/warp_grep/harness.cjs.map +1 -1
- package/dist/tools/warp_grep/harness.d.ts +1 -1
- package/dist/tools/warp_grep/harness.js +5 -5
- package/dist/tools/warp_grep/harness.js.map +1 -1
- package/dist/tools/warp_grep/index.cjs +30 -48
- package/dist/tools/warp_grep/index.cjs.map +1 -1
- package/dist/tools/warp_grep/index.d.ts +1 -1
- package/dist/tools/warp_grep/index.js +11 -11
- package/dist/tools/warp_grep/openai.cjs +27 -48
- package/dist/tools/warp_grep/openai.cjs.map +1 -1
- package/dist/tools/warp_grep/openai.js +8 -8
- package/dist/tools/warp_grep/providers/local.cjs +6 -32
- package/dist/tools/warp_grep/providers/local.cjs.map +1 -1
- package/dist/tools/warp_grep/providers/local.js +3 -3
- package/dist/tools/warp_grep/providers/remote.js +1 -1
- package/dist/tools/warp_grep/utils/files.js +1 -1
- package/dist/tools/warp_grep/utils/paths.js +1 -1
- package/dist/tools/warp_grep/utils/ripgrep.cjs +6 -42
- package/dist/tools/warp_grep/utils/ripgrep.cjs.map +1 -1
- package/dist/tools/warp_grep/utils/ripgrep.js +2 -2
- package/dist/tools/warp_grep/vercel.cjs +27 -48
- package/dist/tools/warp_grep/vercel.cjs.map +1 -1
- package/dist/tools/warp_grep/vercel.js +8 -8
- package/package.json +2 -4
- package/dist/chunk-3M4DOG6N.js.map +0 -1
- package/dist/chunk-563RLKFC.js.map +0 -1
- package/dist/chunk-5FUTL2UF.js +0 -22
- package/dist/chunk-Y7CPJGLU.js.map +0 -1
- package/dist/lib-GEPX5Y3T.js +0 -14
- package/dist/lib-GEPX5Y3T.js.map +0 -1
- /package/dist/{chunk-PKPJWARB.js.map → chunk-5QIWYEHJ.js.map} +0 -0
- /package/dist/{chunk-GZ7TJ7UB.js.map → chunk-BLKHVDYQ.js.map} +0 -0
- /package/dist/{chunk-XWSKUQVQ.js.map → chunk-DT6MPBD4.js.map} +0 -0
- /package/dist/{chunk-LHKFRPUO.js.map → chunk-IQFKDBQD.js.map} +0 -0
- /package/dist/{chunk-BHNJEBQJ.js.map → chunk-IUG2FHNN.js.map} +0 -0
- /package/dist/{chunk-Z75HCPIE.js.map → chunk-NTCSWQEM.js.map} +0 -0
- /package/dist/{chunk-5FUTL2UF.js.map → chunk-PZ5AY32C.js.map} +0 -0
- /package/dist/{chunk-IXNSTNS7.js.map → chunk-QZNGKOCZ.js.map} +0 -0
- /package/dist/{chunk-S3RCM32V.js.map → chunk-S6FZAPJ7.js.map} +0 -0
- /package/dist/{chunk-44GS4GJJ.js.map → chunk-V2IEG55Y.js.map} +0 -0
- /package/dist/{chunk-JNOZPU44.js.map → chunk-ZO4PPFCZ.js.map} +0 -0
@@ -1 +0,0 @@
- [minified v3 source map omitted; source: ../tools/warp_grep/agent/parser.ts, with the original TypeScript embedded in its sourcesContent]
@@ -1 +0,0 @@
- [minified v3 source map omitted; source: ../tools/warp_grep/utils/ripgrep.ts]
package/dist/chunk-5FUTL2UF.js
DELETED
@@ -1,22 +0,0 @@
- var __defProp = Object.defineProperty;
- var __getOwnPropNames = Object.getOwnPropertyNames;
- var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
-   get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
- }) : x)(function(x) {
-   if (typeof require !== "undefined") return require.apply(this, arguments);
-   throw Error('Dynamic require of "' + x + '" is not supported');
- });
- var __commonJS = (cb, mod) => function __require2() {
-   return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
- };
- var __export = (target, all) => {
-   for (var name in all)
-     __defProp(target, name, { get: all[name], enumerable: true });
- };
-
- export {
-   __require,
-   __commonJS,
-   __export
- };
- //# sourceMappingURL=chunk-5FUTL2UF.js.map
@@ -1 +0,0 @@
- [minified v3 source map omitted; source: ../tools/warp_grep/agent/runner.ts]
package/dist/lib-GEPX5Y3T.js
DELETED
@@ -1,14 +0,0 @@
- import {
-   __commonJS,
-   __require
- } from "./chunk-5FUTL2UF.js";
-
- // node_modules/@vscode/ripgrep/lib/index.js
- var require_lib = __commonJS({
-   "node_modules/@vscode/ripgrep/lib/index.js"(exports, module) {
-     var path = __require("path");
-     module.exports.rgPath = path.join(__dirname, `../bin/rg${process.platform === "win32" ? ".exe" : ""}`);
-   }
- });
- export default require_lib();
- //# sourceMappingURL=lib-GEPX5Y3T.js.map
package/dist/lib-GEPX5Y3T.js.map
DELETED
@@ -1 +0,0 @@
- {"version":3,"sources":["../node_modules/@vscode/ripgrep/lib/index.js"],"sourcesContent":["'use strict';\n\nconst path = require('path');\n\nmodule.exports.rgPath = path.join(__dirname, `../bin/rg${process.platform === 'win32' ? '.exe' : ''}`);"],"mappings":";;;;;;AAAA;AAAA;AAEA,QAAM,OAAO,UAAQ,MAAM;AAE3B,WAAO,QAAQ,SAAS,KAAK,KAAK,WAAW,YAAY,QAAQ,aAAa,UAAU,SAAS,EAAE,EAAE;AAAA;AAAA;","names":[]}
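Taken together, these deletions appear to drop the bundled @vscode/ripgrep shim chunk from the ESM build. The ripgrep.ts source embedded in the removed 0.2.62 map loaded that optional dependency lazily and fell back to the system `rg` binary when it was unavailable. A minimal sketch of that lazy-load-and-fallback pattern, reconstructed from the removed sources; treat it as illustrative of 0.2.62 behaviour, not the current implementation.

```js
// Illustrative sketch only, based on the ripgrep.ts source embedded in the
// removed 0.2.62 source map: resolve the optional @vscode/ripgrep binary path
// lazily, cache the result, and fall back to a system `rg` on failure.
let bundledRgPath = null; // null = not checked yet, '' = unavailable

async function getBundledRgPath() {
  if (bundledRgPath !== null) return bundledRgPath || null;
  try {
    const mod = await import('@vscode/ripgrep'); // optional dependency
    bundledRgPath = mod.rgPath;
  } catch {
    bundledRgPath = ''; // remember the miss so the import is attempted only once
  }
  return bundledRgPath || null;
}

async function resolveRgBinary() {
  // prefer the bundled binary when the optional dependency is installed,
  // otherwise assume `rg` is available on PATH
  return (await getBundledRgPath()) ?? 'rg';
}
```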