@morphllm/morphsdk 0.2.54 → 0.2.55
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-IFVROB4L.js → chunk-37SRI4GW.js} +6 -7
- package/dist/chunk-37SRI4GW.js.map +1 -0
- package/dist/{chunk-FB3E5BRY.js → chunk-BSHJGJ25.js} +3 -3
- package/dist/{chunk-KVMZPOUT.js → chunk-C6QQL6FX.js} +5 -5
- package/dist/{chunk-D5ZSGQT4.js → chunk-KO6JQFRE.js} +2 -2
- package/dist/{chunk-PYTBBWL6.js → chunk-X5HNQ7SB.js} +3 -3
- package/dist/{chunk-4HOUN5TW.js → chunk-ZWY434TS.js} +3 -3
- package/dist/client.cjs +12 -13
- package/dist/client.cjs.map +1 -1
- package/dist/client.js +6 -6
- package/dist/index.cjs +12 -13
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +6 -6
- package/dist/tools/warp_grep/agent/runner.cjs +5 -6
- package/dist/tools/warp_grep/agent/runner.cjs.map +1 -1
- package/dist/tools/warp_grep/agent/runner.js +1 -1
- package/dist/tools/warp_grep/anthropic.cjs +12 -13
- package/dist/tools/warp_grep/anthropic.cjs.map +1 -1
- package/dist/tools/warp_grep/anthropic.js +3 -3
- package/dist/tools/warp_grep/index.cjs +12 -13
- package/dist/tools/warp_grep/index.cjs.map +1 -1
- package/dist/tools/warp_grep/index.js +5 -5
- package/dist/tools/warp_grep/openai.cjs +12 -13
- package/dist/tools/warp_grep/openai.cjs.map +1 -1
- package/dist/tools/warp_grep/openai.js +3 -3
- package/dist/tools/warp_grep/vercel.cjs +12 -13
- package/dist/tools/warp_grep/vercel.cjs.map +1 -1
- package/dist/tools/warp_grep/vercel.js +3 -3
- package/package.json +1 -1
- package/dist/chunk-IFVROB4L.js.map +0 -1
- package/dist/{chunk-FB3E5BRY.js.map → chunk-BSHJGJ25.js.map} +0 -0
- package/dist/{chunk-KVMZPOUT.js.map → chunk-C6QQL6FX.js.map} +0 -0
- package/dist/{chunk-D5ZSGQT4.js.map → chunk-KO6JQFRE.js.map} +0 -0
- package/dist/{chunk-PYTBBWL6.js.map → chunk-X5HNQ7SB.js.map} +0 -0
- package/dist/{chunk-4HOUN5TW.js.map → chunk-ZWY434TS.js.map} +0 -0

package/dist/{chunk-IFVROB4L.js → chunk-37SRI4GW.js}
CHANGED

@@ -32,13 +32,12 @@ import {
 
 // tools/warp_grep/agent/runner.ts
 import path from "path";
-import fs from "fs/promises";
 var parser = new LLMResponseParser();
-async function buildInitialState(repoRoot, query) {
+async function buildInitialState(repoRoot, query, provider) {
   try {
-    const entries = await
-    const dirs = entries.filter((e) => e.
-    const files = entries.filter((e) => e.
+    const entries = await provider.analyse({ path: ".", maxResults: 100 });
+    const dirs = entries.filter((e) => e.type === "dir").map((d) => d.name).slice(0, 50);
+    const files = entries.filter((e) => e.type === "file").map((f) => f.name).slice(0, 50);
     const parts = [
       `<repo_root>${repoRoot}</repo_root>`,
       `<top_dirs>${dirs.join(", ")}</top_dirs>`,
@@ -87,7 +86,7 @@ async function runWarpGrep(config) {
   messages.push(systemMessage);
   const queryContent = `<query>${config.query}</query>`;
   messages.push({ role: "user", content: queryContent });
-  const initialState = await buildInitialState(repoRoot, config.query);
+  const initialState = await buildInitialState(repoRoot, config.query, config.provider);
   messages.push({ role: "user", content: initialState });
   const maxRounds = AGENT_CONFIG.MAX_ROUNDS;
   const model = config.model || DEFAULT_MODEL;
@@ -232,4 +231,4 @@ async function runWarpGrep(config) {
 export {
   runWarpGrep
 };
-//# sourceMappingURL=chunk-IFVROB4L.js.map
+//# sourceMappingURL=chunk-37SRI4GW.js.map
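
The substantive change in this release is visible above: buildInitialState no longer reads the repository through fs/promises; it asks the injected provider for the top-level listing, so the initial repo snapshot works against remote filesystems (Modal, E2B, etc.) as well as the local one. A minimal sketch of the new call shape follows; the import path and the inline provider typing are assumptions for illustration, and only the call shapes (analyse/grep/read, runWarpGrep receiving a provider) are taken from the bundled runner above.

// Sketch only. Assumed entry point; the published export path may differ.
import { runWarpGrep } from "@morphllm/morphsdk/tools/warp_grep";

// Minimal in-memory provider answering the calls the runner makes.
const inMemoryProvider = {
  // buildInitialState(repoRoot, query, provider) now calls analyse({ path: ".", maxResults: 100 }).
  async analyse(_params: { path: string; maxResults?: number }) {
    return [
      { type: "dir", name: "src" },
      { type: "file", name: "package.json" },
    ];
  },
  async grep(_params: { pattern: string; path: string }) {
    return { lines: [] as string[] };
  },
  async read(_params: { path: string; start: number; end: number }) {
    return { lines: [] as string[] };
  },
};

const result = await runWarpGrep({
  repoRoot: "/repo",
  query: "where is the retry logic?",
  provider: inMemoryProvider as any, // cast: the real WarpGrepProvider type lives in the SDK
});
console.log(result.terminationReason);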

package/dist/chunk-37SRI4GW.js.map
ADDED

@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/agent/runner.ts"],"sourcesContent":["import { AGENT_CONFIG, DEFAULT_MODEL } from './config.js';\nimport { getSystemPrompt } from './prompt.js';\nimport type { AgentRunResult, ChatMessage, SessionConfig, ToolCall, AgentFinish } from './types.js';\nimport { LLMResponseParser } from './parser.js';\nimport type { WarpGrepProvider } from '../providers/types.js';\nimport { toolRead } from '../tools/read.js';\nimport { toolAnalyse } from '../tools/analyse.js';\nimport { fetchWithRetry, withTimeout } from '../../utils/resilience.js';\nimport { formatAgentToolOutput } from './formatter.js';\nimport { GrepState, parseAndFilterGrepOutput, formatTurnGrepOutput } from './grep_helpers.js';\nimport { readFinishFiles } from '../tools/finish.js';\nimport path from 'path';\n\ntype EventName =\n | 'initial_state'\n | 'round_start'\n | 'round_end'\n | 'finish'\n | 'error';\n\nexport type EventCallback = (name: EventName, payload: Record<string, unknown>) => void;\n\nconst parser = new LLMResponseParser();\n\nasync function buildInitialState(repoRoot: string, query: string, provider: WarpGrepProvider): Promise<string> {\n // Summarize top-level directories and file counts using the provider\n // This works for both local and remote filesystems (Modal, E2B, etc.)\n try {\n const entries = await provider.analyse({ path: '.', maxResults: 100 });\n const dirs = entries.filter(e => e.type === 'dir').map(d => d.name).slice(0, 50);\n const files = entries.filter(e => e.type === 'file').map(f => f.name).slice(0, 50);\n const parts = [\n `<repo_root>${repoRoot}</repo_root>`,\n `<top_dirs>${dirs.join(', ')}</top_dirs>`,\n `<top_files>${files.join(', ')}</top_files>`,\n ];\n return parts.join('\\n');\n } catch {\n return `<repo_root>${repoRoot}</repo_root>`;\n }\n}\n\nfunction formatAssistantToolBlock(name: string, args: Record<string, unknown>, payload: string, isError = false): string {\n const argStr = Object.entries(args)\n .map(([k, v]) => `${k}=${JSON.stringify(v)}`)\n .join(' ');\n const prefix = isError ? 
'error' : 'result';\n return `<${prefix} name=\"${name}\" ${argStr}>\\n${payload}\\n</${prefix}>`;\n}\n\nasync function callModel(messages: ChatMessage[], model: string, apiKey?: string): Promise<string> {\n const api = 'https://api.morphllm.com/v1/chat/completions';\n const fetchPromise = fetchWithRetry(\n api,\n {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey || process.env.MORPH_API_KEY || ''}`,\n },\n body: JSON.stringify({\n model,\n temperature: 0.0,\n max_tokens: 1024,\n messages,\n }),\n },\n {}\n );\n const resp = await withTimeout(fetchPromise, AGENT_CONFIG.TIMEOUT_MS, 'morph-warp-grep request timed out');\n if (!resp.ok) {\n // keeping these cases are real throws, if this happens retry will likely not help, so best we just throw here, notice the error and fix\n const t = await resp.text();\n throw new Error(`morph-warp-grep error ${resp.status}: ${t}`);\n }\n const data = await resp.json();\n const content = data?.choices?.[0]?.message?.content;\n if (!content || typeof content !== 'string') {\n throw new Error('Invalid response from model');\n }\n return content;\n}\n\nexport async function runWarpGrep(config: SessionConfig & { provider: WarpGrepProvider }): Promise<AgentRunResult> {\n const repoRoot = path.resolve(config.repoRoot || process.cwd());\n const messages: ChatMessage[] = [];\n\n // system\n const systemMessage = { role: 'system' as const, content: getSystemPrompt() };\n messages.push(systemMessage);\n // user query\n const queryContent = `<query>${config.query}</query>`;\n messages.push({ role: 'user', content: queryContent });\n // initial state\n const initialState = await buildInitialState(repoRoot, config.query, config.provider);\n messages.push({ role: 'user', content: initialState });\n\n const maxRounds = AGENT_CONFIG.MAX_ROUNDS;\n const model = config.model || DEFAULT_MODEL;\n const provider = config.provider;\n const errors: Array<{ message: string }> = [];\n const grepState = new GrepState();\n\n let finishMeta: AgentFinish | undefined;\n let terminationReason: AgentRunResult['terminationReason'] = 'terminated';\n\n for (let round = 1; round <= maxRounds; round += 1) {\n // call model\n const assistantContent = await callModel(messages, model, config.apiKey).catch((e: unknown) => {\n errors.push({ message: e instanceof Error ? e.message : String(e) });\n return '';\n });\n if (!assistantContent) break;\n messages.push({ role: 'assistant', content: assistantContent });\n\n // parse tool calls (no longer throws - returns _skip calls for malformed commands)\n const toolCalls = parser.parse(assistantContent);\n if (toolCalls.length === 0) {\n errors.push({ message: 'No tool calls produced by the model.' });\n terminationReason = 'terminated';\n break;\n }\n\n const finishCalls = toolCalls.filter(c => c.name === 'finish');\n const grepCalls = toolCalls.filter(c => c.name === 'grep');\n const analyseCalls = toolCalls.filter(c => c.name === 'analyse');\n const readCalls = toolCalls.filter(c => c.name === 'read');\n const skipCalls = toolCalls.filter(c => c.name === '_skip');\n\n const formatted: string[] = [];\n\n // Surface any skipped commands as feedback to the LLM\n for (const c of skipCalls) {\n const msg = (c.arguments as { message?: string })?.message || 'Command skipped due to parsing error';\n formatted.push(msg);\n }\n\n // Execute non-grep tools in parallel\n const otherPromises: Array<Promise<string>> = [];\n for (const c of analyseCalls) {\n const args = (c.arguments ?? 
{}) as { path: string; pattern?: string | null };\n otherPromises.push(\n toolAnalyse(provider, args).then(\n p => formatAgentToolOutput('analyse', args, p, { isError: false }),\n err => formatAgentToolOutput('analyse', args, String(err), { isError: true })\n )\n );\n }\n for (const c of readCalls) {\n const args = (c.arguments ?? {}) as { path: string; start?: number; end?: number };\n otherPromises.push(\n toolRead(provider, args).then(\n p => formatAgentToolOutput('read', args, p, { isError: false }),\n err => formatAgentToolOutput('read', args, String(err), { isError: true })\n )\n );\n }\n const otherResults = await Promise.all(otherPromises);\n formatted.push(...otherResults);\n\n // Execute grep calls sequentially like MCP runner to keep outputs compact\n for (const c of grepCalls) {\n const args = (c.arguments ?? {}) as { pattern: string; path: string };\n try {\n const grepRes = await provider.grep({ pattern: args.pattern, path: args.path });\n \n // Check for ripgrep availability error - terminate early with clear message\n if (grepRes.error) {\n errors.push({ message: grepRes.error });\n terminationReason = 'terminated';\n // Return immediately with the error so user knows to install ripgrep\n return {\n terminationReason: 'terminated',\n messages,\n errors,\n };\n }\n \n const rawOutput = Array.isArray(grepRes.lines) ? grepRes.lines.join('\\n') : '';\n const newMatches = parseAndFilterGrepOutput(rawOutput, grepState);\n let formattedPayload = formatTurnGrepOutput(newMatches);\n if (formattedPayload === \"No new matches found.\") {\n formattedPayload = \"no new matches\";\n }\n formatted.push(formatAgentToolOutput('grep', args, formattedPayload, { isError: false }));\n } catch (err) {\n formatted.push(formatAgentToolOutput('grep', args, String(err), { isError: true }));\n }\n }\n\n if (formatted.length > 0) {\n // Add turn counter message\n const turnsUsed = round;\n const turnsRemaining = 4 - turnsUsed;\n let turnMessage: string;\n if (turnsRemaining === 0) {\n turnMessage = `\\n\\n[Turn ${turnsUsed}/4] This is your LAST turn. You MUST call the finish tool now.`;\n } else if (turnsRemaining === 1) {\n turnMessage = `\\n\\n[Turn ${turnsUsed}/4] You have 1 turn remaining. Next turn you MUST call the finish tool.`;\n } else {\n turnMessage = `\\n\\n[Turn ${turnsUsed}/4] You have ${turnsRemaining} turns remaining.`;\n }\n messages.push({ role: 'user', content: formatted.join('\\n') + turnMessage });\n }\n\n if (finishCalls.length) {\n const fc = finishCalls[0];\n const files = ((fc.arguments as any)?.files ?? 
[]) as AgentFinish['files'];\n finishMeta = { files };\n terminationReason = 'completed';\n break;\n }\n }\n\n if (terminationReason !== 'completed' || !finishMeta) {\n return { terminationReason, messages, errors };\n }\n\n // Build finish payload\n const parts: string[] = ['Relevant context found:'];\n for (const f of finishMeta.files) {\n const ranges = f.lines.map(([s, e]) => `${s}-${e}`).join(', ');\n parts.push(`- ${f.path}: ${ranges}`);\n }\n const payload = parts.join('\\n');\n\n // Resolve file contents for returned ranges\n // Wrap reader in try-catch to handle non-existent or unreadable files gracefully\n // Track files that couldn't be read for error reporting\n const fileReadErrors: Array<{ path: string; error: string }> = [];\n const resolved = await readFinishFiles(\n repoRoot,\n finishMeta.files,\n async (p: string, s: number, e: number) => {\n try {\n const rr = await provider.read({ path: p, start: s, end: e });\n // rr.lines are \"line|content\" → strip the \"line|\" prefix\n return rr.lines.map(l => {\n const idx = l.indexOf('|');\n return idx >= 0 ? l.slice(idx + 1) : l;\n });\n } catch (err) {\n // File doesn't exist or can't be read - log error but don't throw\n // This handles cases where the agent hallucinated a path or the file was deleted\n const errorMsg = err instanceof Error ? err.message : String(err);\n fileReadErrors.push({ path: p, error: errorMsg });\n console.error(`[warp_grep] Failed to read file: ${p} - ${errorMsg}`);\n return [`[couldn't find: ${p}]`];\n }\n }\n );\n\n // Add file read errors to the result so MCP can report them\n if (fileReadErrors.length > 0) {\n errors.push(...fileReadErrors.map(e => ({ message: `File read error: ${e.path} - ${e.error}` })));\n }\n\n return {\n terminationReason: 'completed',\n messages,\n finish: { payload, metadata: finishMeta, resolved },\n 
};\n}\n\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAWA,OAAO,UAAU;AAWjB,IAAM,SAAS,IAAI,kBAAkB;AAErC,eAAe,kBAAkB,UAAkB,OAAe,UAA6C;AAG7G,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,QAAQ,EAAE,MAAM,KAAK,YAAY,IAAI,CAAC;AACrE,UAAM,OAAO,QAAQ,OAAO,OAAK,EAAE,SAAS,KAAK,EAAE,IAAI,OAAK,EAAE,IAAI,EAAE,MAAM,GAAG,EAAE;AAC/E,UAAM,QAAQ,QAAQ,OAAO,OAAK,EAAE,SAAS,MAAM,EAAE,IAAI,OAAK,EAAE,IAAI,EAAE,MAAM,GAAG,EAAE;AACjF,UAAM,QAAQ;AAAA,MACZ,cAAc,QAAQ;AAAA,MACtB,aAAa,KAAK,KAAK,IAAI,CAAC;AAAA,MAC5B,cAAc,MAAM,KAAK,IAAI,CAAC;AAAA,IAChC;AACA,WAAO,MAAM,KAAK,IAAI;AAAA,EACxB,QAAQ;AACN,WAAO,cAAc,QAAQ;AAAA,EAC/B;AACF;AAUA,eAAe,UAAU,UAAyB,OAAe,QAAkC;AACjG,QAAM,MAAM;AACZ,QAAM,eAAe;AAAA,IACnB;AAAA,IACA;AAAA,MACE,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,eAAe,UAAU,UAAU,QAAQ,IAAI,iBAAiB,EAAE;AAAA,MACpE;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA,aAAa;AAAA,QACb,YAAY;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IACA,CAAC;AAAA,EACH;AACA,QAAM,OAAO,MAAM,YAAY,cAAc,aAAa,YAAY,mCAAmC;AACzG,MAAI,CAAC,KAAK,IAAI;AAEZ,UAAM,IAAI,MAAM,KAAK,KAAK;AAC1B,UAAM,IAAI,MAAM,yBAAyB,KAAK,MAAM,KAAK,CAAC,EAAE;AAAA,EAC9D;AACA,QAAM,OAAO,MAAM,KAAK,KAAK;AAC7B,QAAM,UAAU,MAAM,UAAU,CAAC,GAAG,SAAS;AAC7C,MAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AAC3C,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC/C;AACA,SAAO;AACT;AAEA,eAAsB,YAAY,QAAiF;AACjH,QAAM,WAAW,KAAK,QAAQ,OAAO,YAAY,QAAQ,IAAI,CAAC;AAC9D,QAAM,WAA0B,CAAC;AAGjC,QAAM,gBAAgB,EAAE,MAAM,UAAmB,SAAS,gBAAgB,EAAE;AAC5E,WAAS,KAAK,aAAa;AAE3B,QAAM,eAAe,UAAU,OAAO,KAAK;AAC3C,WAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,aAAa,CAAC;AAErD,QAAM,eAAe,MAAM,kBAAkB,UAAU,OAAO,OAAO,OAAO,QAAQ;AACpF,WAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,aAAa,CAAC;AAErD,QAAM,YAAY,aAAa;AAC/B,QAAM,QAAQ,OAAO,SAAS;AAC9B,QAAM,WAAW,OAAO;AACxB,QAAM,SAAqC,CAAC;AAC5C,QAAM,YAAY,IAAI,UAAU;AAEhC,MAAI;AACJ,MAAI,oBAAyD;AAE7D,WAAS,QAAQ,GAAG,SAAS,WAAW,SAAS,GAAG;AAElD,UAAM,mBAAmB,MAAM,UAAU,UAAU,OAAO,OAAO,MAAM,EAAE,MAAM,CAAC,MAAe;AAC7F,aAAO,KAAK,EAAE,SAAS,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,EAAE,CAAC;AACnE,aAAO;AAAA,IACT,CAAC;AACD,QAAI,CAAC,iBAAkB;AACvB,aAAS,KAAK,EAAE,MAAM,aAAa,SAAS,iBAAiB,CAAC;AAG9D,UAAM,YAAY,OAAO,MAAM,gBAAgB;AAC/C,QAAI,UAAU,WAAW,GAAG;AAC1B,aAAO,KAAK,EAAE,SAAS,uCAAuC,CAAC;AAC/D,0BAAoB;AACpB;AAAA,IACF;AAEA,UAAM,cAAc,UAAU,OAAO,OAAK,EAAE,SAAS,QAAQ;AAC7D,UAAM,YAAY,UAAU,OAAO,OAAK,EAAE,SAAS,MAAM;AACzD,UAAM,eAAe,UAAU,OAAO,OAAK,EAAE,SAAS,SAAS;AAC/D,UAAM,YAAY,UAAU,OAAO,OAAK,EAAE,SAAS,MAAM;AACzD,UAAM,YAAY,UAAU,OAAO,OAAK,EAAE,SAAS,OAAO;AAE1D,UAAM,YAAsB,CAAC;AAG7B,eAAW,KAAK,WAAW;AACzB,YAAM,MAAO,EAAE,WAAoC,WAAW;AAC9D,gBAAU,KAAK,GAAG;AAAA,IACpB;AAGA,UAAM,gBAAwC,CAAC;AAC/C,eAAW,KAAK,cAAc;AAC5B,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,oBAAc;AAAA,QACZ,YAAY,UAAU,IAAI,EAAE;AAAA,UAC1B,OAAK,sBAAsB,WAAW,MAAM,GAAG,EAAE,SAAS,MAAM,CAAC;AAAA,UACjE,SAAO,sBAAsB,WAAW,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC;AAAA,QAC9E;AAAA,MACF;AAAA,IACF;AACA,eAAW,KAAK,WAAW;AACzB,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,oBAAc;AAAA,QACZ,SAAS,UAAU,IAAI,EAAE;AAAA,UACvB,OAAK,sBAAsB,QAAQ,MAAM,GAAG,EAAE,SAAS,MAAM,CAAC;AAAA,UAC9D,SAAO,sBAAsB,QAAQ,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC;AAAA,QAC3E;AAAA,MACF;AAAA,IACF;AACA,UAAM,eAAe,MAAM,QAAQ,IAAI,aAAa;AACpD,cAAU,KAAK,GAAG,YAAY;AAG9B,eAAW,KAAK,WAAW;AACzB,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,UAAI;AACF,cAAM,UAAU,MAAM,SAAS,KAAK,EAAE,SAAS,KAAK,SAAS,MAAM,KAAK,KAAK,CAAC;AAG9E,YAAI,QAAQ,OAAO;AACjB,iBAAO,KAAK,EAAE,SAAS,QAAQ,MAAM,CAAC;AACtC,8BAAoB;AAEpB,iBAAO;AAAA,YACL,mBAAmB;AAAA,YACnB;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAEA,cAAM,YAAY,MAAM,QAAQ,QAAQ,KAAK,IAAI,QAAQ,MAAM,KAAK,IAAI,IAAI;AAC5E,cAAM,aAAa,yBAAyB,WAAW,SAAS;AAChE,YAAI,mBAAmB,qBAAqB,UAAU;AACtD,YAAI,qBAAqB,yBAAyB;AAChD,6BAAmB;AAAA,QACrB;AACA,kBAAU,KAAK,sBAAsB,QAAQ,MAAM,kBAAkB,EAAE,SAAS,MAAM,CAAC,CAAC
;AAAA,MAC1F,SAAS,KAAK;AACZ,kBAAU,KAAK,sBAAsB,QAAQ,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC,CAAC;AAAA,MACpF;AAAA,IACF;AAEA,QAAI,UAAU,SAAS,GAAG;AAExB,YAAM,YAAY;AAClB,YAAM,iBAAiB,IAAI;AAC3B,UAAI;AACJ,UAAI,mBAAmB,GAAG;AACxB,sBAAc;AAAA;AAAA,QAAa,SAAS;AAAA,MACtC,WAAW,mBAAmB,GAAG;AAC/B,sBAAc;AAAA;AAAA,QAAa,SAAS;AAAA,MACtC,OAAO;AACL,sBAAc;AAAA;AAAA,QAAa,SAAS,gBAAgB,cAAc;AAAA,MACpE;AACA,eAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,UAAU,KAAK,IAAI,IAAI,YAAY,CAAC;AAAA,IAC7E;AAEA,QAAI,YAAY,QAAQ;AACtB,YAAM,KAAK,YAAY,CAAC;AACxB,YAAM,QAAU,GAAG,WAAmB,SAAS,CAAC;AAChD,mBAAa,EAAE,MAAM;AACrB,0BAAoB;AACpB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,sBAAsB,eAAe,CAAC,YAAY;AACpD,WAAO,EAAE,mBAAmB,UAAU,OAAO;AAAA,EAC/C;AAGA,QAAM,QAAkB,CAAC,yBAAyB;AAClD,aAAW,KAAK,WAAW,OAAO;AAChC,UAAM,SAAS,EAAE,MAAM,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,IAAI;AAC7D,UAAM,KAAK,KAAK,EAAE,IAAI,KAAK,MAAM,EAAE;AAAA,EACrC;AACA,QAAM,UAAU,MAAM,KAAK,IAAI;AAK/B,QAAM,iBAAyD,CAAC;AAChE,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA,WAAW;AAAA,IACX,OAAO,GAAW,GAAW,MAAc;AACzC,UAAI;AACF,cAAM,KAAK,MAAM,SAAS,KAAK,EAAE,MAAM,GAAG,OAAO,GAAG,KAAK,EAAE,CAAC;AAE5D,eAAO,GAAG,MAAM,IAAI,OAAK;AACvB,gBAAM,MAAM,EAAE,QAAQ,GAAG;AACzB,iBAAO,OAAO,IAAI,EAAE,MAAM,MAAM,CAAC,IAAI;AAAA,QACvC,CAAC;AAAA,MACH,SAAS,KAAK;AAGZ,cAAM,WAAW,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAChE,uBAAe,KAAK,EAAE,MAAM,GAAG,OAAO,SAAS,CAAC;AAChD,gBAAQ,MAAM,oCAAoC,CAAC,MAAM,QAAQ,EAAE;AACnE,eAAO,CAAC,mBAAmB,CAAC,GAAG;AAAA,MACjC;AAAA,IACF;AAAA,EACF;AAGA,MAAI,eAAe,SAAS,GAAG;AAC7B,WAAO,KAAK,GAAG,eAAe,IAAI,QAAM,EAAE,SAAS,oBAAoB,EAAE,IAAI,MAAM,EAAE,KAAK,GAAG,EAAE,CAAC;AAAA,EAClG;AAEA,SAAO;AAAA,IACL,mBAAmB;AAAA,IACnB;AAAA,IACA,QAAQ,EAAE,SAAS,UAAU,YAAY,SAAS;AAAA,EACpD;AACF;","names":[]}
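
The newly added source map embeds the full runner.ts source, which makes the provider contract the agent loop relies on easy to read off. The sketch below is inferred purely from those call sites; the SDK's actual WarpGrepProvider declaration lives in tools/warp_grep/providers/types.ts and is authoritative, so treat these shapes as an approximation.

// Inferred from the runner source above, not copied from the SDK's type declarations.
interface AnalyseEntry {
  type: "dir" | "file";
  name: string;
}

interface ProviderSurfaceSketch {
  // buildInitialState uses this to summarize the repo root.
  analyse(params: { path: string; maxResults?: number }): Promise<AnalyseEntry[]>;
  // error is surfaced when ripgrep is unavailable; lines carry raw match output.
  grep(params: { pattern: string; path: string }): Promise<{ lines?: string[]; error?: string }>;
  // lines come back as "lineNumber|content"; the runner strips the prefix when resolving finish files.
  read(params: { path: string; start: number; end: number }): Promise<{ lines: string[] }>;
}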

package/dist/{chunk-FB3E5BRY.js → chunk-BSHJGJ25.js}
CHANGED

@@ -2,10 +2,10 @@ import {
   WARP_GREP_DESCRIPTION,
   WARP_GREP_TOOL_NAME,
   formatResult
-} from "./chunk-D5ZSGQT4.js";
+} from "./chunk-KO6JQFRE.js";
 import {
   runWarpGrep
-} from "./chunk-IFVROB4L.js";
+} from "./chunk-37SRI4GW.js";
 import {
   getSystemPrompt
 } from "./chunk-WETRQJGU.js";
@@ -74,4 +74,4 @@ export {
   createMorphWarpGrepTool,
   anthropic_default
 };
-//# sourceMappingURL=chunk-FB3E5BRY.js.map
+//# sourceMappingURL=chunk-BSHJGJ25.js.map

package/dist/{chunk-KVMZPOUT.js → chunk-C6QQL6FX.js}
CHANGED

@@ -1,15 +1,15 @@
 import {
   createMorphWarpGrepTool as createMorphWarpGrepTool2
-} from "./chunk-FB3E5BRY.js";
+} from "./chunk-BSHJGJ25.js";
 import {
   createMorphWarpGrepTool
-} from "./chunk-4HOUN5TW.js";
+} from "./chunk-ZWY434TS.js";
 import {
   createMorphWarpGrepTool as createMorphWarpGrepTool3
-} from "./chunk-PYTBBWL6.js";
+} from "./chunk-X5HNQ7SB.js";
 import {
   WarpGrepClient
-} from "./chunk-D5ZSGQT4.js";
+} from "./chunk-KO6JQFRE.js";
 import {
   createCodebaseSearchTool as createCodebaseSearchTool3
 } from "./chunk-UBX7QYBD.js";
@@ -280,4 +280,4 @@ export {
   VercelToolFactory,
   MorphClient
 };
-//# sourceMappingURL=chunk-KVMZPOUT.js.map
+//# sourceMappingURL=chunk-C6QQL6FX.js.map

package/dist/{chunk-D5ZSGQT4.js → chunk-KO6JQFRE.js}
CHANGED

@@ -1,6 +1,6 @@
 import {
   runWarpGrep
-} from "./chunk-IFVROB4L.js";
+} from "./chunk-37SRI4GW.js";
 import {
   LocalRipgrepProvider
 } from "./chunk-ZJIIICRA.js";
@@ -104,4 +104,4 @@ export {
   WARP_GREP_TOOL_NAME,
   WARP_GREP_DESCRIPTION
 };
-//# sourceMappingURL=chunk-D5ZSGQT4.js.map
+//# sourceMappingURL=chunk-KO6JQFRE.js.map

package/dist/{chunk-PYTBBWL6.js → chunk-X5HNQ7SB.js}
CHANGED

@@ -1,9 +1,9 @@
 import {
   WARP_GREP_DESCRIPTION
-} from "./chunk-D5ZSGQT4.js";
+} from "./chunk-KO6JQFRE.js";
 import {
   runWarpGrep
-} from "./chunk-IFVROB4L.js";
+} from "./chunk-37SRI4GW.js";
 import {
   LocalRipgrepProvider
 } from "./chunk-ZJIIICRA.js";
@@ -67,4 +67,4 @@ export {
   execute,
   createMorphWarpGrepTool
 };
-//# sourceMappingURL=chunk-PYTBBWL6.js.map
+//# sourceMappingURL=chunk-X5HNQ7SB.js.map

package/dist/{chunk-4HOUN5TW.js → chunk-ZWY434TS.js}
CHANGED

@@ -2,10 +2,10 @@ import {
   WARP_GREP_DESCRIPTION,
   WARP_GREP_TOOL_NAME,
   formatResult
-} from "./chunk-D5ZSGQT4.js";
+} from "./chunk-KO6JQFRE.js";
 import {
   runWarpGrep
-} from "./chunk-IFVROB4L.js";
+} from "./chunk-37SRI4GW.js";
 import {
   getSystemPrompt
 } from "./chunk-WETRQJGU.js";
@@ -80,4 +80,4 @@ export {
   createMorphWarpGrepTool,
   openai_default
 };
-//# sourceMappingURL=chunk-4HOUN5TW.js.map
+//# sourceMappingURL=chunk-ZWY434TS.js.map

package/dist/client.cjs
CHANGED

@@ -1595,13 +1595,12 @@ function mergeRanges(ranges) {
 
 // tools/warp_grep/agent/runner.ts
 var import_path2 = __toESM(require("path"), 1);
-var import_promises2 = __toESM(require("fs/promises"), 1);
 var parser = new LLMResponseParser();
-async function buildInitialState(repoRoot, query) {
+async function buildInitialState(repoRoot, query, provider) {
   try {
-    const entries = await
-    const dirs = entries.filter((e) => e.
-    const files = entries.filter((e) => e.
+    const entries = await provider.analyse({ path: ".", maxResults: 100 });
+    const dirs = entries.filter((e) => e.type === "dir").map((d) => d.name).slice(0, 50);
+    const files = entries.filter((e) => e.type === "file").map((f) => f.name).slice(0, 50);
     const parts = [
       `<repo_root>${repoRoot}</repo_root>`,
       `<top_dirs>${dirs.join(", ")}</top_dirs>`,
@@ -1650,7 +1649,7 @@ async function runWarpGrep(config) {
   messages.push(systemMessage);
   const queryContent = `<query>${config.query}</query>`;
   messages.push({ role: "user", content: queryContent });
-  const initialState = await buildInitialState(repoRoot, config.query);
+  const initialState = await buildInitialState(repoRoot, config.query, config.provider);
   messages.push({ role: "user", content: initialState });
   const maxRounds = AGENT_CONFIG.MAX_ROUNDS;
   const model = config.model || DEFAULT_MODEL;
@@ -1793,7 +1792,7 @@ async function runWarpGrep(config) {
 }
 
 // tools/warp_grep/providers/local.ts
-var
+var import_promises3 = __toESM(require("fs/promises"), 1);
 var import_path4 = __toESM(require("path"), 1);
 
 // tools/warp_grep/utils/ripgrep.ts
@@ -1903,9 +1902,9 @@ function isTextualFile(filePath, maxBytes = 2e6) {
 }
 
 // tools/warp_grep/utils/files.ts
-var
+var import_promises2 = __toESM(require("fs/promises"), 1);
 async function readAllLines(filePath) {
-  const content = await
+  const content = await import_promises2.default.readFile(filePath, "utf8");
   return content.split(/\r?\n/);
 }
 
@@ -1917,7 +1916,7 @@ var LocalRipgrepProvider = class {
   }
   async grep(params) {
     const abs = resolveUnderRepo(this.repoRoot, params.path);
-    const stat = await
+    const stat = await import_promises3.default.stat(abs).catch(() => null);
     if (!stat) return { lines: [] };
     const targetArg = abs === import_path4.default.resolve(this.repoRoot) ? "." : toRepoRelative(this.repoRoot, abs);
     const args = [
@@ -1975,7 +1974,7 @@ Details: ${res.stderr}` : ""}`
   }
   async read(params) {
     const abs = resolveUnderRepo(this.repoRoot, params.path);
-    const stat = await
+    const stat = await import_promises3.default.stat(abs).catch(() => null);
     if (!stat || !stat.isFile()) {
       return {
         lines: [],
@@ -2011,7 +2010,7 @@ Details: ${res.stderr}` : ""}`
   }
   async analyse(params) {
     const abs = resolveUnderRepo(this.repoRoot, params.path);
-    const stat = await
+    const stat = await import_promises3.default.stat(abs).catch(() => null);
     if (!stat || !stat.isDirectory()) {
       return [];
     }
@@ -2021,7 +2020,7 @@ Details: ${res.stderr}` : ""}`
     const results = [];
     async function walk(dir, depth) {
       if (depth > maxDepth || results.length >= maxResults) return;
-      const entries = await
+      const entries = await import_promises3.default.readdir(dir, { withFileTypes: true });
       for (const entry of entries) {
         const full = import_path4.default.join(dir, entry.name);
         const rel = toRepoRelative(abs, full).replace(/^[.][/\\]?/, "");