@morphllm/morphsdk 0.2.21 → 0.2.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (193)
  1. package/dist/{chunk-BILUTNBC.js → chunk-4A7UBGLS.js} +2 -2
  2. package/dist/chunk-73RQWOQC.js +16 -0
  3. package/dist/chunk-73RQWOQC.js.map +1 -0
  4. package/dist/chunk-AFEPUNAO.js +15 -0
  5. package/dist/chunk-AFEPUNAO.js.map +1 -0
  6. package/dist/{chunk-VJK4PH5V.js → chunk-AG3ICTC5.js} +5 -2
  7. package/dist/chunk-AG3ICTC5.js.map +1 -0
  8. package/dist/chunk-EAA7D24N.js +201 -0
  9. package/dist/chunk-EAA7D24N.js.map +1 -0
  10. package/dist/chunk-EK7OQPWD.js +44 -0
  11. package/dist/chunk-EK7OQPWD.js.map +1 -0
  12. package/dist/chunk-G2RSY56Q.js +11 -0
  13. package/dist/chunk-G2RSY56Q.js.map +1 -0
  14. package/dist/chunk-GTOXMAF2.js +140 -0
  15. package/dist/chunk-GTOXMAF2.js.map +1 -0
  16. package/dist/chunk-HKZB23U7.js +85 -0
  17. package/dist/chunk-HKZB23U7.js.map +1 -0
  18. package/dist/{chunk-34F3D6JD.js → chunk-HWJN5AMA.js} +8 -8
  19. package/dist/chunk-LN4CTQZG.js +56 -0
  20. package/dist/chunk-LN4CTQZG.js.map +1 -0
  21. package/dist/chunk-NDZO5IPV.js +121 -0
  22. package/dist/chunk-NDZO5IPV.js.map +1 -0
  23. package/dist/chunk-RSLIOCOE.js +26 -0
  24. package/dist/chunk-RSLIOCOE.js.map +1 -0
  25. package/dist/chunk-S3GI7HNR.js +44 -0
  26. package/dist/chunk-S3GI7HNR.js.map +1 -0
  27. package/dist/chunk-SMGZ6A64.js +53 -0
  28. package/dist/chunk-SMGZ6A64.js.map +1 -0
  29. package/dist/chunk-TICMYDII.js +81 -0
  30. package/dist/chunk-TICMYDII.js.map +1 -0
  31. package/dist/chunk-UYBIKZPM.js +135 -0
  32. package/dist/chunk-UYBIKZPM.js.map +1 -0
  33. package/dist/chunk-VBBJGWHY.js +73 -0
  34. package/dist/chunk-VBBJGWHY.js.map +1 -0
  35. package/dist/chunk-W5CHJ6OX.js +53 -0
  36. package/dist/chunk-W5CHJ6OX.js.map +1 -0
  37. package/dist/{chunk-WXBUVKYL.js → chunk-WHZQDTM6.js} +2 -2
  38. package/dist/chunk-XYPMN4A3.js +1 -0
  39. package/dist/chunk-XYPMN4A3.js.map +1 -0
  40. package/dist/{chunk-YVGRWE7D.js → chunk-Y2IY7NYY.js} +2 -2
  41. package/dist/chunk-Z2FBMSNE.js +10 -0
  42. package/dist/chunk-Z2FBMSNE.js.map +1 -0
  43. package/dist/client.cjs +4 -1
  44. package/dist/client.cjs.map +1 -1
  45. package/dist/client.js +5 -5
  46. package/dist/index.cjs +4 -1
  47. package/dist/index.cjs.map +1 -1
  48. package/dist/index.js +9 -9
  49. package/dist/tools/codebase_search/anthropic.cjs +4 -1
  50. package/dist/tools/codebase_search/anthropic.cjs.map +1 -1
  51. package/dist/tools/codebase_search/anthropic.js +2 -2
  52. package/dist/tools/codebase_search/core.cjs +4 -1
  53. package/dist/tools/codebase_search/core.cjs.map +1 -1
  54. package/dist/tools/codebase_search/core.js +1 -1
  55. package/dist/tools/codebase_search/index.cjs +4 -1
  56. package/dist/tools/codebase_search/index.cjs.map +1 -1
  57. package/dist/tools/codebase_search/index.js +6 -6
  58. package/dist/tools/codebase_search/openai.cjs +4 -1
  59. package/dist/tools/codebase_search/openai.cjs.map +1 -1
  60. package/dist/tools/codebase_search/openai.js +2 -2
  61. package/dist/tools/codebase_search/types.cjs.map +1 -1
  62. package/dist/tools/codebase_search/vercel.cjs +4 -1
  63. package/dist/tools/codebase_search/vercel.cjs.map +1 -1
  64. package/dist/tools/codebase_search/vercel.js +2 -2
  65. package/dist/tools/warp_grep/agent/config.cjs +41 -0
  66. package/dist/tools/warp_grep/agent/config.cjs.map +1 -0
  67. package/dist/tools/warp_grep/agent/config.js +12 -0
  68. package/dist/tools/warp_grep/agent/config.js.map +1 -0
  69. package/dist/tools/warp_grep/agent/formatter.cjs +106 -0
  70. package/dist/tools/warp_grep/agent/formatter.cjs.map +1 -0
  71. package/dist/tools/warp_grep/agent/formatter.js +10 -0
  72. package/dist/tools/warp_grep/agent/formatter.js.map +1 -0
  73. package/dist/tools/warp_grep/agent/grep_helpers.cjs +148 -0
  74. package/dist/tools/warp_grep/agent/grep_helpers.cjs.map +1 -0
  75. package/dist/tools/warp_grep/agent/grep_helpers.js +14 -0
  76. package/dist/tools/warp_grep/agent/grep_helpers.js.map +1 -0
  77. package/dist/tools/warp_grep/agent/parser.cjs +165 -0
  78. package/dist/tools/warp_grep/agent/parser.cjs.map +1 -0
  79. package/dist/tools/warp_grep/agent/parser.js +10 -0
  80. package/dist/tools/warp_grep/agent/parser.js.map +1 -0
  81. package/dist/tools/warp_grep/agent/prompt.cjs +110 -0
  82. package/dist/tools/warp_grep/agent/prompt.cjs.map +1 -0
  83. package/dist/tools/warp_grep/agent/prompt.js +10 -0
  84. package/dist/tools/warp_grep/agent/prompt.js.map +1 -0
  85. package/dist/tools/warp_grep/agent/runner.cjs +744 -0
  86. package/dist/tools/warp_grep/agent/runner.cjs.map +1 -0
  87. package/dist/tools/warp_grep/agent/runner.js +17 -0
  88. package/dist/tools/warp_grep/agent/runner.js.map +1 -0
  89. package/dist/tools/warp_grep/agent/types.cjs +19 -0
  90. package/dist/tools/warp_grep/agent/types.cjs.map +1 -0
  91. package/dist/tools/warp_grep/agent/types.js +2 -0
  92. package/dist/tools/warp_grep/agent/types.js.map +1 -0
  93. package/dist/tools/warp_grep/anthropic.cjs +977 -0
  94. package/dist/tools/warp_grep/anthropic.cjs.map +1 -0
  95. package/dist/tools/warp_grep/anthropic.js +22 -0
  96. package/dist/tools/warp_grep/anthropic.js.map +1 -0
  97. package/dist/tools/warp_grep/index.cjs +1136 -0
  98. package/dist/tools/warp_grep/index.cjs.map +1 -0
  99. package/dist/tools/warp_grep/index.js +48 -0
  100. package/dist/tools/warp_grep/index.js.map +1 -0
  101. package/dist/tools/warp_grep/openai.cjs +980 -0
  102. package/dist/tools/warp_grep/openai.cjs.map +1 -0
  103. package/dist/tools/warp_grep/openai.js +22 -0
  104. package/dist/tools/warp_grep/openai.js.map +1 -0
  105. package/dist/tools/warp_grep/providers/command.cjs +98 -0
  106. package/dist/tools/warp_grep/providers/command.cjs.map +1 -0
  107. package/dist/tools/warp_grep/providers/command.js +9 -0
  108. package/dist/tools/warp_grep/providers/command.js.map +1 -0
  109. package/dist/tools/warp_grep/providers/local.cjs +232 -0
  110. package/dist/tools/warp_grep/providers/local.cjs.map +1 -0
  111. package/dist/tools/warp_grep/providers/local.js +12 -0
  112. package/dist/tools/warp_grep/providers/local.js.map +1 -0
  113. package/dist/tools/warp_grep/providers/types.cjs +19 -0
  114. package/dist/tools/warp_grep/providers/types.cjs.map +1 -0
  115. package/dist/tools/warp_grep/providers/types.js +1 -0
  116. package/dist/tools/warp_grep/providers/types.js.map +1 -0
  117. package/dist/tools/warp_grep/tools/analyse.cjs +40 -0
  118. package/dist/tools/warp_grep/tools/analyse.cjs.map +1 -0
  119. package/dist/tools/warp_grep/tools/analyse.js +8 -0
  120. package/dist/tools/warp_grep/tools/analyse.js.map +1 -0
  121. package/dist/tools/warp_grep/tools/finish.cjs +69 -0
  122. package/dist/tools/warp_grep/tools/finish.cjs.map +1 -0
  123. package/dist/tools/warp_grep/tools/finish.js +10 -0
  124. package/dist/tools/warp_grep/tools/finish.js.map +1 -0
  125. package/dist/tools/warp_grep/tools/grep.cjs +35 -0
  126. package/dist/tools/warp_grep/tools/grep.cjs.map +1 -0
  127. package/dist/tools/warp_grep/tools/grep.js +12 -0
  128. package/dist/tools/warp_grep/tools/grep.js.map +1 -0
  129. package/dist/tools/warp_grep/tools/read.cjs +34 -0
  130. package/dist/tools/warp_grep/tools/read.cjs.map +1 -0
  131. package/dist/tools/warp_grep/tools/read.js +8 -0
  132. package/dist/tools/warp_grep/tools/read.js.map +1 -0
  133. package/dist/tools/warp_grep/utils/files.cjs +45 -0
  134. package/dist/tools/warp_grep/utils/files.cjs.map +1 -0
  135. package/dist/tools/warp_grep/utils/files.js +8 -0
  136. package/dist/tools/warp_grep/utils/files.js.map +1 -0
  137. package/dist/tools/warp_grep/utils/format.cjs +42 -0
  138. package/dist/tools/warp_grep/utils/format.cjs.map +1 -0
  139. package/dist/tools/warp_grep/utils/format.js +18 -0
  140. package/dist/tools/warp_grep/utils/format.js.map +1 -0
  141. package/dist/tools/warp_grep/utils/paths.cjs +91 -0
  142. package/dist/tools/warp_grep/utils/paths.cjs.map +1 -0
  143. package/dist/tools/warp_grep/utils/paths.js +16 -0
  144. package/dist/tools/warp_grep/utils/paths.js.map +1 -0
  145. package/dist/tools/warp_grep/utils/ripgrep.cjs +50 -0
  146. package/dist/tools/warp_grep/utils/ripgrep.cjs.map +1 -0
  147. package/dist/tools/warp_grep/utils/ripgrep.js +8 -0
  148. package/dist/tools/warp_grep/utils/ripgrep.js.map +1 -0
  149. package/dist/tools/warp_grep/vercel.cjs +968 -0
  150. package/dist/tools/warp_grep/vercel.cjs.map +1 -0
  151. package/dist/tools/warp_grep/vercel.js +22 -0
  152. package/dist/tools/warp_grep/vercel.js.map +1 -0
  153. package/package.json +23 -3
  154. package/dist/anthropic-CknfcMoO.d.ts +0 -64
  155. package/dist/chunk-VJK4PH5V.js.map +0 -1
  156. package/dist/client.d.ts +0 -114
  157. package/dist/git/client.d.ts +0 -255
  158. package/dist/git/config.d.ts +0 -11
  159. package/dist/git/index.d.ts +0 -5
  160. package/dist/git/types.d.ts +0 -102
  161. package/dist/index.d.ts +0 -14
  162. package/dist/modelrouter/core.d.ts +0 -56
  163. package/dist/modelrouter/index.d.ts +0 -2
  164. package/dist/modelrouter/types.d.ts +0 -35
  165. package/dist/openai-BkKsS30n.d.ts +0 -111
  166. package/dist/tools/browser/anthropic.d.ts +0 -51
  167. package/dist/tools/browser/core.d.ts +0 -196
  168. package/dist/tools/browser/index.d.ts +0 -72
  169. package/dist/tools/browser/openai.d.ts +0 -69
  170. package/dist/tools/browser/prompts.d.ts +0 -7
  171. package/dist/tools/browser/types.d.ts +0 -227
  172. package/dist/tools/browser/vercel.d.ts +0 -69
  173. package/dist/tools/codebase_search/anthropic.d.ts +0 -40
  174. package/dist/tools/codebase_search/core.d.ts +0 -40
  175. package/dist/tools/codebase_search/index.d.ts +0 -10
  176. package/dist/tools/codebase_search/openai.d.ts +0 -87
  177. package/dist/tools/codebase_search/prompts.d.ts +0 -7
  178. package/dist/tools/codebase_search/types.d.ts +0 -46
  179. package/dist/tools/codebase_search/vercel.d.ts +0 -65
  180. package/dist/tools/fastapply/anthropic.d.ts +0 -4
  181. package/dist/tools/fastapply/core.d.ts +0 -41
  182. package/dist/tools/fastapply/index.d.ts +0 -10
  183. package/dist/tools/fastapply/openai.d.ts +0 -4
  184. package/dist/tools/fastapply/prompts.d.ts +0 -7
  185. package/dist/tools/fastapply/types.d.ts +0 -77
  186. package/dist/tools/fastapply/vercel.d.ts +0 -4
  187. package/dist/tools/index.d.ts +0 -10
  188. package/dist/tools/utils/resilience.d.ts +0 -58
  189. package/dist/vercel-B1GZ_g9N.d.ts +0 -69
  190. /package/dist/{chunk-BILUTNBC.js.map → chunk-4A7UBGLS.js.map} +0 -0
  191. /package/dist/{chunk-34F3D6JD.js.map → chunk-HWJN5AMA.js.map} +0 -0
  192. /package/dist/{chunk-WXBUVKYL.js.map → chunk-WHZQDTM6.js.map} +0 -0
  193. /package/dist/{chunk-YVGRWE7D.js.map → chunk-Y2IY7NYY.js.map} +0 -0
@@ -4,7 +4,7 @@ import {
  } from "./chunk-YQMPVJ2L.js";
  import {
  executeCodebaseSearch
- } from "./chunk-VJK4PH5V.js";
+ } from "./chunk-AG3ICTC5.js";
 
  // tools/codebase_search/anthropic.ts
  function createCodebaseSearchTool(config) {
@@ -80,4 +80,4 @@ function formatResult(result) {
  export {
  createCodebaseSearchTool
  };
- //# sourceMappingURL=chunk-BILUTNBC.js.map
+ //# sourceMappingURL=chunk-4A7UBGLS.js.map
@@ -0,0 +1,16 @@
+ // tools/warp_grep/tools/analyse.ts
+ async function toolAnalyse(provider, args) {
+ const list = await provider.analyse({
+ path: args.path,
+ pattern: args.pattern ?? null,
+ maxResults: args.maxResults ?? 100,
+ maxDepth: args.maxDepth ?? 2
+ });
+ if (!list.length) return "empty";
+ return list.map((e) => `${" ".repeat(e.depth)}- ${e.type === "dir" ? "[D]" : "[F]"} ${e.name}`).join("\n");
+ }
+
+ export {
+ toolAnalyse
+ };
+ //# sourceMappingURL=chunk-73RQWOQC.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/tools/analyse.ts"],"sourcesContent":["import type { WarpGrepProvider } from '../providers/types.js';\n\nexport async function toolAnalyse(\n provider: WarpGrepProvider,\n args: { path: string; pattern?: string | null; maxResults?: number; maxDepth?: number }\n): Promise<string> {\n const list = await provider.analyse({\n path: args.path,\n pattern: args.pattern ?? null,\n maxResults: args.maxResults ?? 100,\n maxDepth: args.maxDepth ?? 2,\n });\n if (!list.length) return 'empty';\n return list\n .map((e) => `${' '.repeat(e.depth)}- ${e.type === 'dir' ? '[D]' : '[F]'} ${e.name}`)\n .join('\\n');\n}\n\n\n"],"mappings":";AAEA,eAAsB,YACpB,UACA,MACiB;AACjB,QAAM,OAAO,MAAM,SAAS,QAAQ;AAAA,IAClC,MAAM,KAAK;AAAA,IACX,SAAS,KAAK,WAAW;AAAA,IACzB,YAAY,KAAK,cAAc;AAAA,IAC/B,UAAU,KAAK,YAAY;AAAA,EAC7B,CAAC;AACD,MAAI,CAAC,KAAK,OAAQ,QAAO;AACzB,SAAO,KACJ,IAAI,CAAC,MAAM,GAAG,KAAK,OAAO,EAAE,KAAK,CAAC,KAAK,EAAE,SAAS,QAAQ,QAAQ,KAAK,IAAI,EAAE,IAAI,EAAE,EACnF,KAAK,IAAI;AACd;","names":[]}
@@ -0,0 +1,15 @@
+ // tools/warp_grep/agent/config.ts
+ var AGENT_CONFIG = {
+ // Give the model freedom; failsafe cap to prevent infinite loops
+ MAX_ROUNDS: 10,
+ TIMEOUT_MS: 3e4
+ };
+ var DEFAULT_EXCLUDES = (process.env.MORPH_WARP_GREP_EXCLUDE || "").split(",").map((s) => s.trim()).filter(Boolean).concat(["node_modules", ".git", "dist", "build", ".cache", "venv", "target"]);
+ var DEFAULT_MODEL = "morph-warp-grep";
+
+ export {
+ AGENT_CONFIG,
+ DEFAULT_EXCLUDES,
+ DEFAULT_MODEL
+ };
+ //# sourceMappingURL=chunk-AFEPUNAO.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/agent/config.ts"],"sourcesContent":["// Agent configuration defaults for morph-warp-grep\n// Hard-coded: SDK does not expose control over rounds or timeout.\nexport const AGENT_CONFIG = {\n // Give the model freedom; failsafe cap to prevent infinite loops\n MAX_ROUNDS: 10,\n TIMEOUT_MS: 30000,\n};\n\nexport const DEFAULT_EXCLUDES = (process.env.MORPH_WARP_GREP_EXCLUDE || '')\n .split(',')\n .map(s => s.trim())\n .filter(Boolean)\n .concat(['node_modules', '.git', 'dist', 'build', '.cache', 'venv', 'target']);\n\nexport const DEFAULT_MODEL = 'morph-warp-grep';\n\n\n"],"mappings":";AAEO,IAAM,eAAe;AAAA;AAAA,EAE1B,YAAY;AAAA,EACZ,YAAY;AACd;AAEO,IAAM,oBAAoB,QAAQ,IAAI,2BAA2B,IACrE,MAAM,GAAG,EACT,IAAI,OAAK,EAAE,KAAK,CAAC,EACjB,OAAO,OAAO,EACd,OAAO,CAAC,gBAAgB,QAAQ,QAAQ,SAAS,UAAU,QAAQ,QAAQ,CAAC;AAExE,IAAM,gBAAgB;","names":[]}
@@ -43,7 +43,8 @@ async function executeCodebaseSearch(input, config) {
  const timeout = config.timeout || 3e4;
  const debug = config.debug || false;
  if (debug) {
- console.log(`[CodebaseSearch] Query: "${input.query.slice(0, 60)}..." repo=${config.repoId}`);
+ const ref = config.commitHash ? `commit=${config.commitHash.slice(0, 8)}` : config.branch ? `branch=${config.branch}` : "latest main";
+ console.log(`[CodebaseSearch] Query: "${input.query.slice(0, 60)}..." repo=${config.repoId} (${ref})`);
  console.log(`[CodebaseSearch] URL: ${searchUrl}/v1/codebase_search`);
  }
  const startTime = Date.now();
@@ -59,6 +60,8 @@ async function executeCodebaseSearch(input, config) {
  body: JSON.stringify({
  query: input.query,
  repoId: config.repoId,
+ branch: config.branch,
+ commitHash: config.commitHash,
  targetDirectories: input.target_directories || [],
  limit: input.limit || 10,
  candidateLimit: 50
@@ -102,4 +105,4 @@ export {
  CodebaseSearchClient,
  executeCodebaseSearch
  };
- //# sourceMappingURL=chunk-VJK4PH5V.js.map
+ //# sourceMappingURL=chunk-AG3ICTC5.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/codebase_search/core.ts"],"sourcesContent":["/**\n * Core implementation for codebase search\n * Calls Morph rerank service for two-stage semantic search\n */\n\nimport { fetchWithRetry, withTimeout } from '../utils/resilience.js';\nimport type { CodebaseSearchConfig, CodebaseSearchInput, CodebaseSearchResult } from './types.js';\n\n/**\n * CodebaseSearch client for programmatic semantic search\n */\nexport class CodebaseSearchClient {\n private config: { \n apiKey?: string; \n searchUrl?: string; \n debug?: boolean; \n timeout?: number; \n retryConfig?: any;\n };\n\n constructor(config: { apiKey?: string; debug?: boolean; timeout?: number; retryConfig?: any } = {}) {\n this.config = {\n apiKey: config.apiKey,\n searchUrl: process.env.MORPH_SEARCH_URL || 'http://embedrerank.morphllm.com:8081',\n debug: config.debug,\n timeout: config.timeout || 30000,\n retryConfig: config.retryConfig,\n };\n }\n\n /**\n * Execute a semantic code search\n * \n * @param input - Search parameters including query, repoId, and target directories\n * @param overrides - Optional config overrides for this operation\n * @returns Search results with ranked code matches\n */\n async search(\n input: { query: string; repoId: string; target_directories?: string[]; explanation?: string; limit?: number },\n overrides?: any\n ): Promise<CodebaseSearchResult> {\n return executeCodebaseSearch(\n {\n query: input.query,\n target_directories: input.target_directories,\n explanation: input.explanation,\n limit: input.limit,\n },\n { ...this.config, repoId: input.repoId, ...overrides }\n );\n }\n}\n\n/**\n * Execute semantic code search\n */\nexport async function executeCodebaseSearch(\n input: CodebaseSearchInput,\n config: CodebaseSearchConfig\n): Promise<CodebaseSearchResult> {\n const apiKey = config.apiKey || process.env.MORPH_API_KEY;\n if (!apiKey) {\n throw new Error('MORPH_API_KEY not found. Set environment variable or pass in config');\n }\n\n const searchUrl = config.searchUrl || process.env.MORPH_SEARCH_URL || 'http://embedrerank.morphllm.com:8081';\n const timeout = config.timeout || 30000;\n const debug = config.debug || false;\n\n if (debug) {\n const ref = config.commitHash ? `commit=${config.commitHash.slice(0, 8)}` : config.branch ? 
`branch=${config.branch}` : 'latest main';\n console.log(`[CodebaseSearch] Query: \"${input.query.slice(0, 60)}...\" repo=${config.repoId} (${ref})`);\n console.log(`[CodebaseSearch] URL: ${searchUrl}/v1/codebase_search`);\n }\n\n const startTime = Date.now();\n\n try {\n const fetchPromise = fetchWithRetry(\n `${searchUrl}/v1/codebase_search`,\n {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${apiKey}`,\n },\n body: JSON.stringify({\n query: input.query,\n repoId: config.repoId,\n branch: config.branch,\n commitHash: config.commitHash,\n targetDirectories: input.target_directories || [],\n limit: input.limit || 10,\n candidateLimit: 50,\n }),\n },\n config.retryConfig\n );\n\n const response = await withTimeout(fetchPromise, timeout, `Codebase search timed out after ${timeout}ms`);\n\n if (!response.ok) {\n const errorText = await response.text();\n if (debug) console.error(`[CodebaseSearch] Error: ${response.status} - ${errorText}`);\n return {\n success: false,\n results: [],\n stats: { totalResults: 0, candidatesRetrieved: 0, searchTimeMs: 0 },\n error: `Search failed (${response.status}): ${errorText}`,\n };\n }\n\n const data = await response.json();\n const elapsed = Date.now() - startTime;\n\n if (debug) {\n console.log(`[CodebaseSearch] ✅ ${data.results?.length || 0} results in ${elapsed}ms`);\n }\n\n return {\n success: true,\n results: data.results || [],\n stats: data.stats || { totalResults: 0, candidatesRetrieved: 0, searchTimeMs: elapsed },\n };\n\n } catch (error) {\n if (debug) console.error(`[CodebaseSearch] Exception: ${error instanceof Error ? error.message : error}`);\n return {\n success: false,\n results: [],\n stats: { totalResults: 0, candidatesRetrieved: 0, searchTimeMs: 0 },\n error: error instanceof Error ? 
error.message : 'Unknown error',\n };\n }\n}\n\n"],"mappings":";;;;;;AAWO,IAAM,uBAAN,MAA2B;AAAA,EACxB;AAAA,EAQR,YAAY,SAAoF,CAAC,GAAG;AAClG,SAAK,SAAS;AAAA,MACZ,QAAQ,OAAO;AAAA,MACf,WAAW,QAAQ,IAAI,oBAAoB;AAAA,MAC3C,OAAO,OAAO;AAAA,MACd,SAAS,OAAO,WAAW;AAAA,MAC3B,aAAa,OAAO;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,OACJ,OACA,WAC+B;AAC/B,WAAO;AAAA,MACL;AAAA,QACE,OAAO,MAAM;AAAA,QACb,oBAAoB,MAAM;AAAA,QAC1B,aAAa,MAAM;AAAA,QACnB,OAAO,MAAM;AAAA,MACf;AAAA,MACA,EAAE,GAAG,KAAK,QAAQ,QAAQ,MAAM,QAAQ,GAAG,UAAU;AAAA,IACvD;AAAA,EACF;AACF;AAKA,eAAsB,sBACpB,OACA,QAC+B;AAC/B,QAAM,SAAS,OAAO,UAAU,QAAQ,IAAI;AAC5C,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,qEAAqE;AAAA,EACvF;AAEA,QAAM,YAAY,OAAO,aAAa,QAAQ,IAAI,oBAAoB;AACtE,QAAM,UAAU,OAAO,WAAW;AAClC,QAAM,QAAQ,OAAO,SAAS;AAE9B,MAAI,OAAO;AACT,UAAM,MAAM,OAAO,aAAa,UAAU,OAAO,WAAW,MAAM,GAAG,CAAC,CAAC,KAAK,OAAO,SAAS,UAAU,OAAO,MAAM,KAAK;AACxH,YAAQ,IAAI,4BAA4B,MAAM,MAAM,MAAM,GAAG,EAAE,CAAC,aAAa,OAAO,MAAM,KAAK,GAAG,GAAG;AACrG,YAAQ,IAAI,yBAAyB,SAAS,qBAAqB;AAAA,EACrE;AAEA,QAAM,YAAY,KAAK,IAAI;AAE3B,MAAI;AACF,UAAM,eAAe;AAAA,MACnB,GAAG,SAAS;AAAA,MACZ;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,iBAAiB,UAAU,MAAM;AAAA,QACnC;AAAA,QACA,MAAM,KAAK,UAAU;AAAA,UACnB,OAAO,MAAM;AAAA,UACb,QAAQ,OAAO;AAAA,UACf,QAAQ,OAAO;AAAA,UACf,YAAY,OAAO;AAAA,UACnB,mBAAmB,MAAM,sBAAsB,CAAC;AAAA,UAChD,OAAO,MAAM,SAAS;AAAA,UACtB,gBAAgB;AAAA,QAClB,CAAC;AAAA,MACH;AAAA,MACA,OAAO;AAAA,IACT;AAEA,UAAM,WAAW,MAAM,YAAY,cAAc,SAAS,mCAAmC,OAAO,IAAI;AAExG,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,YAAY,MAAM,SAAS,KAAK;AACtC,UAAI,MAAO,SAAQ,MAAM,2BAA2B,SAAS,MAAM,MAAM,SAAS,EAAE;AACpF,aAAO;AAAA,QACL,SAAS;AAAA,QACT,SAAS,CAAC;AAAA,QACV,OAAO,EAAE,cAAc,GAAG,qBAAqB,GAAG,cAAc,EAAE;AAAA,QAClE,OAAO,kBAAkB,SAAS,MAAM,MAAM,SAAS;AAAA,MACzD;AAAA,IACF;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AACjC,UAAM,UAAU,KAAK,IAAI,IAAI;AAE7B,QAAI,OAAO;AACT,cAAQ,IAAI,2BAAsB,KAAK,SAAS,UAAU,CAAC,eAAe,OAAO,IAAI;AAAA,IACvF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,SAAS,KAAK,WAAW,CAAC;AAAA,MAC1B,OAAO,KAAK,SAAS,EAAE,cAAc,GAAG,qBAAqB,GAAG,cAAc,QAAQ;AAAA,IACxF;AAAA,EAEF,SAAS,OAAO;AACd,QAAI,MAAO,SAAQ,MAAM,+BAA+B,iBAAiB,QAAQ,MAAM,UAAU,KAAK,EAAE;AACxG,WAAO;AAAA,MACL,SAAS;AAAA,MACT,SAAS,CAAC;AAAA,MACV,OAAO,EAAE,cAAc,GAAG,qBAAqB,GAAG,cAAc,EAAE;AAAA,MAClE,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,IAClD;AAAA,EACF;AACF;","names":[]}
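The change to `executeCodebaseSearch` above threads two new optional config fields, `branch` and `commitHash`, into the search request body and into the debug log line. A hedged sketch of pinning a search to a specific ref; the field names come straight from the diff, while the subpath import is an assumption based on the `dist/tools/codebase_search` layout in the file list:

```ts
import { executeCodebaseSearch } from "@morphllm/morphsdk/tools/codebase_search"; // assumed export path

const result = await executeCodebaseSearch(
  { query: "where are auth tokens refreshed?", limit: 10 },
  {
    apiKey: process.env.MORPH_API_KEY,
    repoId: "my-org/my-repo",
    branch: "release/2.4",       // or commitHash: "<full sha>" to pin an exact snapshot
    debug: true,                 // now logs "... repo=my-org/my-repo (branch=release/2.4)"
  }
);
```

When neither field is set, the debug line falls back to "latest main", matching the previous behavior.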
@@ -0,0 +1,201 @@
+ import {
+ toolRead
+ } from "./chunk-Z2FBMSNE.js";
+ import {
+ getSystemPrompt
+ } from "./chunk-HKZB23U7.js";
+ import {
+ toolAnalyse
+ } from "./chunk-73RQWOQC.js";
+ import {
+ readFinishFiles
+ } from "./chunk-EK7OQPWD.js";
+ import {
+ AGENT_CONFIG,
+ DEFAULT_MODEL
+ } from "./chunk-AFEPUNAO.js";
+ import {
+ formatAgentToolOutput
+ } from "./chunk-TICMYDII.js";
+ import {
+ GrepState,
+ formatTurnGrepOutput,
+ parseAndFilterGrepOutput
+ } from "./chunk-NDZO5IPV.js";
+ import {
+ LLMResponseParser
+ } from "./chunk-GTOXMAF2.js";
+ import {
+ fetchWithRetry,
+ withTimeout
+ } from "./chunk-4VWJFZVS.js";
+
+ // tools/warp_grep/agent/runner.ts
+ import path from "path";
+ import fs from "fs/promises";
+ var parser = new LLMResponseParser();
+ async function buildInitialState(repoRoot, query) {
+ try {
+ const entries = await fs.readdir(repoRoot, { withFileTypes: true });
+ const dirs = entries.filter((e) => e.isDirectory()).map((d) => d.name).slice(0, 50);
+ const files = entries.filter((e) => e.isFile()).map((f) => f.name).slice(0, 50);
+ const parts = [
+ `<repo_root>${repoRoot}</repo_root>`,
+ `<top_dirs>${dirs.join(", ")}</top_dirs>`,
+ `<top_files>${files.join(", ")}</top_files>`
+ ];
+ return parts.join("\n");
+ } catch {
+ return `<repo_root>${repoRoot}</repo_root>`;
+ }
+ }
+ async function callModel(messages, model, apiKey) {
+ const api = "https://api.morphllm.com/v1/chat/completions";
+ const fetchPromise = fetchWithRetry(
+ api,
+ {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${apiKey || process.env.MORPH_API_KEY || ""}`
+ },
+ body: JSON.stringify({
+ model,
+ temperature: 0,
+ max_tokens: 1024,
+ messages
+ })
+ },
+ {}
+ );
+ const resp = await withTimeout(fetchPromise, AGENT_CONFIG.TIMEOUT_MS, "morph-warp-grep request timed out");
+ if (!resp.ok) {
+ const t = await resp.text();
+ throw new Error(`morph-warp-grep error ${resp.status}: ${t}`);
+ }
+ const data = await resp.json();
+ const content = data?.choices?.[0]?.message?.content;
+ if (!content || typeof content !== "string") {
+ throw new Error("Invalid response from model");
+ }
+ return content;
+ }
+ async function runWarpGrep(config) {
+ const repoRoot = path.resolve(config.repoRoot || process.cwd());
+ const messages = [];
+ const systemMessage = { role: "system", content: getSystemPrompt() };
+ messages.push(systemMessage);
+ const queryContent = `<query>${config.query}</query>`;
+ messages.push({ role: "user", content: queryContent });
+ const initialState = await buildInitialState(repoRoot, config.query);
+ messages.push({ role: "user", content: initialState });
+ const maxRounds = AGENT_CONFIG.MAX_ROUNDS;
+ const model = config.model || DEFAULT_MODEL;
+ const provider = config.provider;
+ const errors = [];
+ const grepState = new GrepState();
+ let finishMeta;
+ let terminationReason = "terminated";
+ for (let round = 1; round <= maxRounds; round += 1) {
+ const assistantContent = await callModel(messages, model, config.apiKey).catch((e) => {
+ errors.push({ message: e instanceof Error ? e.message : String(e) });
+ return "";
+ });
+ if (!assistantContent) break;
+ messages.push({ role: "assistant", content: assistantContent });
+ let toolCalls = [];
+ try {
+ toolCalls = parser.parse(assistantContent);
+ } catch (e) {
+ errors.push({ message: e instanceof Error ? e.message : String(e) });
+ terminationReason = "terminated";
+ break;
+ }
+ if (toolCalls.length === 0) {
+ errors.push({ message: "No tool calls produced by the model." });
+ terminationReason = "terminated";
+ break;
+ }
+ const finishCalls = toolCalls.filter((c) => c.name === "finish");
+ const grepCalls = toolCalls.filter((c) => c.name === "grep");
+ const analyseCalls = toolCalls.filter((c) => c.name === "analyse");
+ const readCalls = toolCalls.filter((c) => c.name === "read");
+ const formatted = [];
+ const otherPromises = [];
+ for (const c of analyseCalls) {
+ const args = c.arguments ?? {};
+ otherPromises.push(
+ toolAnalyse(provider, args).then(
+ (p) => formatAgentToolOutput("analyse", args, p, { isError: false }),
+ (err) => formatAgentToolOutput("analyse", args, String(err), { isError: true })
+ )
+ );
+ }
+ for (const c of readCalls) {
+ const args = c.arguments ?? {};
+ otherPromises.push(
+ toolRead(provider, args).then(
+ (p) => formatAgentToolOutput("read", args, p, { isError: false }),
+ (err) => formatAgentToolOutput("read", args, String(err), { isError: true })
+ )
+ );
+ }
+ const otherResults = await Promise.all(otherPromises);
+ formatted.push(...otherResults);
+ for (const c of grepCalls) {
+ const args = c.arguments ?? {};
+ try {
+ const grepRes = await provider.grep({ pattern: args.pattern, path: args.path });
+ const rawOutput = Array.isArray(grepRes.lines) ? grepRes.lines.join("\n") : "";
+ const newMatches = parseAndFilterGrepOutput(rawOutput, grepState);
+ let formattedPayload = formatTurnGrepOutput(newMatches);
+ if (formattedPayload === "No new matches found.") {
+ formattedPayload = "no new matches";
+ }
+ formatted.push(formatAgentToolOutput("grep", args, formattedPayload, { isError: false }));
+ } catch (err) {
+ formatted.push(formatAgentToolOutput("grep", args, String(err), { isError: true }));
+ }
+ }
+ if (formatted.length > 0) {
+ messages.push({ role: "user", content: formatted.join("\n") });
+ }
+ if (finishCalls.length) {
+ const fc = finishCalls[0];
+ const files = fc.arguments?.files ?? [];
+ finishMeta = { files };
+ terminationReason = "completed";
+ break;
+ }
+ }
+ if (terminationReason !== "completed" || !finishMeta) {
+ return { terminationReason, messages, errors };
+ }
+ const parts = ["Relevant context found:"];
+ for (const f of finishMeta.files) {
+ const ranges = f.lines.map(([s, e]) => `${s}-${e}`).join(", ");
+ parts.push(`- ${f.path}: ${ranges}`);
+ }
+ const payload = parts.join("\n");
+ const resolved = await readFinishFiles(
+ repoRoot,
+ finishMeta.files,
+ async (p, s, e) => {
+ const rr = await provider.read({ path: p, start: s, end: e });
+ return rr.lines.map((l) => {
+ const idx = l.indexOf("|");
+ return idx >= 0 ? l.slice(idx + 1) : l;
+ });
+ }
+ );
+ return {
+ terminationReason: "completed",
+ messages,
+ finish: { payload, metadata: finishMeta, resolved }
+ };
+ }
+
+ export {
+ runWarpGrep
+ };
+ //# sourceMappingURL=chunk-EAA7D24N.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/agent/runner.ts"],"sourcesContent":["import { AGENT_CONFIG, DEFAULT_MODEL } from './config.js';\nimport { getSystemPrompt } from './prompt.js';\nimport type { AgentRunResult, ChatMessage, SessionConfig, ToolCall, AgentFinish } from './types.js';\nimport { LLMResponseParser, LLMResponseParseError } from './parser.js';\nimport type { WarpGrepProvider } from '../providers/types.js';\nimport { toolRead } from '../tools/read.js';\nimport { toolAnalyse } from '../tools/analyse.js';\nimport { fetchWithRetry, withTimeout } from '../../utils/resilience.js';\nimport { formatAgentToolOutput } from './formatter.js';\nimport { GrepState, parseAndFilterGrepOutput, formatTurnGrepOutput } from './grep_helpers.js';\nimport { readFinishFiles } from '../tools/finish.js';\nimport path from 'path';\nimport fs from 'fs/promises';\n\ntype EventName =\n | 'initial_state'\n | 'round_start'\n | 'round_end'\n | 'finish'\n | 'error';\n\nexport type EventCallback = (name: EventName, payload: Record<string, unknown>) => void;\n\nconst parser = new LLMResponseParser();\n\nasync function buildInitialState(repoRoot: string, query: string): Promise<string> {\n // Summarize top-level directories and file counts\n try {\n const entries = await fs.readdir(repoRoot, { withFileTypes: true });\n const dirs = entries.filter(e => e.isDirectory()).map(d => d.name).slice(0, 50);\n const files = entries.filter(e => e.isFile()).map(f => f.name).slice(0, 50);\n const parts = [\n `<repo_root>${repoRoot}</repo_root>`,\n `<top_dirs>${dirs.join(', ')}</top_dirs>`,\n `<top_files>${files.join(', ')}</top_files>`,\n ];\n return parts.join('\\n');\n } catch {\n return `<repo_root>${repoRoot}</repo_root>`;\n }\n}\n\nfunction formatAssistantToolBlock(name: string, args: Record<string, unknown>, payload: string, isError = false): string {\n const argStr = Object.entries(args)\n .map(([k, v]) => `${k}=${JSON.stringify(v)}`)\n .join(' ');\n const prefix = isError ? 
'error' : 'result';\n return `<${prefix} name=\"${name}\" ${argStr}>\\n${payload}\\n</${prefix}>`;\n}\n\nasync function callModel(messages: ChatMessage[], model: string, apiKey?: string): Promise<string> {\n const api = 'https://api.morphllm.com/v1/chat/completions';\n const fetchPromise = fetchWithRetry(\n api,\n {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey || process.env.MORPH_API_KEY || ''}`,\n },\n body: JSON.stringify({\n model,\n temperature: 0.0,\n max_tokens: 1024,\n messages,\n }),\n },\n {}\n );\n const resp = await withTimeout(fetchPromise, AGENT_CONFIG.TIMEOUT_MS, 'morph-warp-grep request timed out');\n if (!resp.ok) {\n const t = await resp.text();\n throw new Error(`morph-warp-grep error ${resp.status}: ${t}`);\n }\n const data = await resp.json();\n const content = data?.choices?.[0]?.message?.content;\n if (!content || typeof content !== 'string') {\n throw new Error('Invalid response from model');\n }\n return content;\n}\n\nexport async function runWarpGrep(config: SessionConfig & { provider: WarpGrepProvider }): Promise<AgentRunResult> {\n const repoRoot = path.resolve(config.repoRoot || process.cwd());\n const messages: ChatMessage[] = [];\n\n // system\n const systemMessage = { role: 'system' as const, content: getSystemPrompt() };\n messages.push(systemMessage);\n // user query\n const queryContent = `<query>${config.query}</query>`;\n messages.push({ role: 'user', content: queryContent });\n // initial state\n const initialState = await buildInitialState(repoRoot, config.query);\n messages.push({ role: 'user', content: initialState });\n\n const maxRounds = AGENT_CONFIG.MAX_ROUNDS;\n const model = config.model || DEFAULT_MODEL;\n const provider = config.provider;\n const errors: Array<{ message: string }> = [];\n const grepState = new GrepState();\n\n let finishMeta: AgentFinish | undefined;\n let terminationReason: AgentRunResult['terminationReason'] = 'terminated';\n\n for (let round = 1; round <= maxRounds; round += 1) {\n // call model\n const assistantContent = await callModel(messages, model, config.apiKey).catch((e: unknown) => {\n errors.push({ message: e instanceof Error ? e.message : String(e) });\n return '';\n });\n if (!assistantContent) break;\n messages.push({ role: 'assistant', content: assistantContent });\n\n // parse tool calls\n let toolCalls: ToolCall[] = [];\n try {\n toolCalls = parser.parse(assistantContent);\n } catch (e) {\n errors.push({ message: e instanceof Error ? e.message : String(e) });\n terminationReason = 'terminated';\n break;\n }\n if (toolCalls.length === 0) {\n errors.push({ message: 'No tool calls produced by the model.' });\n terminationReason = 'terminated';\n break;\n }\n\n const finishCalls = toolCalls.filter(c => c.name === 'finish');\n const grepCalls = toolCalls.filter(c => c.name === 'grep');\n const analyseCalls = toolCalls.filter(c => c.name === 'analyse');\n const readCalls = toolCalls.filter(c => c.name === 'read');\n\n const formatted: string[] = [];\n\n // Execute non-grep tools in parallel\n const otherPromises: Array<Promise<string>> = [];\n for (const c of analyseCalls) {\n const args = (c.arguments ?? {}) as { path: string; pattern?: string | null };\n otherPromises.push(\n toolAnalyse(provider, args).then(\n p => formatAgentToolOutput('analyse', args, p, { isError: false }),\n err => formatAgentToolOutput('analyse', args, String(err), { isError: true })\n )\n );\n }\n for (const c of readCalls) {\n const args = (c.arguments ?? 
{}) as { path: string; start?: number; end?: number };\n otherPromises.push(\n toolRead(provider, args).then(\n p => formatAgentToolOutput('read', args, p, { isError: false }),\n err => formatAgentToolOutput('read', args, String(err), { isError: true })\n )\n );\n }\n const otherResults = await Promise.all(otherPromises);\n formatted.push(...otherResults);\n\n // Execute grep calls sequentially like MCP runner to keep outputs compact\n for (const c of grepCalls) {\n const args = (c.arguments ?? {}) as { pattern: string; path: string };\n try {\n const grepRes = await provider.grep({ pattern: args.pattern, path: args.path });\n const rawOutput = Array.isArray(grepRes.lines) ? grepRes.lines.join('\\n') : '';\n const newMatches = parseAndFilterGrepOutput(rawOutput, grepState);\n let formattedPayload = formatTurnGrepOutput(newMatches);\n if (formattedPayload === \"No new matches found.\") {\n formattedPayload = \"no new matches\";\n }\n formatted.push(formatAgentToolOutput('grep', args, formattedPayload, { isError: false }));\n } catch (err) {\n formatted.push(formatAgentToolOutput('grep', args, String(err), { isError: true }));\n }\n }\n\n if (formatted.length > 0) {\n messages.push({ role: 'user', content: formatted.join('\\n') });\n }\n\n if (finishCalls.length) {\n const fc = finishCalls[0];\n const files = ((fc.arguments as any)?.files ?? []) as AgentFinish['files'];\n finishMeta = { files };\n terminationReason = 'completed';\n break;\n }\n }\n\n if (terminationReason !== 'completed' || !finishMeta) {\n return { terminationReason, messages, errors };\n }\n\n // Build finish payload\n const parts: string[] = ['Relevant context found:'];\n for (const f of finishMeta.files) {\n const ranges = f.lines.map(([s, e]) => `${s}-${e}`).join(', ');\n parts.push(`- ${f.path}: ${ranges}`);\n }\n const payload = parts.join('\\n');\n\n // Resolve file contents for returned ranges\n const resolved = await readFinishFiles(\n repoRoot,\n finishMeta.files,\n async (p: string, s: number, e: number) => {\n const rr = await provider.read({ path: p, start: s, end: e });\n // rr.lines are \"line|content\" → strip the \"line|\" prefix\n return rr.lines.map(l => {\n const idx = l.indexOf('|');\n return idx >= 0 ? 
l.slice(idx + 1) : l;\n });\n }\n );\n\n return {\n terminationReason: 'completed',\n messages,\n finish: { payload, metadata: finishMeta, resolved },\n };\n}\n\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAWA,OAAO,UAAU;AACjB,OAAO,QAAQ;AAWf,IAAM,SAAS,IAAI,kBAAkB;AAErC,eAAe,kBAAkB,UAAkB,OAAgC;AAEjF,MAAI;AACF,UAAM,UAAU,MAAM,GAAG,QAAQ,UAAU,EAAE,eAAe,KAAK,CAAC;AAClE,UAAM,OAAO,QAAQ,OAAO,OAAK,EAAE,YAAY,CAAC,EAAE,IAAI,OAAK,EAAE,IAAI,EAAE,MAAM,GAAG,EAAE;AAC9E,UAAM,QAAQ,QAAQ,OAAO,OAAK,EAAE,OAAO,CAAC,EAAE,IAAI,OAAK,EAAE,IAAI,EAAE,MAAM,GAAG,EAAE;AAC1E,UAAM,QAAQ;AAAA,MACZ,cAAc,QAAQ;AAAA,MACtB,aAAa,KAAK,KAAK,IAAI,CAAC;AAAA,MAC5B,cAAc,MAAM,KAAK,IAAI,CAAC;AAAA,IAChC;AACA,WAAO,MAAM,KAAK,IAAI;AAAA,EACxB,QAAQ;AACN,WAAO,cAAc,QAAQ;AAAA,EAC/B;AACF;AAUA,eAAe,UAAU,UAAyB,OAAe,QAAkC;AACjG,QAAM,MAAM;AACZ,QAAM,eAAe;AAAA,IACnB;AAAA,IACA;AAAA,MACE,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,eAAe,UAAU,UAAU,QAAQ,IAAI,iBAAiB,EAAE;AAAA,MACpE;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA,aAAa;AAAA,QACb,YAAY;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IACA,CAAC;AAAA,EACH;AACA,QAAM,OAAO,MAAM,YAAY,cAAc,aAAa,YAAY,mCAAmC;AACzG,MAAI,CAAC,KAAK,IAAI;AACZ,UAAM,IAAI,MAAM,KAAK,KAAK;AAC1B,UAAM,IAAI,MAAM,yBAAyB,KAAK,MAAM,KAAK,CAAC,EAAE;AAAA,EAC9D;AACA,QAAM,OAAO,MAAM,KAAK,KAAK;AAC7B,QAAM,UAAU,MAAM,UAAU,CAAC,GAAG,SAAS;AAC7C,MAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AAC3C,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC/C;AACA,SAAO;AACT;AAEA,eAAsB,YAAY,QAAiF;AACjH,QAAM,WAAW,KAAK,QAAQ,OAAO,YAAY,QAAQ,IAAI,CAAC;AAC9D,QAAM,WAA0B,CAAC;AAGjC,QAAM,gBAAgB,EAAE,MAAM,UAAmB,SAAS,gBAAgB,EAAE;AAC5E,WAAS,KAAK,aAAa;AAE3B,QAAM,eAAe,UAAU,OAAO,KAAK;AAC3C,WAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,aAAa,CAAC;AAErD,QAAM,eAAe,MAAM,kBAAkB,UAAU,OAAO,KAAK;AACnE,WAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,aAAa,CAAC;AAErD,QAAM,YAAY,aAAa;AAC/B,QAAM,QAAQ,OAAO,SAAS;AAC9B,QAAM,WAAW,OAAO;AACxB,QAAM,SAAqC,CAAC;AAC5C,QAAM,YAAY,IAAI,UAAU;AAEhC,MAAI;AACJ,MAAI,oBAAyD;AAE7D,WAAS,QAAQ,GAAG,SAAS,WAAW,SAAS,GAAG;AAElD,UAAM,mBAAmB,MAAM,UAAU,UAAU,OAAO,OAAO,MAAM,EAAE,MAAM,CAAC,MAAe;AAC7F,aAAO,KAAK,EAAE,SAAS,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,EAAE,CAAC;AACnE,aAAO;AAAA,IACT,CAAC;AACD,QAAI,CAAC,iBAAkB;AACvB,aAAS,KAAK,EAAE,MAAM,aAAa,SAAS,iBAAiB,CAAC;AAG9D,QAAI,YAAwB,CAAC;AAC7B,QAAI;AACF,kBAAY,OAAO,MAAM,gBAAgB;AAAA,IAC3C,SAAS,GAAG;AACV,aAAO,KAAK,EAAE,SAAS,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,EAAE,CAAC;AACnE,0BAAoB;AACpB;AAAA,IACF;AACA,QAAI,UAAU,WAAW,GAAG;AAC1B,aAAO,KAAK,EAAE,SAAS,uCAAuC,CAAC;AAC/D,0BAAoB;AACpB;AAAA,IACF;AAEA,UAAM,cAAc,UAAU,OAAO,OAAK,EAAE,SAAS,QAAQ;AAC7D,UAAM,YAAY,UAAU,OAAO,OAAK,EAAE,SAAS,MAAM;AACzD,UAAM,eAAe,UAAU,OAAO,OAAK,EAAE,SAAS,SAAS;AAC/D,UAAM,YAAY,UAAU,OAAO,OAAK,EAAE,SAAS,MAAM;AAEzD,UAAM,YAAsB,CAAC;AAG7B,UAAM,gBAAwC,CAAC;AAC/C,eAAW,KAAK,cAAc;AAC5B,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,oBAAc;AAAA,QACZ,YAAY,UAAU,IAAI,EAAE;AAAA,UAC1B,OAAK,sBAAsB,WAAW,MAAM,GAAG,EAAE,SAAS,MAAM,CAAC;AAAA,UACjE,SAAO,sBAAsB,WAAW,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC;AAAA,QAC9E;AAAA,MACF;AAAA,IACF;AACA,eAAW,KAAK,WAAW;AACzB,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,oBAAc;AAAA,QACZ,SAAS,UAAU,IAAI,EAAE;AAAA,UACvB,OAAK,sBAAsB,QAAQ,MAAM,GAAG,EAAE,SAAS,MAAM,CAAC;AAAA,UAC9D,SAAO,sBAAsB,QAAQ,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC;AAAA,QAC3E;AAAA,MACF;AAAA,IACF;AACA,UAAM,eAAe,MAAM,QAAQ,IAAI,aAAa;AACpD,cAAU,KAAK,GAAG,YAAY;AAG9B,eAAW,KAAK,WAAW;AACzB,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,UAAI;AACF,cAAM,UAAU,MAAM,SAAS,KAAK,EAAE,SAAS,KAAK,SAAS,MAAM,KAAK,KAAK,CAAC;AAC9E,cAAM,YAAY,MAAM,QAAQ,QAAQ,KAAK,IAAI,QAAQ,MAAM,KAAK,IAAI,IAAI;AAC5E,cAAM,aAAa,yBAAyB,WAAW,SAAS;AAChE,YAAI,mBAAmB,qBAAqB,UAAU;AACtD,YAAI,qBAAqB,yBAAyB;AAChD,6BAAmB;AAAA,QACrB;AACA,kBAAU,KAAK,sBAAsB,QAAQ,MAAM,kBAAkB,EAAE,SAAS,
MAAM,CAAC,CAAC;AAAA,MAC1F,SAAS,KAAK;AACZ,kBAAU,KAAK,sBAAsB,QAAQ,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC,CAAC;AAAA,MACpF;AAAA,IACF;AAEA,QAAI,UAAU,SAAS,GAAG;AACxB,eAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,UAAU,KAAK,IAAI,EAAE,CAAC;AAAA,IAC/D;AAEA,QAAI,YAAY,QAAQ;AACtB,YAAM,KAAK,YAAY,CAAC;AACxB,YAAM,QAAU,GAAG,WAAmB,SAAS,CAAC;AAChD,mBAAa,EAAE,MAAM;AACrB,0BAAoB;AACpB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,sBAAsB,eAAe,CAAC,YAAY;AACpD,WAAO,EAAE,mBAAmB,UAAU,OAAO;AAAA,EAC/C;AAGA,QAAM,QAAkB,CAAC,yBAAyB;AAClD,aAAW,KAAK,WAAW,OAAO;AAChC,UAAM,SAAS,EAAE,MAAM,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,IAAI;AAC7D,UAAM,KAAK,KAAK,EAAE,IAAI,KAAK,MAAM,EAAE;AAAA,EACrC;AACA,QAAM,UAAU,MAAM,KAAK,IAAI;AAG/B,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA,WAAW;AAAA,IACX,OAAO,GAAW,GAAW,MAAc;AACzC,YAAM,KAAK,MAAM,SAAS,KAAK,EAAE,MAAM,GAAG,OAAO,GAAG,KAAK,EAAE,CAAC;AAE5D,aAAO,GAAG,MAAM,IAAI,OAAK;AACvB,cAAM,MAAM,EAAE,QAAQ,GAAG;AACzB,eAAO,OAAO,IAAI,EAAE,MAAM,MAAM,CAAC,IAAI;AAAA,MACvC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AAAA,IACL,mBAAmB;AAAA,IACnB;AAAA,IACA,QAAQ,EAAE,SAAS,UAAU,YAAY,SAAS;AAAA,EACpD;AACF;","names":[]}
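`runWarpGrep` above drives the whole loop: it seeds the conversation with the system prompt, the `<query>`, and a snapshot of the repo root, then alternates model calls and tool execution until the model emits `finish` or the 10-round cap is hit. A rough usage sketch; `warpGrepProvider` is a placeholder for whichever provider implementation you wire in (the loop only calls its `grep`, `read`, and `analyse` methods), and the subpath import is an assumption based on the dist layout:

```ts
import { runWarpGrep } from "@morphllm/morphsdk/tools/warp_grep"; // assumed export path
import { warpGrepProvider } from "./my-provider.js";              // hypothetical local/command provider

const run = await runWarpGrep({
  query: "where is the retry/backoff logic for outbound HTTP requests?",
  repoRoot: process.cwd(),
  apiKey: process.env.MORPH_API_KEY,  // callModel also falls back to MORPH_API_KEY itself
  provider: warpGrepProvider,         // must implement grep(), read(), analyse()
});

if (run.terminationReason === "completed" && run.finish) {
  console.log(run.finish.payload);    // "Relevant context found:" plus path: start-end ranges
  // run.finish.resolved carries the file contents for those ranges
} else {
  console.error(run.errors);          // model/parse errors collected during the loop
}
```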
@@ -0,0 +1,44 @@
+ // tools/warp_grep/tools/finish.ts
+ function normalizeFinishFiles(files) {
+ return files.map((f) => {
+ const merged = mergeRanges(f.lines);
+ return { path: f.path, lines: merged };
+ });
+ }
+ async function readFinishFiles(repoRoot, files, reader) {
+ const out = [];
+ for (const f of files) {
+ const ranges = mergeRanges(f.lines);
+ const chunks = [];
+ for (const [s, e] of ranges) {
+ const lines = await reader(f.path, s, e);
+ chunks.push(lines.join("\n"));
+ }
+ out.push({ path: f.path, ranges, content: chunks.join("\n") });
+ }
+ return out;
+ }
+ function mergeRanges(ranges) {
+ if (!ranges.length) return [];
+ const sorted = [...ranges].sort((a, b) => a[0] - b[0]);
+ const merged = [];
+ let [cs, ce] = sorted[0];
+ for (let i = 1; i < sorted.length; i++) {
+ const [s, e] = sorted[i];
+ if (s <= ce + 1) {
+ ce = Math.max(ce, e);
+ } else {
+ merged.push([cs, ce]);
+ cs = s;
+ ce = e;
+ }
+ }
+ merged.push([cs, ce]);
+ return merged;
+ }
+
+ export {
+ normalizeFinishFiles,
+ readFinishFiles
+ };
+ //# sourceMappingURL=chunk-EK7OQPWD.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/tools/finish.ts"],"sourcesContent":["import type { FinishFileSpec } from '../agent/types.js';\nimport fs from 'fs/promises';\n\nexport function normalizeFinishFiles(files: FinishFileSpec[]): FinishFileSpec[] {\n return files.map((f) => {\n const merged = mergeRanges(f.lines);\n return { path: f.path, lines: merged };\n });\n}\n\nexport async function readFinishFiles(\n repoRoot: string,\n files: FinishFileSpec[],\n reader: (path: string, start: number, end: number) => Promise<string[]>\n): Promise<{ path: string; ranges: Array<[number, number]>; content: string }[]> {\n const out: { path: string; ranges: Array<[number, number]>; content: string }[] = [];\n for (const f of files) {\n const ranges = mergeRanges(f.lines);\n const chunks: string[] = [];\n for (const [s, e] of ranges) {\n const lines = await reader(f.path, s, e);\n chunks.push(lines.join('\\n'));\n }\n out.push({ path: f.path, ranges, content: chunks.join('\\n') });\n }\n return out;\n}\n\nfunction mergeRanges(ranges: Array<[number, number]>): Array<[number, number]> {\n if (!ranges.length) return [];\n const sorted = [...ranges].sort((a, b) => a[0] - b[0]);\n const merged: Array<[number, number]> = [];\n let [cs, ce] = sorted[0];\n for (let i = 1; i < sorted.length; i++) {\n const [s, e] = sorted[i];\n if (s <= ce + 1) {\n ce = Math.max(ce, e);\n } else {\n merged.push([cs, ce]);\n cs = s;\n ce = e;\n }\n }\n merged.push([cs, ce]);\n return merged;\n}\n\n\n"],"mappings":";AAGO,SAAS,qBAAqB,OAA2C;AAC9E,SAAO,MAAM,IAAI,CAAC,MAAM;AACtB,UAAM,SAAS,YAAY,EAAE,KAAK;AAClC,WAAO,EAAE,MAAM,EAAE,MAAM,OAAO,OAAO;AAAA,EACvC,CAAC;AACH;AAEA,eAAsB,gBACpB,UACA,OACA,QAC+E;AAC/E,QAAM,MAA4E,CAAC;AACnF,aAAW,KAAK,OAAO;AACrB,UAAM,SAAS,YAAY,EAAE,KAAK;AAClC,UAAM,SAAmB,CAAC;AAC1B,eAAW,CAAC,GAAG,CAAC,KAAK,QAAQ;AAC3B,YAAM,QAAQ,MAAM,OAAO,EAAE,MAAM,GAAG,CAAC;AACvC,aAAO,KAAK,MAAM,KAAK,IAAI,CAAC;AAAA,IAC9B;AACA,QAAI,KAAK,EAAE,MAAM,EAAE,MAAM,QAAQ,SAAS,OAAO,KAAK,IAAI,EAAE,CAAC;AAAA,EAC/D;AACA,SAAO;AACT;AAEA,SAAS,YAAY,QAA0D;AAC7E,MAAI,CAAC,OAAO,OAAQ,QAAO,CAAC;AAC5B,QAAM,SAAS,CAAC,GAAG,MAAM,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;AACrD,QAAM,SAAkC,CAAC;AACzC,MAAI,CAAC,IAAI,EAAE,IAAI,OAAO,CAAC;AACvB,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,CAAC,GAAG,CAAC,IAAI,OAAO,CAAC;AACvB,QAAI,KAAK,KAAK,GAAG;AACf,WAAK,KAAK,IAAI,IAAI,CAAC;AAAA,IACrB,OAAO;AACL,aAAO,KAAK,CAAC,IAAI,EAAE,CAAC;AACpB,WAAK;AACL,WAAK;AAAA,IACP;AAAA,EACF;AACA,SAAO,KAAK,CAAC,IAAI,EAAE,CAAC;AACpB,SAAO;AACT;","names":[]}
@@ -0,0 +1,11 @@
+ // tools/warp_grep/utils/files.ts
+ import fs from "fs/promises";
+ async function readAllLines(filePath) {
+ const content = await fs.readFile(filePath, "utf8");
+ return content.split(/\r?\n/);
+ }
+
+ export {
+ readAllLines
+ };
+ //# sourceMappingURL=chunk-G2RSY56Q.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/utils/files.ts"],"sourcesContent":["import fs from 'fs/promises';\n\nexport async function readAllLines(filePath: string): Promise<string[]> {\n const content = await fs.readFile(filePath, 'utf8');\n // Preserve newlines; split keeping consistency\n return content.split(/\\r?\\n/);\n}\n\n\n"],"mappings":";AAAA,OAAO,QAAQ;AAEf,eAAsB,aAAa,UAAqC;AACtE,QAAM,UAAU,MAAM,GAAG,SAAS,UAAU,MAAM;AAElD,SAAO,QAAQ,MAAM,OAAO;AAC9B;","names":[]}
@@ -0,0 +1,140 @@
+ // tools/warp_grep/agent/parser.ts
+ var LLMResponseParseError = class extends Error {
+ constructor(message) {
+ super(message);
+ this.name = "LLMResponseParseError";
+ }
+ };
+ var LLMResponseParser = class {
+ finishSpecSplitRe = /,(?=[^,\s]+:)/;
+ parse(text) {
+ if (typeof text !== "string") {
+ throw new TypeError("Command text must be a string.");
+ }
+ const lines = text.split(/\r?\n/).map((l) => l.trim());
+ const commands = [];
+ let finishAccumulator = null;
+ lines.forEach((line, idx) => {
+ if (!line || line.startsWith("#")) return;
+ const ctx = { lineNumber: idx + 1, raw: line };
+ const parts = this.splitLine(line, ctx);
+ if (parts.length === 0) return;
+ const cmd = parts[0];
+ switch (cmd) {
+ case "analyse":
+ this.handleAnalyse(parts, ctx, commands);
+ break;
+ case "grep":
+ this.handleGrep(parts, ctx, commands);
+ break;
+ case "read":
+ this.handleRead(parts, ctx, commands);
+ break;
+ case "finish":
+ finishAccumulator = this.handleFinish(parts, ctx, finishAccumulator);
+ break;
+ default:
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: Unsupported command '${cmd}'`);
+ }
+ });
+ if (finishAccumulator) {
+ const map = finishAccumulator;
+ const entries = [...map.entries()];
+ const filesPayload = entries.map(([path, ranges]) => ({
+ path,
+ lines: [...ranges].sort((a, b) => a[0] - b[0])
+ }));
+ commands.push({ name: "finish", arguments: { files: filesPayload } });
+ }
+ return commands;
+ }
+ splitLine(line, ctx) {
+ try {
+ const parts = [];
+ let current = "";
+ let inSingle = false;
+ for (let i = 0; i < line.length; i++) {
+ const ch = line[i];
+ if (ch === "'" && line[i - 1] !== "\\") {
+ inSingle = !inSingle;
+ current += ch;
+ } else if (!inSingle && /\s/.test(ch)) {
+ if (current) {
+ parts.push(current);
+ current = "";
+ }
+ } else {
+ current += ch;
+ }
+ }
+ if (current) parts.push(current);
+ return parts;
+ } catch {
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: Unable to parse line.`);
+ }
+ }
+ handleAnalyse(parts, ctx, commands) {
+ if (parts.length < 2) {
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: analyse requires <path>`);
+ }
+ const path = parts[1];
+ const pattern = parts[2]?.replace(/^"|"$/g, "") ?? null;
+ commands.push({ name: "analyse", arguments: { path, pattern } });
+ }
+ // no glob tool in MCP
+ handleGrep(parts, ctx, commands) {
+ if (parts.length < 3) {
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: grep requires '<pattern>' and <path>`);
+ }
+ const pat = parts[1];
+ if (!pat.startsWith("'") || !pat.endsWith("'")) {
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: grep pattern must be single-quoted`);
+ }
+ commands.push({ name: "grep", arguments: { pattern: pat.slice(1, -1), path: parts[2] } });
+ }
+ handleRead(parts, ctx, commands) {
+ if (parts.length < 2) {
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: read requires <path> or <path>:<start-end>`);
+ }
+ const spec = parts[1];
+ const rangeIdx = spec.indexOf(":");
+ if (rangeIdx === -1) {
+ commands.push({ name: "read", arguments: { path: spec } });
+ return;
+ }
+ const path = spec.slice(0, rangeIdx);
+ const range = spec.slice(rangeIdx + 1);
+ const [s, e] = range.split("-").map((v) => parseInt(v, 10));
+ if (!Number.isFinite(s) || !Number.isFinite(e)) {
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid read range '${range}'`);
+ }
+ commands.push({ name: "read", arguments: { path, start: s, end: e } });
+ }
+ handleFinish(parts, ctx, acc) {
+ const map = acc ?? /* @__PURE__ */ new Map();
+ const args = parts.slice(1);
+ for (const token of args) {
+ const [path, rangesText] = token.split(":", 2);
+ if (!path || !rangesText) {
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid finish token '${token}'`);
+ }
+ const rangeSpecs = rangesText.split(",").filter(Boolean);
+ for (const spec of rangeSpecs) {
+ const [s, e] = spec.split("-").map((v) => parseInt(v, 10));
+ if (!Number.isFinite(s) || !Number.isFinite(e) || e < s) {
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid range '${spec}'`);
+ }
+ const arr = map.get(path) ?? [];
+ arr.push([s, e]);
+ map.set(path, arr);
+ }
+ }
+ return map;
+ }
+ };
+
+ export {
+ LLMResponseParseError,
+ LLMResponseParser
+ };
+ //# sourceMappingURL=chunk-GTOXMAF2.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/agent/parser.ts"],"sourcesContent":["// Parses assistant lines into structured tool calls\nimport type { ToolCall } from './types.js';\n\nexport class LLMResponseParseError extends Error {\n constructor(message: string) {\n super(message);\n this.name = 'LLMResponseParseError';\n }\n}\n\ntype LineContext = { lineNumber: number; raw: string };\n\nexport class LLMResponseParser {\n private readonly finishSpecSplitRe = /,(?=[^,\\s]+:)/;\n\n parse(text: string): ToolCall[] {\n if (typeof text !== 'string') {\n throw new TypeError('Command text must be a string.');\n }\n const lines = text.split(/\\r?\\n/).map(l => l.trim());\n const commands: ToolCall[] = [];\n let finishAccumulator: Map<string, number[][]> | null = null;\n\n lines.forEach((line, idx) => {\n if (!line || line.startsWith('#')) return;\n const ctx: LineContext = { lineNumber: idx + 1, raw: line };\n const parts = this.splitLine(line, ctx);\n if (parts.length === 0) return;\n const cmd = parts[0];\n switch (cmd) {\n case 'analyse':\n this.handleAnalyse(parts, ctx, commands);\n break;\n case 'grep':\n this.handleGrep(parts, ctx, commands);\n break;\n case 'read':\n this.handleRead(parts, ctx, commands);\n break;\n case 'finish':\n finishAccumulator = this.handleFinish(parts, ctx, finishAccumulator);\n break;\n default:\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: Unsupported command '${cmd}'`);\n }\n });\n\n if (finishAccumulator) {\n const map = finishAccumulator as Map<string, number[][]>;\n const entries = [...map.entries()];\n const filesPayload = entries.map(([path, ranges]) => ({\n path,\n lines: [...ranges].sort((a, b) => a[0] - b[0]) as Array<[number, number]>,\n }));\n commands.push({ name: 'finish', arguments: { files: filesPayload } });\n }\n return commands;\n }\n\n private splitLine(line: string, ctx: LineContext): string[] {\n try {\n // Split by whitespace but keep quoted blocks as one\n const parts: string[] = [];\n let current = '';\n let inSingle = false;\n for (let i = 0; i < line.length; i++) {\n const ch = line[i];\n if (ch === \"'\" && line[i - 1] !== '\\\\') {\n inSingle = !inSingle;\n current += ch;\n } else if (!inSingle && /\\s/.test(ch)) {\n if (current) {\n parts.push(current);\n current = '';\n }\n } else {\n current += ch;\n }\n }\n if (current) parts.push(current);\n return parts;\n } catch {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: Unable to parse line.`);\n }\n }\n\n private handleAnalyse(parts: string[], ctx: LineContext, commands: ToolCall[]) {\n // analyse <path> [pattern]\n if (parts.length < 2) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: analyse requires <path>`);\n }\n const path = parts[1];\n const pattern = parts[2]?.replace(/^\"|\"$/g, '') ?? 
null;\n commands.push({ name: 'analyse', arguments: { path, pattern } });\n }\n\n // no glob tool in MCP\n\n private handleGrep(parts: string[], ctx: LineContext, commands: ToolCall[]) {\n // grep '<pattern>' <path>\n if (parts.length < 3) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: grep requires '<pattern>' and <path>`);\n }\n const pat = parts[1];\n if (!pat.startsWith(\"'\") || !pat.endsWith(\"'\")) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: grep pattern must be single-quoted`);\n }\n commands.push({ name: 'grep', arguments: { pattern: pat.slice(1, -1), path: parts[2] } });\n }\n\n private handleRead(parts: string[], ctx: LineContext, commands: ToolCall[]) {\n // read <path>[:start-end]\n if (parts.length < 2) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: read requires <path> or <path>:<start-end>`);\n }\n const spec = parts[1];\n const rangeIdx = spec.indexOf(':');\n if (rangeIdx === -1) {\n commands.push({ name: 'read', arguments: { path: spec } });\n return;\n }\n const path = spec.slice(0, rangeIdx);\n const range = spec.slice(rangeIdx + 1);\n const [s, e] = range.split('-').map(v => parseInt(v, 10));\n if (!Number.isFinite(s) || !Number.isFinite(e)) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid read range '${range}'`);\n }\n commands.push({ name: 'read', arguments: { path, start: s, end: e } });\n }\n\n private handleFinish(parts: string[], ctx: LineContext, acc: Map<string, number[][]> | null) {\n // finish file1:1-10,20-30 file2:5-7\n const map = acc ?? new Map<string, number[][]>();\n const args = parts.slice(1);\n for (const token of args) {\n const [path, rangesText] = token.split(':', 2);\n if (!path || !rangesText) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid finish token '${token}'`);\n }\n const rangeSpecs = rangesText.split(',').filter(Boolean);\n for (const spec of rangeSpecs) {\n const [s, e] = spec.split('-').map(v => parseInt(v, 10));\n if (!Number.isFinite(s) || !Number.isFinite(e) || e < s) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid range '${spec}'`);\n }\n const arr = map.get(path) ?? 
[];\n arr.push([s, e]);\n map.set(path, arr);\n }\n }\n return map;\n }\n}\n\n\n"],"mappings":";AAGO,IAAM,wBAAN,cAAoC,MAAM;AAAA,EAC/C,YAAY,SAAiB;AAC3B,UAAM,OAAO;AACb,SAAK,OAAO;AAAA,EACd;AACF;AAIO,IAAM,oBAAN,MAAwB;AAAA,EACZ,oBAAoB;AAAA,EAErC,MAAM,MAA0B;AAC9B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,IAAI,UAAU,gCAAgC;AAAA,IACtD;AACA,UAAM,QAAQ,KAAK,MAAM,OAAO,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AACnD,UAAM,WAAuB,CAAC;AAC9B,QAAI,oBAAoD;AAExD,UAAM,QAAQ,CAAC,MAAM,QAAQ;AAC3B,UAAI,CAAC,QAAQ,KAAK,WAAW,GAAG,EAAG;AACnC,YAAM,MAAmB,EAAE,YAAY,MAAM,GAAG,KAAK,KAAK;AAC1D,YAAM,QAAQ,KAAK,UAAU,MAAM,GAAG;AACtC,UAAI,MAAM,WAAW,EAAG;AACxB,YAAM,MAAM,MAAM,CAAC;AACnB,cAAQ,KAAK;AAAA,QACX,KAAK;AACH,eAAK,cAAc,OAAO,KAAK,QAAQ;AACvC;AAAA,QACF,KAAK;AACH,eAAK,WAAW,OAAO,KAAK,QAAQ;AACpC;AAAA,QACF,KAAK;AACH,eAAK,WAAW,OAAO,KAAK,QAAQ;AACpC;AAAA,QACF,KAAK;AACH,8BAAoB,KAAK,aAAa,OAAO,KAAK,iBAAiB;AACnE;AAAA,QACF;AACE,gBAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,0BAA0B,GAAG,GAAG;AAAA,MAC1F;AAAA,IACF,CAAC;AAED,QAAI,mBAAmB;AACrB,YAAM,MAAM;AACZ,YAAM,UAAU,CAAC,GAAG,IAAI,QAAQ,CAAC;AACjC,YAAM,eAAe,QAAQ,IAAI,CAAC,CAAC,MAAM,MAAM,OAAO;AAAA,QACpD;AAAA,QACA,OAAO,CAAC,GAAG,MAAM,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;AAAA,MAC/C,EAAE;AACF,eAAS,KAAK,EAAE,MAAM,UAAU,WAAW,EAAE,OAAO,aAAa,EAAE,CAAC;AAAA,IACtE;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,UAAU,MAAc,KAA4B;AAC1D,QAAI;AAEF,YAAM,QAAkB,CAAC;AACzB,UAAI,UAAU;AACd,UAAI,WAAW;AACf,eAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,cAAM,KAAK,KAAK,CAAC;AACjB,YAAI,OAAO,OAAO,KAAK,IAAI,CAAC,MAAM,MAAM;AACtC,qBAAW,CAAC;AACZ,qBAAW;AAAA,QACb,WAAW,CAAC,YAAY,KAAK,KAAK,EAAE,GAAG;AACrC,cAAI,SAAS;AACX,kBAAM,KAAK,OAAO;AAClB,sBAAU;AAAA,UACZ;AAAA,QACF,OAAO;AACL,qBAAW;AAAA,QACb;AAAA,MACF;AACA,UAAI,QAAS,OAAM,KAAK,OAAO;AAC/B,aAAO;AAAA,IACT,QAAQ;AACN,YAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,yBAAyB;AAAA,IACjF;AAAA,EACF;AAAA,EAEQ,cAAc,OAAiB,KAAkB,UAAsB;AAE7E,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,2BAA2B;AAAA,IACnF;AACA,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,UAAU,MAAM,CAAC,GAAG,QAAQ,UAAU,EAAE,KAAK;AACnD,aAAS,KAAK,EAAE,MAAM,WAAW,WAAW,EAAE,MAAM,QAAQ,EAAE,CAAC;AAAA,EACjE;AAAA;AAAA,EAIQ,WAAW,OAAiB,KAAkB,UAAsB;AAE1E,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,wCAAwC;AAAA,IAChG;AACA,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,CAAC,IAAI,WAAW,GAAG,KAAK,CAAC,IAAI,SAAS,GAAG,GAAG;AAC9C,YAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,sCAAsC;AAAA,IAC9F;AACA,aAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,SAAS,IAAI,MAAM,GAAG,EAAE,GAAG,MAAM,MAAM,CAAC,EAAE,EAAE,CAAC;AAAA,EAC1F;AAAA,EAEQ,WAAW,OAAiB,KAAkB,UAAsB;AAE1E,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,8CAA8C;AAAA,IACtG;AACA,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,WAAW,KAAK,QAAQ,GAAG;AACjC,QAAI,aAAa,IAAI;AACnB,eAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,KAAK,EAAE,CAAC;AACzD;AAAA,IACF;AACA,UAAM,OAAO,KAAK,MAAM,GAAG,QAAQ;AACnC,UAAM,QAAQ,KAAK,MAAM,WAAW,CAAC;AACrC,UAAM,CAAC,GAAG,CAAC,IAAI,MAAM,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,GAAG,EAAE,CAAC;AACxD,QAAI,CAAC,OAAO,SAAS,CAAC,KAAK,CAAC,OAAO,SAAS,CAAC,GAAG;AAC9C,YAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,yBAAyB,KAAK,GAAG;AAAA,IACzF;AACA,aAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,OAAO,GAAG,KAAK,EAAE,EAAE,CAAC;AAAA,EACvE;AAAA,EAEQ,aAAa,OAAiB,KAAkB,KAAqC;AAE3F,UAAM,MAAM,OAAO,oBAAI,IAAwB;AAC/C,UAAM,OAAO,MAAM,MAAM,CAAC;AAC1B,eAAW,SAAS,MAAM;AACxB,YAAM,CAAC,MAAM,UAAU,IAAI,MAAM,MAAM,KAAK,CAAC;AAC7C,UAAI,CAAC,QAAQ,CAAC,YAAY;AACxB,cAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,2BAA2B,KAAK,GAAG;AAAA,MAC3F;AACA,YAAM,aAAa,WAAW,MAAM,GAAG,EAAE,OAAO,OAAO;AACvD,iBAAW,QAAQ,YAAY;AAC7B,cAAM,CAAC,GAAG,CAAC,IAAI,KAAK,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,GAAG,EAAE,CAAC;AACvD,YAAI,CAAC,OAAO,SAAS,CAAC,KAAK,CAAC,OAAO,SAAS,CAAC,KAAK,IAAI,GAAG;AACvD,gBAAM,IAAI,sBAAsB,QAAQ,
IAAI,UAAU,oBAAoB,IAAI,GAAG;AAAA,QACnF;AACA,cAAM,MAAM,IAAI,IAAI,IAAI,KAAK,CAAC;AAC9B,YAAI,KAAK,CAAC,GAAG,CAAC,CAAC;AACf,YAAI,IAAI,MAAM,GAAG;AAAA,MACnB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;","names":[]}
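The parser above consumes a plain-text, one-command-per-line protocol rather than JSON tool calls. An illustrative input and the tool calls it yields (paths and ranges here are made up; the argument shapes follow the handlers above):

```ts
// Example assistant output in the warp_grep command format.
const response = [
  "analyse src/api",
  "grep 'createUser' src",
  "read src/api/auth.ts:10-80",
  "finish src/api/auth.ts:25-50,75-80 src/models/user.ts:10-15",
].join("\n");

// new LLMResponseParser().parse(response) yields (finish is always emitted last):
// [
//   { name: "analyse", arguments: { path: "src/api", pattern: null } },
//   { name: "grep",    arguments: { pattern: "createUser", path: "src" } },
//   { name: "read",    arguments: { path: "src/api/auth.ts", start: 10, end: 80 } },
//   { name: "finish",  arguments: { files: [
//     { path: "src/api/auth.ts", lines: [[25, 50], [75, 80]] },
//     { path: "src/models/user.ts", lines: [[10, 15]] },
//   ] } },
// ]
```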
@@ -0,0 +1,85 @@
+ // tools/warp_grep/agent/prompt.ts
+ var SYSTEM_PROMPT = `You are a code search agent. Your task is to find relevant code snippets based on a search query.
+
+ <workflow>
+ You operate in exactly 3 rounds of tool exploration, followed by a final answer:
+
+ 1. In each round, you can make MULTIPLE tool calls (up to 8) to search in parallel. All tool results will be returned together after each round.
+ 2. After your third round of tool calls, your next turn MUST be a single call to the \`finish\` tool with all the context you have found.
+ </workflow>
+
+ <tool_calling>
+ You have tools at your disposal to solve the coding task. Follow these rules regarding tool calls:
+
+ ### 1. \`analyse\` - Explore Directories
+ Explore directory structure in a tree-like format.
+ **Syntax:** \`analyse <path> [pattern]\`
+ - \`<path>\`: Directory path to analyze (defaults to \`.\`)
+ - \`[pattern]\`: Optional regex pattern to filter names
+
+ For example:
+ \`\`\`
+ analyse src/api
+ analyse . "test"
+ \`\`\`
+
+ ### 2. \`read\` - Read File Contents
+ Read entire files or specific line ranges.
+ **Syntax:** \`read <path>[:start-end]\`
+ - \`<path>\`: File path to read
+ - \`[:start-end]\`: Optional 1-based, inclusive line range
+
+ For example:
+ \`\`\`
+ read src/main.py
+ read src/database/connection.py:10-50
+ \`\`\`
+
+ ### 3. \`grep\` - Search with Regex
+ Search for regex patterns across files using ripgrep.
+ **Syntax:** \`grep '<pattern>' <path>\`
+ - \`'<pattern>'\`: Regex pattern (always wrap in single quotes)
+ - \`<path>\`: Directory or file to search (use \`.\` for the repo root)
+
+ For example:
+ \`\`\`
+ grep 'create_user' .
+ grep 'import.*requests' src/api
+ grep 'class\\\\s+AuthService' controllers/auth.py
+ \`\`\`
+
+ ### 4. \`finish\` - Submit Final Answer
+ Submit your findings when complete.
+ **Syntax:** \`finish <file1:range1,range2...> [file2:range3...]\`
+ - Provide file paths with colon-separated, comma-separated line ranges
+
+ For example:
+ \`\`\`
+ finish src/api/auth.py:25-50,75-80 src/models/user.py:10-15
+ \`\`\`
+ </tool_calling>
+
+ <strategy>
+ - Use the \`analyse\`, \`grep\`, and \`read\` tools to gather information about the codebase.
+ - Leverage the tools smartly to make full use of their potential
+ - Make parallel tool calls within each round to investigate multiple paths or files efficiently
+ - Be systematic and thorough within your 3-round limit
+ </strategy>
+
+ <output_format>
+ - Only output tool calls themselves
+ - Do not include explanatory text, reasoning, or commentary
+ - Each tool call should be on its own line
+ - After 3 rounds of exploration, call \`finish\` with all relevant code snippets you found
+ </output_format>
+
+ Begin your exploration now to find code relevant to the query.`;
+ function getSystemPrompt() {
+ return SYSTEM_PROMPT;
+ }
+
+ export {
+ SYSTEM_PROMPT,
+ getSystemPrompt
+ };
+ //# sourceMappingURL=chunk-HKZB23U7.js.map
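The prompt above fixes the interaction shape that the runner and parser expect: a few exploration rounds of bare tool-call lines, then a single `finish` call. For illustration only (file names invented), an early-round assistant turn might be:

```
analyse src
grep 'refreshToken' src
read src/api/auth.ts:1-60
```

and the closing turn a single line:

```
finish src/api/auth.ts:25-50,75-80 src/models/user.ts:10-15
```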