@morphllm/morphsdk 0.2.57 → 0.2.58

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (158)
  1. package/dist/anthropic-CaFUHxBW.d.ts +89 -0
  2. package/dist/{chunk-6X5UOY7B.js → chunk-2CASO3ZO.js} +46 -79
  3. package/dist/chunk-2CASO3ZO.js.map +1 -0
  4. package/dist/chunk-374N3GIA.js +118 -0
  5. package/dist/chunk-374N3GIA.js.map +1 -0
  6. package/dist/chunk-3IQIT6MC.js +65 -0
  7. package/dist/chunk-3IQIT6MC.js.map +1 -0
  8. package/dist/chunk-4VGOBA2J.js +57 -0
  9. package/dist/chunk-4VGOBA2J.js.map +1 -0
  10. package/dist/chunk-527P5X2E.js +98 -0
  11. package/dist/chunk-527P5X2E.js.map +1 -0
  12. package/dist/chunk-6N6ZYZYD.js +74 -0
  13. package/dist/chunk-6N6ZYZYD.js.map +1 -0
  14. package/dist/chunk-6Y5JB4JC.js +195 -0
  15. package/dist/chunk-6Y5JB4JC.js.map +1 -0
  16. package/dist/{chunk-QFIHUCTF.js → chunk-7EIHYJSG.js} +18 -18
  17. package/dist/chunk-7EIHYJSG.js.map +1 -0
  18. package/dist/{chunk-TICMYDII.js → chunk-APP75CBN.js} +33 -16
  19. package/dist/chunk-APP75CBN.js.map +1 -0
  20. package/dist/chunk-ILJ3J5IA.js +72 -0
  21. package/dist/chunk-ILJ3J5IA.js.map +1 -0
  22. package/dist/chunk-ISWL67SF.js +1 -0
  23. package/dist/chunk-KW7OEGZK.js +9 -0
  24. package/dist/chunk-KW7OEGZK.js.map +1 -0
  25. package/dist/chunk-Q5AHGIQO.js +205 -0
  26. package/dist/chunk-Q5AHGIQO.js.map +1 -0
  27. package/dist/{chunk-TJIUA27P.js → chunk-XT5ZO6ES.js} +9 -5
  28. package/dist/chunk-XT5ZO6ES.js.map +1 -0
  29. package/dist/{chunk-LVPVVLTI.js → chunk-YV75OQTE.js} +105 -17
  30. package/dist/chunk-YV75OQTE.js.map +1 -0
  31. package/dist/{chunk-ZJIIICRA.js → chunk-ZO4PPFCZ.js} +60 -29
  32. package/dist/chunk-ZO4PPFCZ.js.map +1 -0
  33. package/dist/{client-CFoR--IU.d.ts → client-CextMMm9.d.ts} +10 -15
  34. package/dist/client.cjs +687 -341
  35. package/dist/client.cjs.map +1 -1
  36. package/dist/client.d.ts +3 -2
  37. package/dist/client.js +14 -14
  38. package/dist/finish-kXAcUJyB.d.ts +33 -0
  39. package/dist/gemini-CE80Pbdy.d.ts +117 -0
  40. package/dist/index.cjs +700 -341
  41. package/dist/index.cjs.map +1 -1
  42. package/dist/index.d.ts +4 -3
  43. package/dist/index.js +16 -15
  44. package/dist/openai-Fvpqln7F.d.ts +89 -0
  45. package/dist/tools/warp_grep/agent/config.cjs +8 -4
  46. package/dist/tools/warp_grep/agent/config.cjs.map +1 -1
  47. package/dist/tools/warp_grep/agent/config.d.ts +7 -2
  48. package/dist/tools/warp_grep/agent/config.js +1 -1
  49. package/dist/tools/warp_grep/agent/formatter.cjs +32 -15
  50. package/dist/tools/warp_grep/agent/formatter.cjs.map +1 -1
  51. package/dist/tools/warp_grep/agent/formatter.d.ts +1 -1
  52. package/dist/tools/warp_grep/agent/formatter.js +1 -1
  53. package/dist/tools/warp_grep/agent/parser.cjs +104 -17
  54. package/dist/tools/warp_grep/agent/parser.cjs.map +1 -1
  55. package/dist/tools/warp_grep/agent/parser.d.ts +3 -5
  56. package/dist/tools/warp_grep/agent/parser.js +1 -3
  57. package/dist/tools/warp_grep/agent/prompt.cjs +132 -56
  58. package/dist/tools/warp_grep/agent/prompt.cjs.map +1 -1
  59. package/dist/tools/warp_grep/agent/prompt.d.ts +1 -1
  60. package/dist/tools/warp_grep/agent/prompt.js +1 -1
  61. package/dist/tools/warp_grep/agent/runner.cjs +459 -192
  62. package/dist/tools/warp_grep/agent/runner.cjs.map +1 -1
  63. package/dist/tools/warp_grep/agent/runner.d.ts +1 -0
  64. package/dist/tools/warp_grep/agent/runner.js +6 -8
  65. package/dist/tools/warp_grep/agent/types.cjs.map +1 -1
  66. package/dist/tools/warp_grep/agent/types.d.ts +9 -2
  67. package/dist/tools/warp_grep/anthropic.cjs +650 -260
  68. package/dist/tools/warp_grep/anthropic.cjs.map +1 -1
  69. package/dist/tools/warp_grep/anthropic.d.ts +4 -74
  70. package/dist/tools/warp_grep/anthropic.js +13 -15
  71. package/dist/tools/warp_grep/client.cjs +1593 -0
  72. package/dist/tools/warp_grep/client.cjs.map +1 -0
  73. package/dist/tools/warp_grep/client.d.ts +87 -0
  74. package/dist/tools/warp_grep/client.js +26 -0
  75. package/dist/tools/warp_grep/gemini.cjs +1587 -0
  76. package/dist/tools/warp_grep/gemini.cjs.map +1 -0
  77. package/dist/tools/warp_grep/gemini.d.ts +7 -0
  78. package/dist/tools/warp_grep/gemini.js +34 -0
  79. package/dist/tools/warp_grep/harness.cjs +556 -220
  80. package/dist/tools/warp_grep/harness.cjs.map +1 -1
  81. package/dist/tools/warp_grep/harness.d.ts +50 -119
  82. package/dist/tools/warp_grep/harness.js +33 -41
  83. package/dist/tools/warp_grep/harness.js.map +1 -1
  84. package/dist/tools/warp_grep/index.cjs +812 -346
  85. package/dist/tools/warp_grep/index.cjs.map +1 -1
  86. package/dist/tools/warp_grep/index.d.ts +11 -6
  87. package/dist/tools/warp_grep/index.js +43 -22
  88. package/dist/tools/warp_grep/openai.cjs +650 -258
  89. package/dist/tools/warp_grep/openai.cjs.map +1 -1
  90. package/dist/tools/warp_grep/openai.d.ts +4 -74
  91. package/dist/tools/warp_grep/openai.js +13 -13
  92. package/dist/tools/warp_grep/providers/local.cjs +66 -27
  93. package/dist/tools/warp_grep/providers/local.cjs.map +1 -1
  94. package/dist/tools/warp_grep/providers/local.d.ts +4 -9
  95. package/dist/tools/warp_grep/providers/local.js +2 -2
  96. package/dist/tools/warp_grep/providers/remote.cjs +211 -0
  97. package/dist/tools/warp_grep/providers/remote.cjs.map +1 -0
  98. package/dist/tools/warp_grep/providers/remote.d.ts +67 -0
  99. package/dist/tools/warp_grep/providers/remote.js +9 -0
  100. package/dist/tools/warp_grep/providers/types.cjs.map +1 -1
  101. package/dist/tools/warp_grep/providers/types.d.ts +7 -15
  102. package/dist/tools/warp_grep/vercel.cjs +662 -277
  103. package/dist/tools/warp_grep/vercel.cjs.map +1 -1
  104. package/dist/tools/warp_grep/vercel.d.ts +4 -51
  105. package/dist/tools/warp_grep/vercel.js +16 -14
  106. package/dist/types-a_hxdPI6.d.ts +144 -0
  107. package/dist/vercel-3yjvfmVB.d.ts +66 -0
  108. package/package.json +12 -2
  109. package/dist/chunk-6X5UOY7B.js.map +0 -1
  110. package/dist/chunk-73RQWOQC.js +0 -16
  111. package/dist/chunk-73RQWOQC.js.map +0 -1
  112. package/dist/chunk-7OQOOB3R.js +0 -1
  113. package/dist/chunk-CFF636UC.js +0 -70
  114. package/dist/chunk-CFF636UC.js.map +0 -1
  115. package/dist/chunk-EK7OQPWD.js +0 -44
  116. package/dist/chunk-EK7OQPWD.js.map +0 -1
  117. package/dist/chunk-GJ5TYNRD.js +0 -107
  118. package/dist/chunk-GJ5TYNRD.js.map +0 -1
  119. package/dist/chunk-HQO45BAJ.js +0 -14
  120. package/dist/chunk-HQO45BAJ.js.map +0 -1
  121. package/dist/chunk-IMYQOKFO.js +0 -83
  122. package/dist/chunk-IMYQOKFO.js.map +0 -1
  123. package/dist/chunk-KBQWGT5L.js +0 -77
  124. package/dist/chunk-KBQWGT5L.js.map +0 -1
  125. package/dist/chunk-LVPVVLTI.js.map +0 -1
  126. package/dist/chunk-QFIHUCTF.js.map +0 -1
  127. package/dist/chunk-TICMYDII.js.map +0 -1
  128. package/dist/chunk-TJIUA27P.js.map +0 -1
  129. package/dist/chunk-WETRQJGU.js +0 -129
  130. package/dist/chunk-WETRQJGU.js.map +0 -1
  131. package/dist/chunk-ZJIIICRA.js.map +0 -1
  132. package/dist/core-CpkYEi_T.d.ts +0 -158
  133. package/dist/tools/warp_grep/tools/analyse.cjs +0 -40
  134. package/dist/tools/warp_grep/tools/analyse.cjs.map +0 -1
  135. package/dist/tools/warp_grep/tools/analyse.d.ts +0 -10
  136. package/dist/tools/warp_grep/tools/analyse.js +0 -8
  137. package/dist/tools/warp_grep/tools/finish.cjs +0 -69
  138. package/dist/tools/warp_grep/tools/finish.cjs.map +0 -1
  139. package/dist/tools/warp_grep/tools/finish.d.ts +0 -10
  140. package/dist/tools/warp_grep/tools/finish.js +0 -10
  141. package/dist/tools/warp_grep/tools/grep.cjs +0 -38
  142. package/dist/tools/warp_grep/tools/grep.cjs.map +0 -1
  143. package/dist/tools/warp_grep/tools/grep.d.ts +0 -8
  144. package/dist/tools/warp_grep/tools/grep.js +0 -15
  145. package/dist/tools/warp_grep/tools/grep.js.map +0 -1
  146. package/dist/tools/warp_grep/tools/read.cjs +0 -38
  147. package/dist/tools/warp_grep/tools/read.cjs.map +0 -1
  148. package/dist/tools/warp_grep/tools/read.d.ts +0 -9
  149. package/dist/tools/warp_grep/tools/read.js +0 -8
  150. package/dist/tools/warp_grep/utils/format.cjs +0 -42
  151. package/dist/tools/warp_grep/utils/format.cjs.map +0 -1
  152. package/dist/tools/warp_grep/utils/format.d.ts +0 -4
  153. package/dist/tools/warp_grep/utils/format.js +0 -18
  154. package/dist/tools/warp_grep/utils/format.js.map +0 -1
  155. /package/dist/{chunk-7OQOOB3R.js.map → chunk-ISWL67SF.js.map} +0 -0
  156. /package/dist/tools/warp_grep/{tools/analyse.js.map → client.js.map} +0 -0
  157. /package/dist/tools/warp_grep/{tools/finish.js.map → gemini.js.map} +0 -0
  158. /package/dist/tools/warp_grep/{tools/read.js.map → providers/remote.js.map} +0 -0
package/dist/index.d.ts CHANGED
@@ -1,13 +1,14 @@
- export { A as AnthropicToolFactory, M as MorphClient, a as MorphClientConfig, O as OpenAIToolFactory, V as VercelToolFactory } from './client-CFoR--IU.js';
+ export { A as AnthropicToolFactory, M as MorphClient, a as MorphClientConfig, O as OpenAIToolFactory, V as VercelToolFactory } from './client-CextMMm9.js';
  export { FastApplyClient, applyEdit } from './tools/fastapply/core.js';
  export { CodebaseSearchClient } from './tools/codebase_search/core.js';
- export { W as WarpGrepClient, a as WarpGrepClientConfig, d as WarpGrepContext, b as WarpGrepInput, c as WarpGrepResult, e as WarpGrepToolConfig } from './core-CpkYEi_T.js';
+ export { WarpGrepClient } from './tools/warp_grep/client.js';
  export { BrowserClient } from './tools/browser/core.js';
  export { MorphGit } from './git/client.js';
  export { MorphGitConfig } from './git/types.js';
+ export { W as WarpGrepClientConfig, c as WarpGrepContext, a as WarpGrepInput, b as WarpGrepResult, d as WarpGrepToolConfig } from './types-a_hxdPI6.js';
+ export { GrepResult, ListDirectoryEntry, ReadResult, WarpGrepProvider } from './tools/warp_grep/providers/types.js';
  export { LocalRipgrepProvider } from './tools/warp_grep/providers/local.js';
  export { AgentRunResult, ChatMessage, SessionConfig } from './tools/warp_grep/agent/types.js';
- export { AnalyseEntry, GrepResult, ReadResult, WarpGrepProvider } from './tools/warp_grep/providers/types.js';
  export { AnthropicRouter, GeminiRouter, OpenAIRouter, RawRouter } from './modelrouter/core.js';
  export { ApplyEditConfig, ApplyEditInput, ApplyEditResult, EditChanges, EditFileConfig, EditFileInput, EditFileResult } from './tools/fastapply/types.js';
  export { CodeSearchResult, CodebaseSearchConfig, CodebaseSearchInput, CodebaseSearchResult, SearchStats } from './tools/codebase_search/types.js';
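For consumers of the root entry, the practical change above is that WarpGrepClient now resolves from its own module and the provider types export ListDirectoryEntry in place of AnalyseEntry; the public import specifier is unchanged. A minimal, hypothetical consumer sketch against these declarations (type names only, no runtime behavior assumed):

import { WarpGrepClient } from '@morphllm/morphsdk';
import type {
  WarpGrepClientConfig,
  WarpGrepResult,
  GrepResult,
  ListDirectoryEntry, // replaces AnalyseEntry from 0.2.57
  ReadResult,
  WarpGrepProvider,
} from '@morphllm/morphsdk';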
package/dist/index.js CHANGED
@@ -1,30 +1,31 @@
- import "./chunk-7OQOOB3R.js";
+ import "./chunk-ISWL67SF.js";
+ import "./chunk-ILJ3J5IA.js";
  import {
  AnthropicToolFactory,
  MorphClient,
  OpenAIToolFactory,
  VercelToolFactory
- } from "./chunk-QFIHUCTF.js";
- import "./chunk-KBQWGT5L.js";
- import "./chunk-IMYQOKFO.js";
- import "./chunk-CFF636UC.js";
+ } from "./chunk-7EIHYJSG.js";
+ import "./chunk-3IQIT6MC.js";
+ import "./chunk-6N6ZYZYD.js";
+ import "./chunk-4VGOBA2J.js";
+ import "./chunk-KW7OEGZK.js";
  import {
  WarpGrepClient
- } from "./chunk-GJ5TYNRD.js";
- import "./chunk-6X5UOY7B.js";
- import "./chunk-HQO45BAJ.js";
- import "./chunk-73RQWOQC.js";
- import "./chunk-LVPVVLTI.js";
- import "./chunk-WETRQJGU.js";
+ } from "./chunk-374N3GIA.js";
+ import "./chunk-2CASO3ZO.js";
+ import "./chunk-527P5X2E.js";
+ import "./chunk-6Y5JB4JC.js";
+ import "./chunk-APP75CBN.js";
+ import "./chunk-YV75OQTE.js";
+ import "./chunk-Q5AHGIQO.js";
  import {
  LocalRipgrepProvider
- } from "./chunk-ZJIIICRA.js";
+ } from "./chunk-ZO4PPFCZ.js";
  import "./chunk-G2RSY56Q.js";
  import "./chunk-SMGZ6A64.js";
  import "./chunk-TPP2UGQP.js";
- import "./chunk-EK7OQPWD.js";
- import "./chunk-TJIUA27P.js";
- import "./chunk-TICMYDII.js";
+ import "./chunk-XT5ZO6ES.js";
  import "./chunk-UBX7QYBD.js";
  import "./chunk-GJU7UOFL.js";
  import "./chunk-76DJEQEP.js";
package/dist/openai-Fvpqln7F.d.ts ADDED
@@ -0,0 +1,89 @@
+ import { ChatCompletionTool } from 'openai/resources/chat/completions';
+ import { formatResult } from './tools/warp_grep/client.js';
+ import { getSystemPrompt } from './tools/warp_grep/agent/prompt.js';
+ import { d as WarpGrepToolConfig, b as WarpGrepResult } from './types-a_hxdPI6.js';
+
+ /**
+ * OpenAI SDK adapter for morph-warp-grep tool
+ */
+
+ /**
+ * OpenAI-native warp grep tool definition
+ *
+ * @example
+ * ```typescript
+ * import OpenAI from 'openai';
+ * import { warpGrepTool, execute } from '@morphllm/morphsdk/tools/warp-grep/openai';
+ *
+ * const client = new OpenAI();
+ * const response = await client.chat.completions.create({
+ * model: 'gpt-4o',
+ * tools: [warpGrepTool],
+ * messages: [{ role: 'user', content: 'Find authentication middleware' }]
+ * });
+ *
+ * // Execute the tool call
+ * const result = await execute({ query: '...' }, { repoRoot: '.' });
+ * ```
+ */
+ declare const warpGrepTool: ChatCompletionTool;
+ /**
+ * Execute warp grep search
+ *
+ * @param input - Tool input with query
+ * @param config - Configuration with repoRoot and optional provider
+ * @returns Search results
+ */
+ declare function execute(input: {
+ query: string;
+ } | string, config: WarpGrepToolConfig): Promise<WarpGrepResult>;
+
+ /**
+ * Create a custom warp grep tool with configuration and methods
+ *
+ * @param config - Configuration options
+ * @returns Tool definition with execute and formatResult methods
+ *
+ * @example Local usage
+ * ```typescript
+ * import OpenAI from 'openai';
+ * import { createWarpGrepTool } from '@morphllm/morphsdk/tools/warp-grep/openai';
+ *
+ * const tool = createWarpGrepTool({ repoRoot: '.' });
+ *
+ * const client = new OpenAI();
+ * const response = await client.chat.completions.create({
+ * model: 'gpt-4o',
+ * tools: [tool],
+ * messages: [{ role: 'user', content: 'Find authentication middleware' }]
+ * });
+ * ```
+ *
+ * @example Remote sandbox (E2B, Modal, etc.)
+ * ```typescript
+ * const tool = createMorphWarpGrepTool({
+ * repoRoot: '/home/repo',
+ * remoteCommands: {
+ * grep: async (pattern, path) => (await sandbox.run(`rg '${pattern}' '${path}'`)).stdout,
+ * read: async (path, start, end) => (await sandbox.run(`sed -n '${start},${end}p' '${path}'`)).stdout,
+ * listDir: async (path, maxDepth) => (await sandbox.run(`find '${path}' -maxdepth ${maxDepth}`)).stdout,
+ * },
+ * });
+ * ```
+ */
+ declare function createWarpGrepTool(config: WarpGrepToolConfig): ChatCompletionTool & {
+ execute: (input: unknown) => Promise<WarpGrepResult>;
+ formatResult: (result: WarpGrepResult) => string;
+ getSystemPrompt: () => string;
+ };
+
+ declare const openai_createWarpGrepTool: typeof createWarpGrepTool;
+ declare const openai_execute: typeof execute;
+ declare const openai_formatResult: typeof formatResult;
+ declare const openai_getSystemPrompt: typeof getSystemPrompt;
+ declare const openai_warpGrepTool: typeof warpGrepTool;
+ declare namespace openai {
+ export { openai_createWarpGrepTool as createWarpGrepTool, warpGrepTool as default, openai_execute as execute, openai_formatResult as formatResult, openai_getSystemPrompt as getSystemPrompt, openai_warpGrepTool as warpGrepTool };
+ }
+
+ export { createWarpGrepTool as c, execute as e, openai as o, warpGrepTool as w };
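The declarations above expose a tool object whose execute, formatResult, and getSystemPrompt members complement the JSDoc examples. A hedged sketch of one plausible wiring into an OpenAI chat-completions tool-call round trip (the response shapes come from the OpenAI SDK, not from this package, and the system-prompt placement is an assumption):

import OpenAI from 'openai';
import { createWarpGrepTool } from '@morphllm/morphsdk/tools/warp-grep/openai';

const tool = createWarpGrepTool({ repoRoot: '.' });
const client = new OpenAI();

const response = await client.chat.completions.create({
  model: 'gpt-4o',
  tools: [tool],
  messages: [
    { role: 'system', content: tool.getSystemPrompt() }, // assumption: prompt used as a system message
    { role: 'user', content: 'Find authentication middleware' },
  ],
});

// Run the tool call the model produced and format the result for the next turn.
const call = response.choices[0].message.tool_calls?.[0];
if (call && call.type === 'function') {
  const result = await tool.execute(JSON.parse(call.function.arguments));
  console.log(tool.formatResult(result)); // string suitable for a role: 'tool' message
}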
package/dist/tools/warp_grep/agent/config.cjs CHANGED
@@ -26,9 +26,13 @@ __export(config_exports, {
  });
  module.exports = __toCommonJS(config_exports);
  var AGENT_CONFIG = {
- // Give the model freedom; failsafe cap to prevent infinite loops
- MAX_ROUNDS: 10,
- TIMEOUT_MS: 3e4
+ MAX_TURNS: 4,
+ TIMEOUT_MS: 3e4,
+ MAX_CONTEXT_CHARS: 54e4,
+ MAX_OUTPUT_LINES: 200,
+ MAX_READ_LINES: 800,
+ MAX_LIST_DEPTH: 3,
+ LIST_TIMEOUT_MS: 2e3
  };
  var BUILTIN_EXCLUDES = [
  // Version control
@@ -110,7 +114,7 @@ var BUILTIN_EXCLUDES = [
  ".*"
  ];
  var DEFAULT_EXCLUDES = (process.env.MORPH_WARP_GREP_EXCLUDE || "").split(",").map((s) => s.trim()).filter(Boolean).concat(BUILTIN_EXCLUDES);
- var DEFAULT_MODEL = "morph-warp-grep";
+ var DEFAULT_MODEL = "morph-warp-grep-v1";
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  AGENT_CONFIG,
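The limits above replace the old MAX_ROUNDS cap, and DEFAULT_MODEL moves to "morph-warp-grep-v1". DEFAULT_EXCLUDES is still assembled at module load from MORPH_WARP_GREP_EXCLUDE plus BUILTIN_EXCLUDES, so the variable has to be set before the module is first imported. A hedged sketch (the subpath specifier is assumed for illustration; the package's export map is not part of this diff):

process.env.MORPH_WARP_GREP_EXCLUDE = 'fixtures,*.snap'; // comma-separated, merged with BUILTIN_EXCLUDES
const config = await import('@morphllm/morphsdk/tools/warp_grep/agent/config'); // assumed subpath
console.log(config.DEFAULT_MODEL);          // "morph-warp-grep-v1" in 0.2.58
console.log(config.AGENT_CONFIG.MAX_TURNS); // 4 (0.2.57 used MAX_ROUNDS: 10)
console.log(config.DEFAULT_EXCLUDES.includes('fixtures')); // true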
package/dist/tools/warp_grep/agent/config.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../../tools/warp_grep/agent/config.ts"],"sourcesContent":["// Agent configuration defaults for morph-warp-grep\n// Hard-coded: SDK does not expose control over rounds or timeout.\nexport const AGENT_CONFIG = {\n // Give the model freedom; failsafe cap to prevent infinite loops\n MAX_ROUNDS: 10,\n TIMEOUT_MS: 30000,\n};\n\n/**\n * Comprehensive exclusion list for directories and files\n * These patterns are used with ripgrep's -g flag\n */\nconst BUILTIN_EXCLUDES = [\n // Version control\n '.git', '.svn', '.hg', '.bzr',\n \n // Dependencies\n 'node_modules', 'bower_components', '.pnpm', '.yarn',\n 'vendor', 'packages', 'Pods', '.bundle',\n \n // Python\n '__pycache__', '.pytest_cache', '.mypy_cache', '.ruff_cache',\n '.venv', 'venv', '.tox', '.nox', '.eggs', '*.egg-info',\n \n // Build outputs\n 'dist', 'build', 'out', 'output', 'target', '_build',\n '.next', '.nuxt', '.output', '.vercel', '.netlify',\n \n // Cache directories\n '.cache', '.parcel-cache', '.turbo', '.nx', '.gradle',\n \n // IDE/Editor\n '.idea', '.vscode', '.vs',\n \n // Coverage\n 'coverage', '.coverage', 'htmlcov', '.nyc_output',\n \n // Temporary\n 'tmp', 'temp', '.tmp', '.temp',\n \n // Lock files\n 'package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb',\n 'Cargo.lock', 'Gemfile.lock', 'poetry.lock',\n \n // Binary/minified\n '*.min.js', '*.min.css', '*.bundle.js',\n '*.wasm', '*.so', '*.dll', '*.pyc',\n '*.map', '*.js.map',\n \n // Hidden directories catch-all\n '.*',\n];\n\nexport const DEFAULT_EXCLUDES = (process.env.MORPH_WARP_GREP_EXCLUDE || '')\n .split(',')\n .map(s => s.trim())\n .filter(Boolean)\n .concat(BUILTIN_EXCLUDES);\n\nexport const DEFAULT_MODEL = 'morph-warp-grep';\n\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEO,IAAM,eAAe;AAAA;AAAA,EAE1B,YAAY;AAAA,EACZ,YAAY;AACd;AAMA,IAAM,mBAAmB;AAAA;AAAA,EAEvB;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAO;AAAA;AAAA,EAGvB;AAAA,EAAgB;AAAA,EAAoB;AAAA,EAAS;AAAA,EAC7C;AAAA,EAAU;AAAA,EAAY;AAAA,EAAQ;AAAA;AAAA,EAG9B;AAAA,EAAe;AAAA,EAAiB;AAAA,EAAe;AAAA,EAC/C;AAAA,EAAS;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAS;AAAA;AAAA,EAG1C;AAAA,EAAQ;AAAA,EAAS;AAAA,EAAO;AAAA,EAAU;AAAA,EAAU;AAAA,EAC5C;AAAA,EAAS;AAAA,EAAS;AAAA,EAAW;AAAA,EAAW;AAAA;AAAA,EAGxC;AAAA,EAAU;AAAA,EAAiB;AAAA,EAAU;AAAA,EAAO;AAAA;AAAA,EAG5C;AAAA,EAAS;AAAA,EAAW;AAAA;AAAA,EAGpB;AAAA,EAAY;AAAA,EAAa;AAAA,EAAW;AAAA;AAAA,EAGpC;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAQ;AAAA;AAAA,EAGvB;AAAA,EAAqB;AAAA,EAAa;AAAA,EAAkB;AAAA,EACpD;AAAA,EAAc;AAAA,EAAgB;AAAA;AAAA,EAG9B;AAAA,EAAY;AAAA,EAAa;AAAA,EACzB;AAAA,EAAU;AAAA,EAAQ;AAAA,EAAS;AAAA,EAC3B;AAAA,EAAS;AAAA;AAAA,EAGT;AACF;AAEO,IAAM,oBAAoB,QAAQ,IAAI,2BAA2B,IACrE,MAAM,GAAG,EACT,IAAI,OAAK,EAAE,KAAK,CAAC,EACjB,OAAO,OAAO,EACd,OAAO,gBAAgB;AAEnB,IAAM,gBAAgB;","names":[]}
+ {"version":3,"sources":["../../../../tools/warp_grep/agent/config.ts"],"sourcesContent":["export const AGENT_CONFIG = {\n MAX_TURNS: 4,\n TIMEOUT_MS: 30_000,\n MAX_CONTEXT_CHARS: 540_000,\n MAX_OUTPUT_LINES: 200,\n MAX_READ_LINES: 800,\n MAX_LIST_DEPTH: 3,\n LIST_TIMEOUT_MS: 2_000,\n};\n\n/**\n * Comprehensive exclusion list for directories and files\n * These patterns are used with ripgrep's -g flag\n */\nconst BUILTIN_EXCLUDES = [\n // Version control\n '.git', '.svn', '.hg', '.bzr',\n \n // Dependencies\n 'node_modules', 'bower_components', '.pnpm', '.yarn',\n 'vendor', 'packages', 'Pods', '.bundle',\n \n // Python\n '__pycache__', '.pytest_cache', '.mypy_cache', '.ruff_cache',\n '.venv', 'venv', '.tox', '.nox', '.eggs', '*.egg-info',\n \n // Build outputs\n 'dist', 'build', 'out', 'output', 'target', '_build',\n '.next', '.nuxt', '.output', '.vercel', '.netlify',\n \n // Cache directories\n '.cache', '.parcel-cache', '.turbo', '.nx', '.gradle',\n \n // IDE/Editor\n '.idea', '.vscode', '.vs',\n \n // Coverage\n 'coverage', '.coverage', 'htmlcov', '.nyc_output',\n \n // Temporary\n 'tmp', 'temp', '.tmp', '.temp',\n \n // Lock files\n 'package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb',\n 'Cargo.lock', 'Gemfile.lock', 'poetry.lock',\n \n // Binary/minified\n '*.min.js', '*.min.css', '*.bundle.js',\n '*.wasm', '*.so', '*.dll', '*.pyc',\n '*.map', '*.js.map',\n \n // Hidden directories catch-all\n '.*',\n];\n\nexport const DEFAULT_EXCLUDES = (process.env.MORPH_WARP_GREP_EXCLUDE || '')\n .split(',')\n .map(s => s.trim())\n .filter(Boolean)\n .concat(BUILTIN_EXCLUDES);\n\nexport const DEFAULT_MODEL = 'morph-warp-grep-v1';\n\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAO,IAAM,eAAe;AAAA,EAC1B,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,mBAAmB;AAAA,EACnB,kBAAkB;AAAA,EAClB,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAChB,iBAAiB;AACnB;AAMA,IAAM,mBAAmB;AAAA;AAAA,EAEvB;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAO;AAAA;AAAA,EAGvB;AAAA,EAAgB;AAAA,EAAoB;AAAA,EAAS;AAAA,EAC7C;AAAA,EAAU;AAAA,EAAY;AAAA,EAAQ;AAAA;AAAA,EAG9B;AAAA,EAAe;AAAA,EAAiB;AAAA,EAAe;AAAA,EAC/C;AAAA,EAAS;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAS;AAAA;AAAA,EAG1C;AAAA,EAAQ;AAAA,EAAS;AAAA,EAAO;AAAA,EAAU;AAAA,EAAU;AAAA,EAC5C;AAAA,EAAS;AAAA,EAAS;AAAA,EAAW;AAAA,EAAW;AAAA;AAAA,EAGxC;AAAA,EAAU;AAAA,EAAiB;AAAA,EAAU;AAAA,EAAO;AAAA;AAAA,EAG5C;AAAA,EAAS;AAAA,EAAW;AAAA;AAAA,EAGpB;AAAA,EAAY;AAAA,EAAa;AAAA,EAAW;AAAA;AAAA,EAGpC;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAQ;AAAA;AAAA,EAGvB;AAAA,EAAqB;AAAA,EAAa;AAAA,EAAkB;AAAA,EACpD;AAAA,EAAc;AAAA,EAAgB;AAAA;AAAA,EAG9B;AAAA,EAAY;AAAA,EAAa;AAAA,EACzB;AAAA,EAAU;AAAA,EAAQ;AAAA,EAAS;AAAA,EAC3B;AAAA,EAAS;AAAA;AAAA,EAGT;AACF;AAEO,IAAM,oBAAoB,QAAQ,IAAI,2BAA2B,IACrE,MAAM,GAAG,EACT,IAAI,OAAK,EAAE,KAAK,CAAC,EACjB,OAAO,OAAO,EACd,OAAO,gBAAgB;AAEnB,IAAM,gBAAgB;","names":[]}
package/dist/tools/warp_grep/agent/config.d.ts CHANGED
@@ -1,8 +1,13 @@
  declare const AGENT_CONFIG: {
- MAX_ROUNDS: number;
+ MAX_TURNS: number;
  TIMEOUT_MS: number;
+ MAX_CONTEXT_CHARS: number;
+ MAX_OUTPUT_LINES: number;
+ MAX_READ_LINES: number;
+ MAX_LIST_DEPTH: number;
+ LIST_TIMEOUT_MS: number;
  };
  declare const DEFAULT_EXCLUDES: string[];
- declare const DEFAULT_MODEL = "morph-warp-grep";
+ declare const DEFAULT_MODEL = "morph-warp-grep-v1";

  export { AGENT_CONFIG, DEFAULT_EXCLUDES, DEFAULT_MODEL };
package/dist/tools/warp_grep/agent/config.js CHANGED
@@ -2,7 +2,7 @@ import {
  AGENT_CONFIG,
  DEFAULT_EXCLUDES,
  DEFAULT_MODEL
- } from "../../../chunk-TJIUA27P.js";
+ } from "../../../chunk-XT5ZO6ES.js";
  import "../../../chunk-PZ5AY32C.js";
  export {
  AGENT_CONFIG,
package/dist/tools/warp_grep/agent/formatter.cjs CHANGED
@@ -39,8 +39,8 @@ var ToolOutputFormatter = class {
  switch (name) {
  case "read":
  return this.formatRead(safeArgs, payload, isError);
- case "analyse":
- return this.formatAnalyse(safeArgs, payload, isError);
+ case "list_directory":
+ return this.formatListDirectory(safeArgs, payload, isError);
  case "grep":
  return this.formatGrep(safeArgs, payload, isError);
  default:
@@ -54,38 +54,55 @@ ${payload}
  return payload;
  }
  const path = this.asString(args.path) || "...";
- return `<file path="${path}">
+ const start = args.start;
+ const end = args.end;
+ const linesArray = args.lines;
+ const attributes = [`path="${path}"`];
+ if (linesArray && linesArray.length > 0) {
+ const rangeStr = linesArray.map(([s, e]) => `${s}-${e}`).join(",");
+ attributes.push(`lines="${rangeStr}"`);
+ } else if (start !== void 0 && end !== void 0) {
+ attributes.push(`lines="${start}-${end}"`);
+ }
+ return `<read ${attributes.join(" ")}>
  ${payload}
- </file>`;
+ </read>`;
  }
- formatAnalyse(args, payload, isError) {
+ formatListDirectory(args, payload, isError) {
  const path = this.asString(args.path) || ".";
+ const pattern = this.asString(args.pattern);
+ const attributes = [`path="${path}"`];
+ if (pattern) {
+ attributes.push(`pattern="${pattern}"`);
+ }
  if (isError) {
- return `<analyse_results path="${path}" status="error">
- ${payload}
- </analyse_results>`;
+ attributes.push('status="error"');
  }
- return `<analyse_results path="${path}">
+ return `<list_directory ${attributes.join(" ")}>
  ${payload}
- </analyse_results>`;
+ </list_directory>`;
  }
  formatGrep(args, payload, isError) {
  const pattern = this.asString(args.pattern);
- const path = this.asString(args.path);
+ const subDir = this.asString(args.path);
+ const glob = this.asString(args.glob);
  const attributes = [];
  if (pattern !== void 0) {
  attributes.push(`pattern="${pattern}"`);
  }
- if (path !== void 0) {
- attributes.push(`path="${path}"`);
+ if (subDir !== void 0) {
+ attributes.push(`sub_dir="${subDir}"`);
+ }
+ if (glob !== void 0) {
+ attributes.push(`glob="${glob}"`);
  }
  if (isError) {
  attributes.push('status="error"');
  }
  const attrText = attributes.length ? ` ${attributes.join(" ")}` : "";
- return `<grep_output${attrText}>
+ return `<grep${attrText}>
  ${payload}
- </grep_output>`;
+ </grep>`;
  }
  asString(value) {
  if (value === null || value === void 0) {
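The formatter now wraps tool output in <read>, <list_directory>, and <grep> tags (with lines, pattern, sub_dir, and glob attributes) instead of <file>, <analyse_results>, and <grep_output>. A hedged sketch of the resulting strings, using the shared formatAgentToolOutput helper exported alongside ToolOutputFormatter (the import subpath is assumed from the dist layout):

import { formatAgentToolOutput } from '@morphllm/morphsdk/tools/warp_grep/agent/formatter'; // assumed subpath

formatAgentToolOutput('read', { path: 'src/auth.ts', start: 1, end: 40 }, 'file text');
// -> <read path="src/auth.ts" lines="1-40"> ... </read>          (was <file path="...">)

formatAgentToolOutput('grep', { pattern: 'TODO', path: 'src', glob: '*.ts' }, 'matches');
// -> <grep pattern="TODO" sub_dir="src" glob="*.ts"> ... </grep>  (was <grep_output pattern="..." path="...">)

formatAgentToolOutput('list_directory', { path: 'src/' }, 'entries');
// -> <list_directory path="src/"> ... </list_directory>          (was <analyse_results path="...">)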
package/dist/tools/warp_grep/agent/formatter.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../../tools/warp_grep/agent/formatter.ts"],"sourcesContent":["export class ToolOutputFormatter {\n\tformat(\n\t\ttoolName: string,\n\t\targs: Record<string, unknown> | null | undefined,\n\t\toutput: string,\n\t\toptions: { isError?: boolean } = {}\n\t): string {\n\t\tconst name = (toolName ?? \"\").trim();\n\t\tif (!name) {\n\t\t\treturn \"\";\n\t\t}\n\t\tconst payload = (output as any)?.toString?.()?.trim?.() ?? \"\";\n\t\tconst isError = Boolean(options.isError);\n\t\tconst safeArgs = args ?? {};\n\n\t\tif (!payload && !isError) {\n\t\t\treturn \"\";\n\t\t}\n\n\t\tswitch (name) {\n\t\t\tcase \"read\":\n\t\t\t\treturn this.formatRead(safeArgs, payload, isError);\n\t\t\tcase \"analyse\":\n\t\t\t\treturn this.formatAnalyse(safeArgs, payload, isError);\n\t\t\tcase \"grep\":\n\t\t\t\treturn this.formatGrep(safeArgs, payload, isError);\n\t\t\tdefault:\n\t\t\t\treturn payload ? `<tool_output>\\n${payload}\\n</tool_output>` : \"\";\n\t\t}\n\t}\n\n\tprivate formatRead(args: Record<string, unknown>, payload: string, isError: boolean): string {\n\t\tif (isError) {\n\t\t\treturn payload;\n\t\t}\n\t\tconst path = this.asString(args.path) || \"...\";\n\t\treturn `<file path=\"${path}\">\\n${payload}\\n</file>`;\n\t}\n\n\tprivate formatAnalyse(args: Record<string, unknown>, payload: string, isError: boolean): string {\n\t\tconst path = this.asString(args.path) || \".\";\n\t\tif (isError) {\n\t\t\treturn `<analyse_results path=\"${path}\" status=\"error\">\\n${payload}\\n</analyse_results>`;\n\t\t}\n\t\treturn `<analyse_results path=\"${path}\">\\n${payload}\\n</analyse_results>`;\n\t}\n\n\tprivate formatGrep(args: Record<string, unknown>, payload: string, isError: boolean): string {\n\t\tconst pattern = this.asString(args.pattern);\n\t\tconst path = this.asString(args.path);\n\t\tconst attributes: string[] = [];\n\t\tif (pattern !== undefined) {\n\t\t\tattributes.push(`pattern=\"${pattern}\"`);\n\t\t}\n\t\tif (path !== undefined) {\n\t\t\tattributes.push(`path=\"${path}\"`);\n\t\t}\n\t\tif (isError) {\n\t\t\tattributes.push('status=\"error\"');\n\t\t}\n\t\tconst attrText = attributes.length ? 
` ${attributes.join(\" \")}` : \"\";\n\t\treturn `<grep_output${attrText}>\\n${payload}\\n</grep_output>`;\n\t}\n\n\tprivate asString(value: unknown): string | undefined {\n\t\tif (value === null || value === undefined) {\n\t\t\treturn undefined;\n\t\t}\n\t\treturn String(value);\n\t}\n}\n\nconst sharedFormatter = new ToolOutputFormatter();\n\nexport function formatAgentToolOutput(\n\ttoolName: string,\n\targs: Record<string, unknown> | null | undefined,\n\toutput: string,\n\toptions: { isError?: boolean } = {}\n): string {\n\treturn sharedFormatter.format(toolName, args, output, options);\n}\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAO,IAAM,sBAAN,MAA0B;AAAA,EAChC,OACC,UACA,MACA,QACA,UAAiC,CAAC,GACzB;AACT,UAAM,QAAQ,YAAY,IAAI,KAAK;AACnC,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AACA,UAAM,UAAW,QAAgB,WAAW,GAAG,OAAO,KAAK;AAC3D,UAAM,UAAU,QAAQ,QAAQ,OAAO;AACvC,UAAM,WAAW,QAAQ,CAAC;AAE1B,QAAI,CAAC,WAAW,CAAC,SAAS;AACzB,aAAO;AAAA,IACR;AAEA,YAAQ,MAAM;AAAA,MACb,KAAK;AACJ,eAAO,KAAK,WAAW,UAAU,SAAS,OAAO;AAAA,MAClD,KAAK;AACJ,eAAO,KAAK,cAAc,UAAU,SAAS,OAAO;AAAA,MACrD,KAAK;AACJ,eAAO,KAAK,WAAW,UAAU,SAAS,OAAO;AAAA,MAClD;AACC,eAAO,UAAU;AAAA,EAAkB,OAAO;AAAA,kBAAqB;AAAA,IACjE;AAAA,EACD;AAAA,EAEQ,WAAW,MAA+B,SAAiB,SAA0B;AAC5F,QAAI,SAAS;AACZ,aAAO;AAAA,IACR;AACA,UAAM,OAAO,KAAK,SAAS,KAAK,IAAI,KAAK;AACzC,WAAO,eAAe,IAAI;AAAA,EAAO,OAAO;AAAA;AAAA,EACzC;AAAA,EAEQ,cAAc,MAA+B,SAAiB,SAA0B;AAC/F,UAAM,OAAO,KAAK,SAAS,KAAK,IAAI,KAAK;AACzC,QAAI,SAAS;AACZ,aAAO,0BAA0B,IAAI;AAAA,EAAsB,OAAO;AAAA;AAAA,IACnE;AACA,WAAO,0BAA0B,IAAI;AAAA,EAAO,OAAO;AAAA;AAAA,EACpD;AAAA,EAEQ,WAAW,MAA+B,SAAiB,SAA0B;AAC5F,UAAM,UAAU,KAAK,SAAS,KAAK,OAAO;AAC1C,UAAM,OAAO,KAAK,SAAS,KAAK,IAAI;AACpC,UAAM,aAAuB,CAAC;AAC9B,QAAI,YAAY,QAAW;AAC1B,iBAAW,KAAK,YAAY,OAAO,GAAG;AAAA,IACvC;AACA,QAAI,SAAS,QAAW;AACvB,iBAAW,KAAK,SAAS,IAAI,GAAG;AAAA,IACjC;AACA,QAAI,SAAS;AACZ,iBAAW,KAAK,gBAAgB;AAAA,IACjC;AACA,UAAM,WAAW,WAAW,SAAS,IAAI,WAAW,KAAK,GAAG,CAAC,KAAK;AAClE,WAAO,eAAe,QAAQ;AAAA,EAAM,OAAO;AAAA;AAAA,EAC5C;AAAA,EAEQ,SAAS,OAAoC;AACpD,QAAI,UAAU,QAAQ,UAAU,QAAW;AAC1C,aAAO;AAAA,IACR;AACA,WAAO,OAAO,KAAK;AAAA,EACpB;AACD;AAEA,IAAM,kBAAkB,IAAI,oBAAoB;AAEzC,SAAS,sBACf,UACA,MACA,QACA,UAAiC,CAAC,GACzB;AACT,SAAO,gBAAgB,OAAO,UAAU,MAAM,QAAQ,OAAO;AAC9D;","names":[]}
+ {"version":3,"sources":["../../../../tools/warp_grep/agent/formatter.ts"],"sourcesContent":["export class ToolOutputFormatter {\n\tformat(\n\t\ttoolName: string,\n\t\targs: Record<string, unknown> | null | undefined,\n\t\toutput: string,\n\t\toptions: { isError?: boolean } = {}\n\t): string {\n\t\tconst name = (toolName ?? \"\").trim();\n\t\tif (!name) {\n\t\t\treturn \"\";\n\t\t}\n\t\tconst payload = (output as any)?.toString?.()?.trim?.() ?? \"\";\n\t\tconst isError = Boolean(options.isError);\n\t\tconst safeArgs = args ?? {};\n\n\t\tif (!payload && !isError) {\n\t\t\treturn \"\";\n\t\t}\n\n\t\tswitch (name) {\n\t\t\tcase \"read\":\n\t\t\t\treturn this.formatRead(safeArgs, payload, isError);\n\t\t\tcase \"list_directory\":\n\t\t\t\treturn this.formatListDirectory(safeArgs, payload, isError);\n\t\t\tcase \"grep\":\n\t\t\t\treturn this.formatGrep(safeArgs, payload, isError);\n\t\t\tdefault:\n\t\t\t\treturn payload ? `<tool_output>\\n${payload}\\n</tool_output>` : \"\";\n\t\t}\n\t}\n\n\tprivate formatRead(args: Record<string, unknown>, payload: string, isError: boolean): string {\n\t\tif (isError) {\n\t\t\treturn payload;\n\t\t}\n\t\tconst path = this.asString(args.path) || \"...\";\n\t\tconst start = args.start as number | undefined;\n\t\tconst end = args.end as number | undefined;\n\t\tconst linesArray = args.lines as Array<[number, number]> | undefined;\n\t\tconst attributes: string[] = [`path=\"${path}\"`];\n\t\tif (linesArray && linesArray.length > 0) {\n\t\t\tconst rangeStr = linesArray.map(([s, e]) => `${s}-${e}`).join(',');\n\t\t\tattributes.push(`lines=\"${rangeStr}\"`);\n\t\t} else if (start !== undefined && end !== undefined) {\n\t\t\tattributes.push(`lines=\"${start}-${end}\"`);\n\t\t}\n\t\treturn `<read ${attributes.join(\" \")}>\\n${payload}\\n</read>`;\n\t}\n\n\tprivate formatListDirectory(args: Record<string, unknown>, payload: string, isError: boolean): string {\n\t\tconst path = this.asString(args.path) || \".\";\n\t\tconst pattern = this.asString(args.pattern);\n\t\tconst attributes: string[] = [`path=\"${path}\"`];\n\t\tif (pattern) {\n\t\t\tattributes.push(`pattern=\"${pattern}\"`);\n\t\t}\n\t\tif (isError) {\n\t\t\tattributes.push('status=\"error\"');\n\t\t}\n\t\treturn `<list_directory ${attributes.join(\" \")}>\\n${payload}\\n</list_directory>`;\n\t}\n\n\tprivate formatGrep(args: Record<string, unknown>, payload: string, isError: boolean): string {\n\t\tconst pattern = this.asString(args.pattern);\n\t\tconst subDir = this.asString(args.path);\n\t\tconst glob = this.asString(args.glob);\n\t\tconst attributes: string[] = [];\n\t\tif (pattern !== undefined) {\n\t\t\tattributes.push(`pattern=\"${pattern}\"`);\n\t\t}\n\t\tif (subDir !== undefined) {\n\t\t\tattributes.push(`sub_dir=\"${subDir}\"`);\n\t\t}\n\t\tif (glob !== undefined) {\n\t\t\tattributes.push(`glob=\"${glob}\"`);\n\t\t}\n\t\tif (isError) {\n\t\t\tattributes.push('status=\"error\"');\n\t\t}\n\t\tconst attrText = attributes.length ? 
` ${attributes.join(\" \")}` : \"\";\n\t\treturn `<grep${attrText}>\\n${payload}\\n</grep>`;\n\t}\n\n\tprivate asString(value: unknown): string | undefined {\n\t\tif (value === null || value === undefined) {\n\t\t\treturn undefined;\n\t\t}\n\t\treturn String(value);\n\t}\n}\n\nconst sharedFormatter = new ToolOutputFormatter();\n\nexport function formatAgentToolOutput(\n\ttoolName: string,\n\targs: Record<string, unknown> | null | undefined,\n\toutput: string,\n\toptions: { isError?: boolean } = {}\n): string {\n\treturn sharedFormatter.format(toolName, args, output, options);\n}\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAO,IAAM,sBAAN,MAA0B;AAAA,EAChC,OACC,UACA,MACA,QACA,UAAiC,CAAC,GACzB;AACT,UAAM,QAAQ,YAAY,IAAI,KAAK;AACnC,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AACA,UAAM,UAAW,QAAgB,WAAW,GAAG,OAAO,KAAK;AAC3D,UAAM,UAAU,QAAQ,QAAQ,OAAO;AACvC,UAAM,WAAW,QAAQ,CAAC;AAE1B,QAAI,CAAC,WAAW,CAAC,SAAS;AACzB,aAAO;AAAA,IACR;AAEA,YAAQ,MAAM;AAAA,MACb,KAAK;AACJ,eAAO,KAAK,WAAW,UAAU,SAAS,OAAO;AAAA,MAClD,KAAK;AACJ,eAAO,KAAK,oBAAoB,UAAU,SAAS,OAAO;AAAA,MAC3D,KAAK;AACJ,eAAO,KAAK,WAAW,UAAU,SAAS,OAAO;AAAA,MAClD;AACC,eAAO,UAAU;AAAA,EAAkB,OAAO;AAAA,kBAAqB;AAAA,IACjE;AAAA,EACD;AAAA,EAEQ,WAAW,MAA+B,SAAiB,SAA0B;AAC5F,QAAI,SAAS;AACZ,aAAO;AAAA,IACR;AACA,UAAM,OAAO,KAAK,SAAS,KAAK,IAAI,KAAK;AACzC,UAAM,QAAQ,KAAK;AACnB,UAAM,MAAM,KAAK;AACjB,UAAM,aAAa,KAAK;AACxB,UAAM,aAAuB,CAAC,SAAS,IAAI,GAAG;AAC9C,QAAI,cAAc,WAAW,SAAS,GAAG;AACxC,YAAM,WAAW,WAAW,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,GAAG;AACjE,iBAAW,KAAK,UAAU,QAAQ,GAAG;AAAA,IACtC,WAAW,UAAU,UAAa,QAAQ,QAAW;AACpD,iBAAW,KAAK,UAAU,KAAK,IAAI,GAAG,GAAG;AAAA,IAC1C;AACA,WAAO,SAAS,WAAW,KAAK,GAAG,CAAC;AAAA,EAAM,OAAO;AAAA;AAAA,EAClD;AAAA,EAEQ,oBAAoB,MAA+B,SAAiB,SAA0B;AACrG,UAAM,OAAO,KAAK,SAAS,KAAK,IAAI,KAAK;AACzC,UAAM,UAAU,KAAK,SAAS,KAAK,OAAO;AAC1C,UAAM,aAAuB,CAAC,SAAS,IAAI,GAAG;AAC9C,QAAI,SAAS;AACZ,iBAAW,KAAK,YAAY,OAAO,GAAG;AAAA,IACvC;AACA,QAAI,SAAS;AACZ,iBAAW,KAAK,gBAAgB;AAAA,IACjC;AACA,WAAO,mBAAmB,WAAW,KAAK,GAAG,CAAC;AAAA,EAAM,OAAO;AAAA;AAAA,EAC5D;AAAA,EAEQ,WAAW,MAA+B,SAAiB,SAA0B;AAC5F,UAAM,UAAU,KAAK,SAAS,KAAK,OAAO;AAC1C,UAAM,SAAS,KAAK,SAAS,KAAK,IAAI;AACtC,UAAM,OAAO,KAAK,SAAS,KAAK,IAAI;AACpC,UAAM,aAAuB,CAAC;AAC9B,QAAI,YAAY,QAAW;AAC1B,iBAAW,KAAK,YAAY,OAAO,GAAG;AAAA,IACvC;AACA,QAAI,WAAW,QAAW;AACzB,iBAAW,KAAK,YAAY,MAAM,GAAG;AAAA,IACtC;AACA,QAAI,SAAS,QAAW;AACvB,iBAAW,KAAK,SAAS,IAAI,GAAG;AAAA,IACjC;AACA,QAAI,SAAS;AACZ,iBAAW,KAAK,gBAAgB;AAAA,IACjC;AACA,UAAM,WAAW,WAAW,SAAS,IAAI,WAAW,KAAK,GAAG,CAAC,KAAK;AAClE,WAAO,QAAQ,QAAQ;AAAA,EAAM,OAAO;AAAA;AAAA,EACrC;AAAA,EAEQ,SAAS,OAAoC;AACpD,QAAI,UAAU,QAAQ,UAAU,QAAW;AAC1C,aAAO;AAAA,IACR;AACA,WAAO,OAAO,KAAK;AAAA,EACpB;AACD;AAEA,IAAM,kBAAkB,IAAI,oBAAoB;AAEzC,SAAS,sBACf,UACA,MACA,QACA,UAAiC,CAAC,GACzB;AACT,SAAO,gBAAgB,OAAO,UAAU,MAAM,QAAQ,OAAO;AAC9D;","names":[]}
package/dist/tools/warp_grep/agent/formatter.d.ts CHANGED
@@ -3,7 +3,7 @@ declare class ToolOutputFormatter {
  isError?: boolean;
  }): string;
  private formatRead;
- private formatAnalyse;
+ private formatListDirectory;
  private formatGrep;
  private asString;
  }
package/dist/tools/warp_grep/agent/formatter.js CHANGED
@@ -1,7 +1,7 @@
  import {
  ToolOutputFormatter,
  formatAgentToolOutput
- } from "../../../chunk-TICMYDII.js";
+ } from "../../../chunk-APP75CBN.js";
  import "../../../chunk-PZ5AY32C.js";
  export {
  ToolOutputFormatter,
package/dist/tools/warp_grep/agent/parser.cjs CHANGED
@@ -20,19 +20,108 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  // tools/warp_grep/agent/parser.ts
  var parser_exports = {};
  __export(parser_exports, {
- LLMResponseParseError: () => LLMResponseParseError,
  LLMResponseParser: () => LLMResponseParser
  });
  module.exports = __toCommonJS(parser_exports);
- var LLMResponseParseError = class extends Error {
- constructor(message) {
- super(message);
- this.name = "LLMResponseParseError";
+ var VALID_COMMANDS = ["list_directory", "grep", "read", "finish"];
+ function isValidCommand(name) {
+ return VALID_COMMANDS.includes(name);
+ }
+ function getXmlElementText(xml, tagName) {
+ const regex = new RegExp(`<${tagName}>([\\s\\S]*?)</${tagName}>`, "i");
+ const match = xml.match(regex);
+ return match ? match[1].trim() : null;
+ }
+ function parseNestedXmlTools(text) {
+ const tools = [];
+ const toolRegex = /<([a-z_][a-z0-9_]*)>([\s\S]*?)<\/\1>/gi;
+ let match;
+ while ((match = toolRegex.exec(text)) !== null) {
+ const rawToolName = match[1].toLowerCase();
+ const content = match[2];
+ if (!isValidCommand(rawToolName)) continue;
+ const toolName = rawToolName;
+ if (toolName === "list_directory") {
+ const path = getXmlElementText(content, "path");
+ const pattern = getXmlElementText(content, "pattern");
+ if (path) {
+ tools.push({ name: "list_directory", arguments: { path, pattern } });
+ }
+ } else if (toolName === "grep") {
+ const pattern = getXmlElementText(content, "pattern");
+ const subDir = getXmlElementText(content, "sub_dir");
+ const glob = getXmlElementText(content, "glob");
+ if (pattern) {
+ tools.push({
+ name: "grep",
+ arguments: {
+ pattern,
+ path: subDir || ".",
+ ...glob && { glob }
+ }
+ });
+ }
+ } else if (toolName === "read") {
+ const path = getXmlElementText(content, "path");
+ const linesStr = getXmlElementText(content, "lines");
+ if (path) {
+ const args = { path };
+ if (linesStr) {
+ const ranges = [];
+ for (const rangeStr of linesStr.split(",")) {
+ const trimmed = rangeStr.trim();
+ if (!trimmed) continue;
+ const [s, e] = trimmed.split("-").map((v) => parseInt(v.trim(), 10));
+ if (Number.isFinite(s) && Number.isFinite(e)) {
+ ranges.push([s, e]);
+ } else if (Number.isFinite(s)) {
+ ranges.push([s, s]);
+ }
+ }
+ if (ranges.length === 1) {
+ args.start = ranges[0][0];
+ args.end = ranges[0][1];
+ } else if (ranges.length > 1) {
+ args.lines = ranges;
+ }
+ }
+ tools.push({ name: "read", arguments: args });
+ }
+ } else if (toolName === "finish") {
+ const fileRegex = /<file>([\s\S]*?)<\/file>/gi;
+ const files = [];
+ let fileMatch;
+ while ((fileMatch = fileRegex.exec(content)) !== null) {
+ const fileContent = fileMatch[1];
+ const filePath = getXmlElementText(fileContent, "path");
+ const linesStr = getXmlElementText(fileContent, "lines");
+ if (filePath && linesStr) {
+ const ranges = [];
+ for (const rangeStr of linesStr.split(",")) {
+ if (rangeStr.trim() === "*") {
+ ranges.push([1, 999999]);
+ } else {
+ const [s, e] = rangeStr.split("-").map((v) => parseInt(v.trim(), 10));
+ if (Number.isFinite(s) && Number.isFinite(e)) {
+ ranges.push([s, e]);
+ }
+ }
+ }
+ if (ranges.length > 0) {
+ files.push({ path: filePath, lines: ranges });
+ }
+ }
+ }
+ if (files.length > 0) {
+ tools.push({ name: "finish", arguments: { files } });
+ }
+ }
  }
- };
- var VALID_COMMANDS = ["analyse", "grep", "read", "finish"];
+ return tools;
+ }
  function preprocessText(text) {
  let processed = text.replace(/<think>[\s\S]*?<\/think>/gi, "");
+ const nestedTools = parseNestedXmlTools(processed);
  const openingTagRegex = /<tool_call>|<tool>/gi;
  const closingTagRegex = /<\/tool_call>|<\/tool>/gi;
  const openingMatches = processed.match(openingTagRegex) || [];
@@ -69,7 +158,7 @@ function preprocessText(text) {
  }
  }
  }
- return toolCallLines;
+ return { lines: toolCallLines, nestedTools };
  }
  var LLMResponseParser = class {
  finishSpecSplitRe = /,(?=[^,\s]+:)/;
@@ -77,8 +166,8 @@ var LLMResponseParser = class {
  if (typeof text !== "string") {
  throw new TypeError("Command text must be a string.");
  }
- const lines = preprocessText(text);
- const commands = [];
+ const { lines, nestedTools } = preprocessText(text);
+ const commands = [...nestedTools];
  let finishAccumulator = null;
  lines.forEach((line) => {
  if (!line || line.startsWith("#")) return;
@@ -86,8 +175,8 @@ var LLMResponseParser = class {
  if (parts.length === 0) return;
  const cmd = parts[0];
  switch (cmd) {
- case "analyse":
- this.handleAnalyse(parts, line, commands);
+ case "list_directory":
+ this.handleListDirectory(parts, line, commands);
  break;
  case "grep":
  this.handleGrep(parts, line, commands);
@@ -138,18 +227,17 @@ var LLMResponseParser = class {
  skip(message) {
  return { name: "_skip", arguments: { message } };
  }
- handleAnalyse(parts, rawLine, commands) {
+ handleListDirectory(parts, rawLine, commands) {
  if (parts.length < 2) {
  commands.push(this.skip(
- `[SKIPPED] Your command "${rawLine}" is missing a path. Correct format: analyse <path> [pattern]. Example: analyse src/`
+ `[SKIPPED] Your command "${rawLine}" is missing a path. Correct format: list_directory <path> [pattern]. Example: list_directory src/`
  ));
  return;
  }
  const path = parts[1];
  const pattern = parts[2]?.replace(/^"|"$/g, "") ?? null;
- commands.push({ name: "analyse", arguments: { path, pattern } });
+ commands.push({ name: "list_directory", arguments: { path, pattern } });
  }
- // no glob tool in MCP
  handleGrep(parts, rawLine, commands) {
  if (parts.length < 3) {
  commands.push(this.skip(
@@ -221,7 +309,6 @@ var LLMResponseParser = class {
  };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
- LLMResponseParseError,
  LLMResponseParser
  });
  //# sourceMappingURL=parser.cjs.map
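The rewritten parser accepts nested XML tool calls for list_directory, grep, read, and finish (with <file> children) alongside the older line-based commands, and skips unknown tags silently. A hedged sketch of what LLMResponseParser.parse returns for the new format, based only on the parsing logic above (the import subpath is assumed from the dist layout):

import { LLMResponseParser } from '@morphllm/morphsdk/tools/warp_grep/agent/parser'; // assumed subpath

const calls = new LLMResponseParser().parse(`
  <grep><pattern>authMiddleware</pattern><sub_dir>src</sub_dir><glob>*.ts</glob></grep>
  <read><path>src/middleware/auth.ts</path><lines>1-80,120-140</lines></read>
  <finish><file><path>src/middleware/auth.ts</path><lines>10-60</lines></file></finish>
`);
// calls ≈ [
//   { name: 'grep', arguments: { pattern: 'authMiddleware', path: 'src', glob: '*.ts' } },
//   { name: 'read', arguments: { path: 'src/middleware/auth.ts', lines: [[1, 80], [120, 140]] } },
//   { name: 'finish', arguments: { files: [{ path: 'src/middleware/auth.ts', lines: [[10, 60]] }] } },
// ]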
package/dist/tools/warp_grep/agent/parser.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../../tools/warp_grep/agent/parser.ts"],"sourcesContent":["// Parses assistant lines into structured tool calls\nimport type { ToolCall } from './types.js';\n\n// Keep for backwards compatibility - no longer thrown, but exported for tests\nexport class LLMResponseParseError extends Error {\n constructor(message: string) {\n super(message);\n this.name = 'LLMResponseParseError';\n }\n}\n\n// Valid tool command names\nconst VALID_COMMANDS = ['analyse', 'grep', 'read', 'finish'];\n\n/**\n * Preprocesses text to handle XML tags:\n * 1. Removes <think>...</think> blocks entirely\n * 2. Extracts content from <tool>...</tool> or <tool_call>...</tool_call> tags\n * 3. Passes through raw tool calls (lines starting with valid commands)\n * 4. Discards unclosed <tool...> tags\n */\nfunction preprocessText(text: string): string[] {\n // Step 1: Remove <think>...</think> blocks (including multiline)\n let processed = text.replace(/<think>[\\s\\S]*?<\\/think>/gi, '');\n \n // Step 2: Check for unclosed <tool or <tool_call tags and discard them\n // Find all opening tags and their positions\n const openingTagRegex = /<tool_call>|<tool>/gi;\n const closingTagRegex = /<\\/tool_call>|<\\/tool>/gi;\n \n // Count opening and closing tags\n const openingMatches = processed.match(openingTagRegex) || [];\n const closingMatches = processed.match(closingTagRegex) || [];\n \n // If there are more opening than closing tags, we have unclosed tags\n // In that case, only process complete tag pairs\n if (openingMatches.length > closingMatches.length) {\n // Remove any content after the last complete closing tag\n const lastClosingMatch = /<\\/tool_call>|<\\/tool>/gi;\n let lastClosingIndex = -1;\n let match;\n while ((match = lastClosingMatch.exec(processed)) !== null) {\n lastClosingIndex = match.index + match[0].length;\n }\n if (lastClosingIndex > 0) {\n processed = processed.slice(0, lastClosingIndex);\n }\n }\n \n // Step 3: Extract content from <tool_call>...</tool_call> and <tool>...</tool> tags\n const toolCallLines: string[] = [];\n const toolTagRegex = /<tool_call>([\\s\\S]*?)<\\/tool_call>|<tool>([\\s\\S]*?)<\\/tool>/gi;\n let tagMatch;\n \n while ((tagMatch = toolTagRegex.exec(processed)) !== null) {\n const content = (tagMatch[1] || tagMatch[2] || '').trim();\n if (content) {\n // Split content by newlines in case there are multiple tool calls in one tag\n const lines = content.split(/\\r?\\n/).map(l => l.trim()).filter(l => l);\n toolCallLines.push(...lines);\n }\n }\n \n // Step 4: Also extract raw tool calls (lines starting with valid commands)\n // This provides backwards compatibility\n const allLines = processed.split(/\\r?\\n/).map(l => l.trim());\n for (const line of allLines) {\n if (!line) continue;\n \n // Skip lines that are inside XML tags (already processed above)\n if (line.startsWith('<')) continue;\n \n // Check if line starts with a valid command\n const firstWord = line.split(/\\s/)[0];\n if (VALID_COMMANDS.includes(firstWord)) {\n // Avoid duplicates\n if (!toolCallLines.includes(line)) {\n toolCallLines.push(line);\n }\n }\n }\n \n return toolCallLines;\n}\n\nexport class LLMResponseParser {\n private readonly finishSpecSplitRe = /,(?=[^,\\s]+:)/;\n\n parse(text: string): ToolCall[] {\n if (typeof text !== 'string') {\n // no way we hit this, but sure, we can throw here\n throw new TypeError('Command text must be a string.');\n }\n \n // Preprocess to handle XML tags\n const lines = preprocessText(text);\n \n const commands: ToolCall[] = [];\n let 
finishAccumulator: Map<string, number[][]> | null = null;\n\n lines.forEach((line) => {\n if (!line || line.startsWith('#')) return;\n const parts = this.splitLine(line);\n if (parts.length === 0) return;\n const cmd = parts[0];\n switch (cmd) {\n case 'analyse':\n this.handleAnalyse(parts, line, commands);\n break;\n case 'grep':\n this.handleGrep(parts, line, commands);\n break;\n case 'read':\n this.handleRead(parts, line, commands);\n break;\n case 'finish':\n finishAccumulator = this.handleFinish(parts, line, commands, finishAccumulator);\n break;\n default:\n // Silently ignore unknown commands after preprocessing\n // (they might be remnants of XML or other content)\n break;\n }\n });\n\n if (finishAccumulator) {\n const map = finishAccumulator as Map<string, number[][]>;\n const entries = [...map.entries()];\n const filesPayload = entries.map(([path, ranges]) => ({\n path,\n lines: [...ranges].sort((a, b) => a[0] - b[0]) as Array<[number, number]>,\n }));\n commands.push({ name: 'finish', arguments: { files: filesPayload } });\n }\n return commands;\n }\n\n private splitLine(line: string): string[] {\n // Split by whitespace but keep quoted blocks as one\n const parts: string[] = [];\n let current = '';\n let inSingle = false;\n for (let i = 0; i < line.length; i++) {\n const ch = line[i];\n if (ch === \"'\" && line[i - 1] !== '\\\\') {\n inSingle = !inSingle;\n current += ch;\n } else if (!inSingle && /\\s/.test(ch)) {\n if (current) {\n parts.push(current);\n current = '';\n }\n } else {\n current += ch;\n }\n }\n if (current) parts.push(current);\n return parts;\n }\n\n /** Helper to create a _skip tool call with an error message */\n private skip(message: string): ToolCall {\n return { name: '_skip', arguments: { message } };\n }\n\n private handleAnalyse(parts: string[], rawLine: string, commands: ToolCall[]) {\n // analyse <path> [pattern]\n if (parts.length < 2) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" is missing a path. ` +\n `Correct format: analyse <path> [pattern]. Example: analyse src/`\n ));\n return;\n }\n const path = parts[1];\n const pattern = parts[2]?.replace(/^\"|\"$/g, '') ?? null;\n commands.push({ name: 'analyse', arguments: { path, pattern } });\n }\n\n // no glob tool in MCP\n\n private handleGrep(parts: string[], rawLine: string, commands: ToolCall[]) {\n // grep '<pattern>' <path>\n if (parts.length < 3) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" is missing arguments. ` +\n `Correct format: grep '<pattern>' <path>. Example: grep 'TODO' src/`\n ));\n return;\n }\n let pat = parts[1];\n // Be lenient: accept unquoted patterns by treating the first arg as the pattern\n if (pat.startsWith(\"'\") && pat.endsWith(\"'\")) {\n pat = pat.slice(1, -1);\n }\n // If pattern is empty after processing, skip\n if (!pat) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" has an empty pattern. ` +\n `Provide a non-empty search pattern. Example: grep 'function' src/`\n ));\n return;\n }\n commands.push({ name: 'grep', arguments: { pattern: pat, path: parts[2] } });\n }\n\n private handleRead(parts: string[], rawLine: string, commands: ToolCall[]) {\n // read <path>[:start-end]\n if (parts.length < 2) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" is missing a path. ` +\n `Correct format: read <path> or read <path>:<start>-<end>. 
Example: read src/index.ts:1-50`\n ));\n return;\n }\n const spec = parts[1];\n const rangeIdx = spec.indexOf(':');\n if (rangeIdx === -1) {\n commands.push({ name: 'read', arguments: { path: spec } });\n return;\n }\n const filePath = spec.slice(0, rangeIdx);\n const range = spec.slice(rangeIdx + 1);\n const [s, e] = range.split('-').map(v => parseInt(v, 10));\n // If range is invalid, fallback to reading the whole file\n if (!Number.isFinite(s) || !Number.isFinite(e)) {\n commands.push({ name: 'read', arguments: { path: filePath } });\n return;\n }\n commands.push({ name: 'read', arguments: { path: filePath, start: s, end: e } });\n }\n\n private handleFinish(parts: string[], rawLine: string, commands: ToolCall[], acc: Map<string, number[][]> | null) {\n // finish file1:1-10,20-30 file2:5-7\n const map = acc ?? new Map<string, number[][]>();\n const args = parts.slice(1);\n for (const token of args) {\n const [filePath, rangesText] = token.split(':', 2);\n if (!filePath || !rangesText) {\n // Skip this malformed token, continue processing others\n commands.push(this.skip(\n `[SKIPPED] Invalid finish token \"${token}\". ` +\n `Correct format: finish <path>:<start>-<end>. Example: finish src/index.ts:1-50`\n ));\n continue;\n }\n const rangeSpecs = rangesText.split(',').filter(Boolean);\n for (const spec of rangeSpecs) {\n const [s, e] = spec.split('-').map(v => parseInt(v, 10));\n if (!Number.isFinite(s) || !Number.isFinite(e) || e < s) {\n // Skip this invalid range, continue with others\n commands.push(this.skip(\n `[SKIPPED] Invalid range \"${spec}\" in \"${token}\". ` +\n `Ranges must be <start>-<end> where start <= end. Example: 1-50`\n ));\n continue;\n }\n const arr = map.get(filePath) ?? [];\n arr.push([s, e]);\n map.set(filePath, arr);\n }\n }\n return map;\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAIO,IAAM,wBAAN,cAAoC,MAAM;AAAA,EAC/C,YAAY,SAAiB;AAC3B,UAAM,OAAO;AACb,SAAK,OAAO;AAAA,EACd;AACF;AAGA,IAAM,iBAAiB,CAAC,WAAW,QAAQ,QAAQ,QAAQ;AAS3D,SAAS,eAAe,MAAwB;AAE9C,MAAI,YAAY,KAAK,QAAQ,8BAA8B,EAAE;AAI7D,QAAM,kBAAkB;AACxB,QAAM,kBAAkB;AAGxB,QAAM,iBAAiB,UAAU,MAAM,eAAe,KAAK,CAAC;AAC5D,QAAM,iBAAiB,UAAU,MAAM,eAAe,KAAK,CAAC;AAI5D,MAAI,eAAe,SAAS,eAAe,QAAQ;AAEjD,UAAM,mBAAmB;AACzB,QAAI,mBAAmB;AACvB,QAAI;AACJ,YAAQ,QAAQ,iBAAiB,KAAK,SAAS,OAAO,MAAM;AAC1D,yBAAmB,MAAM,QAAQ,MAAM,CAAC,EAAE;AAAA,IAC5C;AACA,QAAI,mBAAmB,GAAG;AACxB,kBAAY,UAAU,MAAM,GAAG,gBAAgB;AAAA,IACjD;AAAA,EACF;AAGA,QAAM,gBAA0B,CAAC;AACjC,QAAM,eAAe;AACrB,MAAI;AAEJ,UAAQ,WAAW,aAAa,KAAK,SAAS,OAAO,MAAM;AACzD,UAAM,WAAW,SAAS,CAAC,KAAK,SAAS,CAAC,KAAK,IAAI,KAAK;AACxD,QAAI,SAAS;AAEX,YAAM,QAAQ,QAAQ,MAAM,OAAO,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC,EAAE,OAAO,OAAK,CAAC;AACrE,oBAAc,KAAK,GAAG,KAAK;AAAA,IAC7B;AAAA,EACF;AAIA,QAAM,WAAW,UAAU,MAAM,OAAO,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AAC3D,aAAW,QAAQ,UAAU;AAC3B,QAAI,CAAC,KAAM;AAGX,QAAI,KAAK,WAAW,GAAG,EAAG;AAG1B,UAAM,YAAY,KAAK,MAAM,IAAI,EAAE,CAAC;AACpC,QAAI,eAAe,SAAS,SAAS,GAAG;AAEtC,UAAI,CAAC,cAAc,SAAS,IAAI,GAAG;AACjC,sBAAc,KAAK,IAAI;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEO,IAAM,oBAAN,MAAwB;AAAA,EACZ,oBAAoB;AAAA,EAErC,MAAM,MAA0B;AAC9B,QAAI,OAAO,SAAS,UAAU;AAE5B,YAAM,IAAI,UAAU,gCAAgC;AAAA,IACtD;AAGA,UAAM,QAAQ,eAAe,IAAI;AAEjC,UAAM,WAAuB,CAAC;AAC9B,QAAI,oBAAoD;AAExD,UAAM,QAAQ,CAAC,SAAS;AACtB,UAAI,CAAC,QAAQ,KAAK,WAAW,GAAG,EAAG;AACnC,YAAM,QAAQ,KAAK,UAAU,IAAI;AACjC,UAAI,MAAM,WAAW,EAAG;AACxB,YAAM,MAAM,MAAM,CAAC;AACnB,cAAQ,KAAK;AAAA,QACX,KAAK;AACH,eAAK,cAAc,OAAO,MAAM,QAAQ;AACxC;AAAA,QACF,KAAK;AACH,eAAK,WAAW,OAAO,MAAM,QAAQ;AACrC;AAAA,QACF,KAAK;AACH,eAAK,WAAW,OAAO,MAAM,QAAQ;AACrC;AAAA,QACF,KAAK;AACH,8BAAoB,KAAK,aAAa,OAAO,MAAM,UAAU,iBAAiB;AAC9E;AAAA,QACF;AAGE;AAAA,MACJ;AAAA,IACF,CAAC;AAED,QAAI,mBAAmB;AACrB,YAAM,MAAM;AACZ,YAAM,UAAU,CAAC,GAAG,IAAI,QAAQ,CAAC;AACjC,YAAM,eAAe,QAAQ,IAAI,CAAC,CAAC,MAAM,MAAM,OAAO;AAAA,QACpD;AAAA,QACA,OAAO,CAAC,GAAG,MAAM,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;AAAA,MAC/C,EAAE;AACF,eAAS,KAAK,EAAE,MAAM,UAAU,WAAW,EAAE,OAAO,aAAa,EAAE,CAAC;AAAA,IACtE;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,UAAU,MAAwB;AAExC,UAAM,QAAkB,CAAC;AACzB,QAAI,UAAU;AACd,QAAI,WAAW;AACf,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,YAAM,KAAK,KAAK,CAAC;AACjB,UAAI,OAAO,OAAO,KAAK,IAAI,CAAC,MAAM,MAAM;AACtC,mBAAW,CAAC;AACZ,mBAAW;AAAA,MACb,WAAW,CAAC,YAAY,KAAK,KAAK,EAAE,GAAG;AACrC,YAAI,SAAS;AACX,gBAAM,KAAK,OAAO;AAClB,oBAAU;AAAA,QACZ;AAAA,MACF,OAAO;AACL,mBAAW;AAAA,MACb;AAAA,IACF;AACA,QAAI,QAAS,OAAM,KAAK,OAAO;AAC/B,WAAO;AAAA,EACT;AAAA;AAAA,EAGQ,KAAK,SAA2B;AACtC,WAAO,EAAE,MAAM,SAAS,WAAW,EAAE,QAAQ,EAAE;AAAA,EACjD;AAAA,EAEQ,cAAc,OAAiB,SAAiB,UAAsB;AAE5E,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,UAAU,MAAM,CAAC,GAAG,QAAQ,UAAU,EAAE,KAAK;AACnD,aAAS,KAAK,EAAE,MAAM,WAAW,WAAW,EAAE,MAAM,QAAQ,EAAE,CAAC;AAAA,EACjE;AAAA;AAAA,EAIQ,WAAW,OAAiB,SAAiB,UAAsB;AAEzE,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,QAAI,MAAM,MAAM,CAAC;AAEjB,QAAI,IAAI,WAAW,GAAG,KAAK,IAAI,SAAS,GAAG,GAAG;AAC5C,YAAM,IAAI,MAAM,GAAG,EAAE;AAAA,IACvB;AAEA,QAAI,CAAC,KAAK;AACR,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,aAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,SAAS,KAAK,MAAM,MAAM,CAAC,EAAE,EAAE,CAAC;AAAA,EAC7E;AAAA,EAEQ,WAAW,OAAiB,SAAiB,UAAsB;AAEzE,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,UAAM,OAAO,MAAM,CAAC
;AACpB,UAAM,WAAW,KAAK,QAAQ,GAAG;AACjC,QAAI,aAAa,IAAI;AACnB,eAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,KAAK,EAAE,CAAC;AACzD;AAAA,IACF;AACA,UAAM,WAAW,KAAK,MAAM,GAAG,QAAQ;AACvC,UAAM,QAAQ,KAAK,MAAM,WAAW,CAAC;AACrC,UAAM,CAAC,GAAG,CAAC,IAAI,MAAM,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,GAAG,EAAE,CAAC;AAExD,QAAI,CAAC,OAAO,SAAS,CAAC,KAAK,CAAC,OAAO,SAAS,CAAC,GAAG;AAC9C,eAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,SAAS,EAAE,CAAC;AAC7D;AAAA,IACF;AACA,aAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,UAAU,OAAO,GAAG,KAAK,EAAE,EAAE,CAAC;AAAA,EACjF;AAAA,EAEQ,aAAa,OAAiB,SAAiB,UAAsB,KAAqC;AAEhH,UAAM,MAAM,OAAO,oBAAI,IAAwB;AAC/C,UAAM,OAAO,MAAM,MAAM,CAAC;AAC1B,eAAW,SAAS,MAAM;AACxB,YAAM,CAAC,UAAU,UAAU,IAAI,MAAM,MAAM,KAAK,CAAC;AACjD,UAAI,CAAC,YAAY,CAAC,YAAY;AAE5B,iBAAS,KAAK,KAAK;AAAA,UACjB,mCAAmC,KAAK;AAAA,QAE1C,CAAC;AACD;AAAA,MACF;AACA,YAAM,aAAa,WAAW,MAAM,GAAG,EAAE,OAAO,OAAO;AACvD,iBAAW,QAAQ,YAAY;AAC7B,cAAM,CAAC,GAAG,CAAC,IAAI,KAAK,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,GAAG,EAAE,CAAC;AACvD,YAAI,CAAC,OAAO,SAAS,CAAC,KAAK,CAAC,OAAO,SAAS,CAAC,KAAK,IAAI,GAAG;AAEvD,mBAAS,KAAK,KAAK;AAAA,YACjB,4BAA4B,IAAI,SAAS,KAAK;AAAA,UAEhD,CAAC;AACD;AAAA,QACF;AACA,cAAM,MAAM,IAAI,IAAI,QAAQ,KAAK,CAAC;AAClC,YAAI,KAAK,CAAC,GAAG,CAAC,CAAC;AACf,YAAI,IAAI,UAAU,GAAG;AAAA,MACvB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;","names":[]}
+ {"version":3,"sources":["../../../../tools/warp_grep/agent/parser.ts"],"sourcesContent":["// Parses assistant lines into structured tool calls\nimport type { ToolCall } from './types.js';\n\nconst VALID_COMMANDS = ['list_directory', 'grep', 'read', 'finish'] as const;\ntype ValidCommand = typeof VALID_COMMANDS[number];\n\nfunction isValidCommand(name: string): name is ValidCommand {\n return VALID_COMMANDS.includes(name as ValidCommand);\n}\n\nfunction getXmlElementText(xml: string, tagName: string): string | null {\n const regex = new RegExp(`<${tagName}>([\\\\s\\\\S]*?)</${tagName}>`, 'i');\n const match = xml.match(regex);\n return match ? match[1].trim() : null;\n}\n\nfunction parseNestedXmlTools(text: string): ToolCall[] {\n const tools: ToolCall[] = [];\n \n // Match any XML tool tags - this allows graceful handling of unknown tools\n // that might be added in future versions (they are simply ignored)\n const toolRegex = /<([a-z_][a-z0-9_]*)>([\\s\\S]*?)<\\/\\1>/gi;\n let match;\n \n while ((match = toolRegex.exec(text)) !== null) {\n const rawToolName = match[1].toLowerCase();\n const content = match[2];\n \n // Skip unknown tools silently - enables forward compatibility\n if (!isValidCommand(rawToolName)) continue;\n \n const toolName = rawToolName;\n \n if (toolName === 'list_directory') {\n const path = getXmlElementText(content, 'path');\n const pattern = getXmlElementText(content, 'pattern');\n if (path) {\n tools.push({ name: 'list_directory', arguments: { path, pattern } });\n }\n } else if (toolName === 'grep') {\n const pattern = getXmlElementText(content, 'pattern');\n const subDir = getXmlElementText(content, 'sub_dir');\n const glob = getXmlElementText(content, 'glob');\n if (pattern) {\n tools.push({ \n name: 'grep', \n arguments: { \n pattern, \n path: subDir || '.', \n ...(glob && { glob }) \n } \n });\n }\n } else if (toolName === 'read') {\n const path = getXmlElementText(content, 'path');\n const linesStr = getXmlElementText(content, 'lines');\n if (path) {\n const args: Record<string, unknown> = { path };\n if (linesStr) {\n const ranges: Array<[number, number]> = [];\n for (const rangeStr of linesStr.split(',')) {\n const trimmed = rangeStr.trim();\n if (!trimmed) continue;\n const [s, e] = trimmed.split('-').map(v => parseInt(v.trim(), 10));\n if (Number.isFinite(s) && Number.isFinite(e)) {\n ranges.push([s, e]);\n } else if (Number.isFinite(s)) {\n // Single line like \"100\"\n ranges.push([s, s]);\n }\n }\n if (ranges.length === 1) {\n args.start = ranges[0][0];\n args.end = ranges[0][1];\n } else if (ranges.length > 1) {\n args.lines = ranges;\n }\n }\n tools.push({ name: 'read', arguments: args });\n }\n } else if (toolName === 'finish') {\n // Parse nested <file> elements\n const fileRegex = /<file>([\\s\\S]*?)<\\/file>/gi;\n const files: Array<{ path: string; lines: Array<[number, number]> }> = [];\n let fileMatch;\n \n while ((fileMatch = fileRegex.exec(content)) !== null) {\n const fileContent = fileMatch[1];\n const filePath = getXmlElementText(fileContent, 'path');\n const linesStr = getXmlElementText(fileContent, 'lines');\n \n if (filePath && linesStr) {\n const ranges: Array<[number, number]> = [];\n for (const rangeStr of linesStr.split(',')) {\n if (rangeStr.trim() === '*') {\n ranges.push([1, 999999]); // Entire file\n } else {\n const [s, e] = rangeStr.split('-').map(v => parseInt(v.trim(), 10));\n if (Number.isFinite(s) && Number.isFinite(e)) {\n ranges.push([s, e]);\n }\n }\n }\n if (ranges.length > 0) {\n files.push({ path: filePath, lines: 
ranges });\n }\n }\n }\n \n if (files.length > 0) {\n tools.push({ name: 'finish', arguments: { files } });\n }\n }\n }\n \n return tools;\n}\n\nfunction preprocessText(text: string): { lines: string[]; nestedTools: ToolCall[] } {\n let processed = text.replace(/<think>[\\s\\S]*?<\\/think>/gi, '');\n const nestedTools = parseNestedXmlTools(processed);\n const openingTagRegex = /<tool_call>|<tool>/gi;\n const closingTagRegex = /<\\/tool_call>|<\\/tool>/gi;\n \n const openingMatches = processed.match(openingTagRegex) || [];\n const closingMatches = processed.match(closingTagRegex) || [];\n \n if (openingMatches.length > closingMatches.length) {\n const lastClosingMatch = /<\\/tool_call>|<\\/tool>/gi;\n let lastClosingIndex = -1;\n let match;\n while ((match = lastClosingMatch.exec(processed)) !== null) {\n lastClosingIndex = match.index + match[0].length;\n }\n if (lastClosingIndex > 0) {\n processed = processed.slice(0, lastClosingIndex);\n }\n }\n const toolCallLines: string[] = [];\n const toolTagRegex = /<tool_call>([\\s\\S]*?)<\\/tool_call>|<tool>([\\s\\S]*?)<\\/tool>/gi;\n let tagMatch;\n \n while ((tagMatch = toolTagRegex.exec(processed)) !== null) {\n const content = (tagMatch[1] || tagMatch[2] || '').trim();\n if (content) {\n const lines = content.split(/\\r?\\n/).map(l => l.trim()).filter(l => l);\n toolCallLines.push(...lines);\n }\n }\n \n // Also extract raw tool calls (lines starting with valid commands)\n const allLines = processed.split(/\\r?\\n/).map(l => l.trim());\n for (const line of allLines) {\n if (!line) continue;\n if (line.startsWith('<')) continue;\n \n const firstWord = line.split(/\\s/)[0];\n if (VALID_COMMANDS.includes(firstWord as ValidCommand)) {\n if (!toolCallLines.includes(line)) {\n toolCallLines.push(line);\n }\n }\n }\n \n return { lines: toolCallLines, nestedTools };\n}\n\nexport class LLMResponseParser {\n private readonly finishSpecSplitRe = /,(?=[^,\\s]+:)/;\n\n parse(text: string): ToolCall[] {\n if (typeof text !== 'string') {\n throw new TypeError('Command text must be a string.');\n }\n \n // Preprocess to handle XML tags\n const { lines, nestedTools } = preprocessText(text);\n \n // Start with nested XML tools (new format)\n const commands: ToolCall[] = [...nestedTools];\n let finishAccumulator: Map<string, number[][]> | null = null;\n\n lines.forEach((line) => {\n if (!line || line.startsWith('#')) return;\n const parts = this.splitLine(line);\n if (parts.length === 0) return;\n const cmd = parts[0];\n \n switch (cmd) {\n case 'list_directory':\n this.handleListDirectory(parts, line, commands);\n break;\n case 'grep':\n this.handleGrep(parts, line, commands);\n break;\n case 'read':\n this.handleRead(parts, line, commands);\n break;\n case 'finish':\n finishAccumulator = this.handleFinish(parts, line, commands, finishAccumulator);\n break;\n default:\n // Silently ignore unknown commands\n break;\n }\n });\n\n if (finishAccumulator) {\n const map = finishAccumulator as Map<string, number[][]>;\n const entries = [...map.entries()];\n const filesPayload = entries.map(([path, ranges]) => ({\n path,\n lines: [...ranges].sort((a, b) => a[0] - b[0]) as Array<[number, number]>,\n }));\n commands.push({ name: 'finish', arguments: { files: filesPayload } });\n }\n return commands;\n }\n\n private splitLine(line: string): string[] {\n // Split by whitespace but keep quoted blocks as one\n const parts: string[] = [];\n let current = '';\n let inSingle = false;\n for (let i = 0; i < line.length; i++) {\n const ch = line[i];\n if (ch === \"'\" && line[i - 
1] !== '\\\\') {\n inSingle = !inSingle;\n current += ch;\n } else if (!inSingle && /\\s/.test(ch)) {\n if (current) {\n parts.push(current);\n current = '';\n }\n } else {\n current += ch;\n }\n }\n if (current) parts.push(current);\n return parts;\n }\n\n /** Helper to create a _skip tool call with an error message */\n private skip(message: string): ToolCall {\n return { name: '_skip', arguments: { message } };\n }\n\n private handleListDirectory(parts: string[], rawLine: string, commands: ToolCall[]) {\n // list_directory <path> [pattern]\n if (parts.length < 2) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" is missing a path. ` +\n `Correct format: list_directory <path> [pattern]. Example: list_directory src/`\n ));\n return;\n }\n const path = parts[1];\n const pattern = parts[2]?.replace(/^\"|\"$/g, '') ?? null;\n commands.push({ name: 'list_directory', arguments: { path, pattern } });\n }\n\n private handleGrep(parts: string[], rawLine: string, commands: ToolCall[]) {\n // grep '<pattern>' <path>\n if (parts.length < 3) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" is missing arguments. ` +\n `Correct format: grep '<pattern>' <path>. Example: grep 'TODO' src/`\n ));\n return;\n }\n let pat = parts[1];\n // Be lenient: accept unquoted patterns by treating the first arg as the pattern\n if (pat.startsWith(\"'\") && pat.endsWith(\"'\")) {\n pat = pat.slice(1, -1);\n }\n // If pattern is empty after processing, skip\n if (!pat) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" has an empty pattern. ` +\n `Provide a non-empty search pattern. Example: grep 'function' src/`\n ));\n return;\n }\n commands.push({ name: 'grep', arguments: { pattern: pat, path: parts[2] } });\n }\n\n private handleRead(parts: string[], rawLine: string, commands: ToolCall[]) {\n // read <path>[:start-end]\n if (parts.length < 2) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" is missing a path. ` +\n `Correct format: read <path> or read <path>:<start>-<end>. Example: read src/index.ts:1-50`\n ));\n return;\n }\n const spec = parts[1];\n const rangeIdx = spec.indexOf(':');\n if (rangeIdx === -1) {\n commands.push({ name: 'read', arguments: { path: spec } });\n return;\n }\n const filePath = spec.slice(0, rangeIdx);\n const range = spec.slice(rangeIdx + 1);\n const [s, e] = range.split('-').map(v => parseInt(v, 10));\n // If range is invalid, fallback to reading the whole file\n if (!Number.isFinite(s) || !Number.isFinite(e)) {\n commands.push({ name: 'read', arguments: { path: filePath } });\n return;\n }\n commands.push({ name: 'read', arguments: { path: filePath, start: s, end: e } });\n }\n\n private handleFinish(parts: string[], rawLine: string, commands: ToolCall[], acc: Map<string, number[][]> | null) {\n // finish file1:1-10,20-30 file2:5-7\n const map = acc ?? new Map<string, number[][]>();\n const args = parts.slice(1);\n for (const token of args) {\n const [filePath, rangesText] = token.split(':', 2);\n if (!filePath || !rangesText) {\n // Skip this malformed token, continue processing others\n commands.push(this.skip(\n `[SKIPPED] Invalid finish token \"${token}\". ` +\n `Correct format: finish <path>:<start>-<end>. 
Example: finish src/index.ts:1-50`\n ));\n continue;\n }\n const rangeSpecs = rangesText.split(',').filter(Boolean);\n for (const spec of rangeSpecs) {\n const [s, e] = spec.split('-').map(v => parseInt(v, 10));\n if (!Number.isFinite(s) || !Number.isFinite(e) || e < s) {\n // Skip this invalid range, continue with others\n commands.push(this.skip(\n `[SKIPPED] Invalid range \"${spec}\" in \"${token}\". ` +\n `Ranges must be <start>-<end> where start <= end. Example: 1-50`\n ));\n continue;\n }\n const arr = map.get(filePath) ?? [];\n arr.push([s, e]);\n map.set(filePath, arr);\n }\n }\n return map;\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAGA,IAAM,iBAAiB,CAAC,kBAAkB,QAAQ,QAAQ,QAAQ;AAGlE,SAAS,eAAe,MAAoC;AAC1D,SAAO,eAAe,SAAS,IAAoB;AACrD;AAEA,SAAS,kBAAkB,KAAa,SAAgC;AACtE,QAAM,QAAQ,IAAI,OAAO,IAAI,OAAO,kBAAkB,OAAO,KAAK,GAAG;AACrE,QAAM,QAAQ,IAAI,MAAM,KAAK;AAC7B,SAAO,QAAQ,MAAM,CAAC,EAAE,KAAK,IAAI;AACnC;AAEA,SAAS,oBAAoB,MAA0B;AACrD,QAAM,QAAoB,CAAC;AAI3B,QAAM,YAAY;AAClB,MAAI;AAEJ,UAAQ,QAAQ,UAAU,KAAK,IAAI,OAAO,MAAM;AAC9C,UAAM,cAAc,MAAM,CAAC,EAAE,YAAY;AACzC,UAAM,UAAU,MAAM,CAAC;AAGvB,QAAI,CAAC,eAAe,WAAW,EAAG;AAElC,UAAM,WAAW;AAEjB,QAAI,aAAa,kBAAkB;AACjC,YAAM,OAAO,kBAAkB,SAAS,MAAM;AAC9C,YAAM,UAAU,kBAAkB,SAAS,SAAS;AACpD,UAAI,MAAM;AACR,cAAM,KAAK,EAAE,MAAM,kBAAkB,WAAW,EAAE,MAAM,QAAQ,EAAE,CAAC;AAAA,MACrE;AAAA,IACF,WAAW,aAAa,QAAQ;AAC9B,YAAM,UAAU,kBAAkB,SAAS,SAAS;AACpD,YAAM,SAAS,kBAAkB,SAAS,SAAS;AACnD,YAAM,OAAO,kBAAkB,SAAS,MAAM;AAC9C,UAAI,SAAS;AACX,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN,WAAW;AAAA,YACT;AAAA,YACA,MAAM,UAAU;AAAA,YAChB,GAAI,QAAQ,EAAE,KAAK;AAAA,UACrB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,WAAW,aAAa,QAAQ;AAC9B,YAAM,OAAO,kBAAkB,SAAS,MAAM;AAC9C,YAAM,WAAW,kBAAkB,SAAS,OAAO;AACnD,UAAI,MAAM;AACR,cAAM,OAAgC,EAAE,KAAK;AAC7C,YAAI,UAAU;AACZ,gBAAM,SAAkC,CAAC;AACzC,qBAAW,YAAY,SAAS,MAAM,GAAG,GAAG;AAC1C,kBAAM,UAAU,SAAS,KAAK;AAC9B,gBAAI,CAAC,QAAS;AACd,kBAAM,CAAC,GAAG,CAAC,IAAI,QAAQ,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,EAAE,KAAK,GAAG,EAAE,CAAC;AACjE,gBAAI,OAAO,SAAS,CAAC,KAAK,OAAO,SAAS,CAAC,GAAG;AAC5C,qBAAO,KAAK,CAAC,GAAG,CAAC,CAAC;AAAA,YACpB,WAAW,OAAO,SAAS,CAAC,GAAG;AAE7B,qBAAO,KAAK,CAAC,GAAG,CAAC,CAAC;AAAA,YACpB;AAAA,UACF;AACA,cAAI,OAAO,WAAW,GAAG;AACvB,iBAAK,QAAQ,OAAO,CAAC,EAAE,CAAC;AACxB,iBAAK,MAAM,OAAO,CAAC,EAAE,CAAC;AAAA,UACxB,WAAW,OAAO,SAAS,GAAG;AAC5B,iBAAK,QAAQ;AAAA,UACf;AAAA,QACF;AACA,cAAM,KAAK,EAAE,MAAM,QAAQ,WAAW,KAAK,CAAC;AAAA,MAC9C;AAAA,IACF,WAAW,aAAa,UAAU;AAEhC,YAAM,YAAY;AAClB,YAAM,QAAiE,CAAC;AACxE,UAAI;AAEJ,cAAQ,YAAY,UAAU,KAAK,OAAO,OAAO,MAAM;AACrD,cAAM,cAAc,UAAU,CAAC;AAC/B,cAAM,WAAW,kBAAkB,aAAa,MAAM;AACtD,cAAM,WAAW,kBAAkB,aAAa,OAAO;AAEvD,YAAI,YAAY,UAAU;AACxB,gBAAM,SAAkC,CAAC;AACzC,qBAAW,YAAY,SAAS,MAAM,GAAG,GAAG;AAC1C,gBAAI,SAAS,KAAK,MAAM,KAAK;AAC3B,qBAAO,KAAK,CAAC,GAAG,MAAM,CAAC;AAAA,YACzB,OAAO;AACL,oBAAM,CAAC,GAAG,CAAC,IAAI,SAAS,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,EAAE,KAAK,GAAG,EAAE,CAAC;AAClE,kBAAI,OAAO,SAAS,CAAC,KAAK,OAAO,SAAS,CAAC,GAAG;AAC5C,uBAAO,KAAK,CAAC,GAAG,CAAC,CAAC;AAAA,cACpB;AAAA,YACF;AAAA,UACF;AACA,cAAI,OAAO,SAAS,GAAG;AACrB,kBAAM,KAAK,EAAE,MAAM,UAAU,OAAO,OAAO,CAAC;AAAA,UAC9C;AAAA,QACF;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,GAAG;AACpB,cAAM,KAAK,EAAE,MAAM,UAAU,WAAW,EAAE,MAAM,EAAE,CAAC;AAAA,MACrD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,eAAe,MAA4D;AAClF,MAAI,YAAY,KAAK,QAAQ,8BAA8B,EAAE;AAC7D,QAAM,cAAc,oBAAoB,SAAS;AACjD,QAAM,kBAAkB;AACxB,QAAM,kBAAkB;AAExB,QAAM,iBAAiB,UAAU,MAAM,eAAe,KAAK,CAAC;AAC5D,QAAM,iBAAiB,UAAU,MAAM,eAAe,KAAK,CAAC;AAE5D,MAAI,eAAe,SAAS,eAAe,QAAQ;AACjD,UAAM,mBAAmB;AACzB,QAAI,mBAAmB;AACvB,QAAI;AACJ,YAAQ,QAAQ,iBAAiB,KAAK,SAAS,OAAO,MAAM;AAC1D,yBAAmB,MAAM,QAAQ,MAAM,CAAC,EAAE;AA
AA,IAC5C;AACA,QAAI,mBAAmB,GAAG;AACxB,kBAAY,UAAU,MAAM,GAAG,gBAAgB;AAAA,IACjD;AAAA,EACF;AACA,QAAM,gBAA0B,CAAC;AACjC,QAAM,eAAe;AACrB,MAAI;AAEJ,UAAQ,WAAW,aAAa,KAAK,SAAS,OAAO,MAAM;AACzD,UAAM,WAAW,SAAS,CAAC,KAAK,SAAS,CAAC,KAAK,IAAI,KAAK;AACxD,QAAI,SAAS;AACX,YAAM,QAAQ,QAAQ,MAAM,OAAO,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC,EAAE,OAAO,OAAK,CAAC;AACrE,oBAAc,KAAK,GAAG,KAAK;AAAA,IAC7B;AAAA,EACF;AAGA,QAAM,WAAW,UAAU,MAAM,OAAO,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AAC3D,aAAW,QAAQ,UAAU;AAC3B,QAAI,CAAC,KAAM;AACX,QAAI,KAAK,WAAW,GAAG,EAAG;AAE1B,UAAM,YAAY,KAAK,MAAM,IAAI,EAAE,CAAC;AACpC,QAAI,eAAe,SAAS,SAAyB,GAAG;AACtD,UAAI,CAAC,cAAc,SAAS,IAAI,GAAG;AACjC,sBAAc,KAAK,IAAI;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,OAAO,eAAe,YAAY;AAC7C;AAEO,IAAM,oBAAN,MAAwB;AAAA,EACZ,oBAAoB;AAAA,EAErC,MAAM,MAA0B;AAC9B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,IAAI,UAAU,gCAAgC;AAAA,IACtD;AAGA,UAAM,EAAE,OAAO,YAAY,IAAI,eAAe,IAAI;AAGlD,UAAM,WAAuB,CAAC,GAAG,WAAW;AAC5C,QAAI,oBAAoD;AAExD,UAAM,QAAQ,CAAC,SAAS;AACtB,UAAI,CAAC,QAAQ,KAAK,WAAW,GAAG,EAAG;AACnC,YAAM,QAAQ,KAAK,UAAU,IAAI;AACjC,UAAI,MAAM,WAAW,EAAG;AACxB,YAAM,MAAM,MAAM,CAAC;AAEnB,cAAQ,KAAK;AAAA,QACX,KAAK;AACH,eAAK,oBAAoB,OAAO,MAAM,QAAQ;AAC9C;AAAA,QACF,KAAK;AACH,eAAK,WAAW,OAAO,MAAM,QAAQ;AACrC;AAAA,QACF,KAAK;AACH,eAAK,WAAW,OAAO,MAAM,QAAQ;AACrC;AAAA,QACF,KAAK;AACH,8BAAoB,KAAK,aAAa,OAAO,MAAM,UAAU,iBAAiB;AAC9E;AAAA,QACF;AAEE;AAAA,MACJ;AAAA,IACF,CAAC;AAED,QAAI,mBAAmB;AACrB,YAAM,MAAM;AACZ,YAAM,UAAU,CAAC,GAAG,IAAI,QAAQ,CAAC;AACjC,YAAM,eAAe,QAAQ,IAAI,CAAC,CAAC,MAAM,MAAM,OAAO;AAAA,QACpD;AAAA,QACA,OAAO,CAAC,GAAG,MAAM,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;AAAA,MAC/C,EAAE;AACF,eAAS,KAAK,EAAE,MAAM,UAAU,WAAW,EAAE,OAAO,aAAa,EAAE,CAAC;AAAA,IACtE;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,UAAU,MAAwB;AAExC,UAAM,QAAkB,CAAC;AACzB,QAAI,UAAU;AACd,QAAI,WAAW;AACf,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,YAAM,KAAK,KAAK,CAAC;AACjB,UAAI,OAAO,OAAO,KAAK,IAAI,CAAC,MAAM,MAAM;AACtC,mBAAW,CAAC;AACZ,mBAAW;AAAA,MACb,WAAW,CAAC,YAAY,KAAK,KAAK,EAAE,GAAG;AACrC,YAAI,SAAS;AACX,gBAAM,KAAK,OAAO;AAClB,oBAAU;AAAA,QACZ;AAAA,MACF,OAAO;AACL,mBAAW;AAAA,MACb;AAAA,IACF;AACA,QAAI,QAAS,OAAM,KAAK,OAAO;AAC/B,WAAO;AAAA,EACT;AAAA;AAAA,EAGQ,KAAK,SAA2B;AACtC,WAAO,EAAE,MAAM,SAAS,WAAW,EAAE,QAAQ,EAAE;AAAA,EACjD;AAAA,EAEQ,oBAAoB,OAAiB,SAAiB,UAAsB;AAElF,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,UAAU,MAAM,CAAC,GAAG,QAAQ,UAAU,EAAE,KAAK;AACnD,aAAS,KAAK,EAAE,MAAM,kBAAkB,WAAW,EAAE,MAAM,QAAQ,EAAE,CAAC;AAAA,EACxE;AAAA,EAEQ,WAAW,OAAiB,SAAiB,UAAsB;AAEzE,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,QAAI,MAAM,MAAM,CAAC;AAEjB,QAAI,IAAI,WAAW,GAAG,KAAK,IAAI,SAAS,GAAG,GAAG;AAC5C,YAAM,IAAI,MAAM,GAAG,EAAE;AAAA,IACvB;AAEA,QAAI,CAAC,KAAK;AACR,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,aAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,SAAS,KAAK,MAAM,MAAM,CAAC,EAAE,EAAE,CAAC;AAAA,EAC7E;AAAA,EAEQ,WAAW,OAAiB,SAAiB,UAAsB;AAEzE,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,WAAW,KAAK,QAAQ,GAAG;AACjC,QAAI,aAAa,IAAI;AACnB,eAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,KAAK,EAAE,CAAC;AACzD;AAAA,IACF;AACA,UAAM,WAAW,KAAK,MAAM,GAAG,QAAQ;AACvC,UAAM,QAAQ,KAAK,MAAM,WAAW,CAAC;AACrC,UAAM,CAAC,GAAG,CAAC,IAAI,MAAM,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,GAAG,EAAE,CAAC;AAExD,QAAI,CAAC,OAAO,SAAS,CAAC,KAAK,CAAC,OAAO,SAAS,CAAC,GAAG;AAC9C,eAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,SAAS,EAAE,CAAC;AAC7D;AAAA,IACF;AACA,aAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,UAAU,OAAO,GAAG,KAAK,EAAE,EAAE,CAAC;AA
AA,EACjF;AAAA,EAEQ,aAAa,OAAiB,SAAiB,UAAsB,KAAqC;AAEhH,UAAM,MAAM,OAAO,oBAAI,IAAwB;AAC/C,UAAM,OAAO,MAAM,MAAM,CAAC;AAC1B,eAAW,SAAS,MAAM;AACxB,YAAM,CAAC,UAAU,UAAU,IAAI,MAAM,MAAM,KAAK,CAAC;AACjD,UAAI,CAAC,YAAY,CAAC,YAAY;AAE5B,iBAAS,KAAK,KAAK;AAAA,UACjB,mCAAmC,KAAK;AAAA,QAE1C,CAAC;AACD;AAAA,MACF;AACA,YAAM,aAAa,WAAW,MAAM,GAAG,EAAE,OAAO,OAAO;AACvD,iBAAW,QAAQ,YAAY;AAC7B,cAAM,CAAC,GAAG,CAAC,IAAI,KAAK,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,GAAG,EAAE,CAAC;AACvD,YAAI,CAAC,OAAO,SAAS,CAAC,KAAK,CAAC,OAAO,SAAS,CAAC,KAAK,IAAI,GAAG;AAEvD,mBAAS,KAAK,KAAK;AAAA,YACjB,4BAA4B,IAAI,SAAS,KAAK;AAAA,UAEhD,CAAC;AACD;AAAA,QACF;AACA,cAAM,MAAM,IAAI,IAAI,QAAQ,KAAK,CAAC;AAClC,YAAI,KAAK,CAAC,GAAG,CAAC,CAAC;AACf,YAAI,IAAI,UAAU,GAAG;AAAA,MACvB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;","names":[]}
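The source map above embeds the new parser.ts, which strips <think> blocks, collects nested XML tool tags first, and then parses line-style commands wrapped in <tool_call> tags. As a rough usage sketch (not part of this diff; the import specifier below is an assumption based on the dist layout listed above):

// Hedged usage sketch, not part of this diff. The import specifier is an
// assumption based on the dist layout listed above; adjust it to the
// package's actual subpath export.
import { LLMResponseParser } from '@morphllm/morphsdk/dist/tools/warp_grep/agent/parser.js';

const parser = new LLMResponseParser();

// The embedded source strips <think> blocks, collects nested XML tools first,
// then parses line-style commands found inside <tool_call> tags.
const calls = parser.parse([
  '<think>scan the src directory first</think>',
  '<grep><pattern>TODO</pattern><sub_dir>src</sub_dir></grep>',
  '<tool_call>',
  'read src/index.ts:1-50',
  '</tool_call>',
].join('\n'));

// Expected shape per the source above:
// [{ name: 'grep', arguments: { pattern: 'TODO', path: 'src' } },
//  { name: 'read', arguments: { path: 'src/index.ts', start: 1, end: 50 } }]
console.log(calls);

Unknown XML tags (including the <tool_call> wrapper itself when it reaches the nested-tool pass) are skipped silently, which is how the embedded source handles forward compatibility.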
@@ -1,18 +1,16 @@
  import { ToolCall } from './types.js';
+ import '../../utils/resilience.js';
 
- declare class LLMResponseParseError extends Error {
- constructor(message: string);
- }
  declare class LLMResponseParser {
  private readonly finishSpecSplitRe;
  parse(text: string): ToolCall[];
  private splitLine;
  /** Helper to create a _skip tool call with an error message */
  private skip;
- private handleAnalyse;
+ private handleListDirectory;
  private handleGrep;
  private handleRead;
  private handleFinish;
  }
 
- export { LLMResponseParseError, LLMResponseParser };
+ export { LLMResponseParser };
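The declaration diff above removes LLMResponseParseError from the public surface and replaces the private handleAnalyse member with handleListDirectory. A hedged migration sketch for callers that previously caught the removed error class (same assumed import path as above; per the embedded source, parse() now throws a plain TypeError only for non-string input and reports malformed commands as _skip tool calls):

// Hedged migration sketch (illustrative only): with LLMResponseParseError no
// longer exported, guard against the TypeError that parse() throws for
// non-string input instead. Import path is the same assumption as above.
import { LLMResponseParser } from '@morphllm/morphsdk/dist/tools/warp_grep/agent/parser.js';

export function parseToolCalls(text: unknown) {
  const parser = new LLMResponseParser();
  try {
    return parser.parse(text as string);
  } catch (err) {
    if (err instanceof TypeError) {
      // Non-string input. Malformed commands no longer throw at all; the
      // parser reports them as _skip tool calls in the returned array.
      return [];
    }
    throw err;
  }
}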