@morphllm/morphsdk 0.2.45 → 0.2.47

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (128)
  1. package/README.md +1 -1
  2. package/dist/{chunk-TVFGHXPE.js → chunk-3FTAIJBH.js} +4 -4
  3. package/dist/{chunk-ZRLEAPZV.js → chunk-76DJEQEP.js} +4 -4
  4. package/dist/{chunk-W3XLPMV3.js → chunk-7HS6YXA3.js} +21 -5
  5. package/dist/{chunk-W3XLPMV3.js.map → chunk-7HS6YXA3.js.map} +1 -1
  6. package/dist/chunk-BLXC5R4W.js +82 -0
  7. package/dist/chunk-BLXC5R4W.js.map +1 -0
  8. package/dist/chunk-FA6UGPVL.js +105 -0
  9. package/dist/chunk-FA6UGPVL.js.map +1 -0
  10. package/dist/{chunk-PEGZVGG4.js → chunk-G4AWE5A2.js} +4 -4
  11. package/dist/{chunk-OUEJ6XEO.js → chunk-GJU7UOFL.js} +4 -4
  12. package/dist/{chunk-Q7PDN7TS.js → chunk-GZMUGMOZ.js} +1 -1
  13. package/dist/{chunk-Q7PDN7TS.js.map → chunk-GZMUGMOZ.js.map} +1 -1
  14. package/dist/chunk-JYBVRF72.js +1 -0
  15. package/dist/chunk-OOZSGWSK.js +70 -0
  16. package/dist/chunk-OOZSGWSK.js.map +1 -0
  17. package/dist/{chunk-GDR65N2J.js → chunk-OXHGFHEU.js} +53 -26
  18. package/dist/chunk-OXHGFHEU.js.map +1 -0
  19. package/dist/{chunk-VBBJGWHY.js → chunk-P2XKFWFD.js} +2 -2
  20. package/dist/chunk-RAKREIXE.js +76 -0
  21. package/dist/chunk-RAKREIXE.js.map +1 -0
  22. package/dist/chunk-SDI2FI6G.js +283 -0
  23. package/dist/chunk-SDI2FI6G.js.map +1 -0
  24. package/dist/{chunk-GTOXMAF2.js → chunk-SWQPIKPY.js} +44 -3
  25. package/dist/chunk-SWQPIKPY.js.map +1 -0
  26. package/dist/chunk-TJIUA27P.js +94 -0
  27. package/dist/chunk-TJIUA27P.js.map +1 -0
  28. package/dist/{chunk-O5DA5V5S.js → chunk-UBX7QYBD.js} +4 -4
  29. package/dist/{chunk-UYBIKZPM.js → chunk-UCWTZSW5.js} +3 -3
  30. package/dist/{chunk-X4CQ6D3G.js → chunk-UIZT3KVJ.js} +4 -4
  31. package/dist/chunk-WETRQJGU.js +129 -0
  32. package/dist/chunk-WETRQJGU.js.map +1 -0
  33. package/dist/{chunk-RSLIOCOE.js → chunk-XQIVYQD6.js} +3 -2
  34. package/dist/chunk-XQIVYQD6.js.map +1 -0
  35. package/dist/client-BGctTHu9.d.ts +318 -0
  36. package/dist/client.cjs +1886 -44
  37. package/dist/client.cjs.map +1 -1
  38. package/dist/client.d.ts +14 -110
  39. package/dist/client.js +28 -3
  40. package/dist/core-DxiUwyBe.d.ts +156 -0
  41. package/dist/git/client.cjs +52 -25
  42. package/dist/git/client.cjs.map +1 -1
  43. package/dist/git/client.d.ts +17 -8
  44. package/dist/git/client.js +1 -1
  45. package/dist/git/index.cjs +52 -25
  46. package/dist/git/index.cjs.map +1 -1
  47. package/dist/git/index.d.ts +1 -1
  48. package/dist/git/index.js +2 -2
  49. package/dist/git/types.cjs.map +1 -1
  50. package/dist/git/types.d.ts +20 -2
  51. package/dist/index.cjs +1965 -46
  52. package/dist/index.cjs.map +1 -1
  53. package/dist/index.d.ts +8 -1
  54. package/dist/index.js +47 -5
  55. package/dist/tools/codebase_search/anthropic.js +2 -2
  56. package/dist/tools/codebase_search/index.js +9 -9
  57. package/dist/tools/codebase_search/openai.js +2 -2
  58. package/dist/tools/codebase_search/vercel.js +2 -2
  59. package/dist/tools/fastapply/anthropic.js +2 -2
  60. package/dist/tools/fastapply/index.js +7 -7
  61. package/dist/tools/fastapply/openai.js +2 -2
  62. package/dist/tools/fastapply/vercel.js +2 -2
  63. package/dist/tools/index.js +7 -7
  64. package/dist/tools/warp_grep/agent/config.cjs +80 -1
  65. package/dist/tools/warp_grep/agent/config.cjs.map +1 -1
  66. package/dist/tools/warp_grep/agent/config.js +1 -1
  67. package/dist/tools/warp_grep/agent/parser.cjs +43 -2
  68. package/dist/tools/warp_grep/agent/parser.cjs.map +1 -1
  69. package/dist/tools/warp_grep/agent/parser.js +1 -1
  70. package/dist/tools/warp_grep/agent/prompt.cjs +89 -45
  71. package/dist/tools/warp_grep/agent/prompt.cjs.map +1 -1
  72. package/dist/tools/warp_grep/agent/prompt.d.ts +1 -1
  73. package/dist/tools/warp_grep/agent/prompt.js +1 -1
  74. package/dist/tools/warp_grep/agent/runner.cjs +229 -49
  75. package/dist/tools/warp_grep/agent/runner.cjs.map +1 -1
  76. package/dist/tools/warp_grep/agent/runner.js +4 -4
  77. package/dist/tools/warp_grep/agent/types.js +0 -1
  78. package/dist/tools/warp_grep/anthropic.cjs +313 -84
  79. package/dist/tools/warp_grep/anthropic.cjs.map +1 -1
  80. package/dist/tools/warp_grep/anthropic.d.ts +75 -12
  81. package/dist/tools/warp_grep/anthropic.js +22 -9
  82. package/dist/tools/warp_grep/index.cjs +417 -127
  83. package/dist/tools/warp_grep/index.cjs.map +1 -1
  84. package/dist/tools/warp_grep/index.d.ts +17 -4
  85. package/dist/tools/warp_grep/index.js +30 -22
  86. package/dist/tools/warp_grep/openai.cjs +316 -84
  87. package/dist/tools/warp_grep/openai.cjs.map +1 -1
  88. package/dist/tools/warp_grep/openai.d.ts +73 -29
  89. package/dist/tools/warp_grep/openai.js +22 -9
  90. package/dist/tools/warp_grep/providers/command.cjs +80 -1
  91. package/dist/tools/warp_grep/providers/command.cjs.map +1 -1
  92. package/dist/tools/warp_grep/providers/command.js +2 -2
  93. package/dist/tools/warp_grep/providers/local.cjs +82 -2
  94. package/dist/tools/warp_grep/providers/local.cjs.map +1 -1
  95. package/dist/tools/warp_grep/providers/local.js +3 -3
  96. package/dist/tools/warp_grep/utils/ripgrep.cjs +2 -1
  97. package/dist/tools/warp_grep/utils/ripgrep.cjs.map +1 -1
  98. package/dist/tools/warp_grep/utils/ripgrep.js +1 -1
  99. package/dist/tools/warp_grep/vercel.cjs +293 -58
  100. package/dist/tools/warp_grep/vercel.cjs.map +1 -1
  101. package/dist/tools/warp_grep/vercel.d.ts +40 -19
  102. package/dist/tools/warp_grep/vercel.js +18 -9
  103. package/package.json +2 -1
  104. package/dist/chunk-AFEPUNAO.js +0 -15
  105. package/dist/chunk-AFEPUNAO.js.map +0 -1
  106. package/dist/chunk-GDR65N2J.js.map +0 -1
  107. package/dist/chunk-GTOXMAF2.js.map +0 -1
  108. package/dist/chunk-HKZB23U7.js +0 -85
  109. package/dist/chunk-HKZB23U7.js.map +0 -1
  110. package/dist/chunk-IQHKEIQX.js +0 -54
  111. package/dist/chunk-IQHKEIQX.js.map +0 -1
  112. package/dist/chunk-JKFVDM62.js +0 -45
  113. package/dist/chunk-JKFVDM62.js.map +0 -1
  114. package/dist/chunk-K6FQZZ2E.js +0 -104
  115. package/dist/chunk-K6FQZZ2E.js.map +0 -1
  116. package/dist/chunk-KL4YVZRF.js +0 -57
  117. package/dist/chunk-KL4YVZRF.js.map +0 -1
  118. package/dist/chunk-RSLIOCOE.js.map +0 -1
  119. package/dist/chunk-XYPMN4A3.js +0 -1
  120. /package/dist/{chunk-TVFGHXPE.js.map → chunk-3FTAIJBH.js.map} +0 -0
  121. /package/dist/{chunk-ZRLEAPZV.js.map → chunk-76DJEQEP.js.map} +0 -0
  122. /package/dist/{chunk-PEGZVGG4.js.map → chunk-G4AWE5A2.js.map} +0 -0
  123. /package/dist/{chunk-OUEJ6XEO.js.map → chunk-GJU7UOFL.js.map} +0 -0
  124. /package/dist/{chunk-XYPMN4A3.js.map → chunk-JYBVRF72.js.map} +0 -0
  125. /package/dist/{chunk-VBBJGWHY.js.map → chunk-P2XKFWFD.js.map} +0 -0
  126. /package/dist/{chunk-O5DA5V5S.js.map → chunk-UBX7QYBD.js.map} +0 -0
  127. /package/dist/{chunk-UYBIKZPM.js.map → chunk-UCWTZSW5.js.map} +0 -0
  128. /package/dist/{chunk-X4CQ6D3G.js.map → chunk-UIZT3KVJ.js.map} +0 -0
package/dist/client.cjs CHANGED
@@ -890,10 +890,1096 @@ async function checkHealth(config = {}) {
890
890
  }
891
891
  }
892
892
 
893
// tools/warp_grep/agent/config.ts
// Agent loop limits. MAX_ROUNDS is a failsafe cap so a misbehaving model
// cannot loop forever; TIMEOUT_MS bounds each model request (30 seconds).
var AGENT_CONFIG = {
  MAX_ROUNDS: 10,
  TIMEOUT_MS: 3e4
};
// Names and globs that are never searched. Grouped by category; the final
// ".*" entry is a catch-all for hidden directories/files.
var BUILTIN_EXCLUDES = [
  // Version control metadata
  ".git", ".svn", ".hg", ".bzr",
  // Dependency trees
  "node_modules", "bower_components", ".pnpm", ".yarn", "vendor", "packages", "Pods", ".bundle",
  // Python caches and environments
  "__pycache__", ".pytest_cache", ".mypy_cache", ".ruff_cache", ".venv", "venv", ".tox", ".nox", ".eggs", "*.egg-info",
  // Build outputs
  "dist", "build", "out", "output", "target", "_build", ".next", ".nuxt", ".output", ".vercel", ".netlify",
  // Tooling caches
  ".cache", ".parcel-cache", ".turbo", ".nx", ".gradle",
  // IDE / editor state
  ".idea", ".vscode", ".vs",
  // Coverage artifacts
  "coverage", ".coverage", "htmlcov", ".nyc_output",
  // Scratch directories
  "tmp", "temp", ".tmp", ".temp",
  // Lock files
  "package-lock.json", "yarn.lock", "pnpm-lock.yaml", "bun.lockb", "Cargo.lock", "Gemfile.lock", "poetry.lock",
  // Binary / minified / generated artifacts
  "*.min.js", "*.min.css", "*.bundle.js", "*.wasm", "*.so", "*.dll", "*.pyc", "*.map", "*.js.map",
  // Hidden entries catch-all
  ".*"
];
// User-supplied excludes come first (comma-separated env var, blanks dropped),
// followed by the built-in list.
var userExcludes = (process.env.MORPH_WARP_GREP_EXCLUDE || "").split(",").map((entry) => entry.trim()).filter(Boolean);
var DEFAULT_EXCLUDES = [...userExcludes, ...BUILTIN_EXCLUDES];
// Model used when the caller does not specify one.
var DEFAULT_MODEL = "morph-warp-grep";
980
+
981
// tools/warp_grep/agent/prompt.ts
// System prompt for the warp-grep search agent. The prompt tells the model it
// has "exactly 4 turns"; the hard loop cap elsewhere (AGENT_CONFIG.MAX_ROUNDS)
// is larger and acts only as a failsafe — the prompt, not the cap, drives the
// expected turn budget.
// NOTE(review): reconstructed flush-left from a rendering that collapsed
// indentation — confirm against the published artifact before relying on
// exact whitespace inside the literal.
var SYSTEM_PROMPT = `You are a code search agent. Your task is to find all relevant code for a given query.

<workflow>
You have exactly 4 turns. The 4th turn MUST be a \`finish\` call. Each turn allows up to 8 parallel tool calls.

- Turn 1: Map the territory OR dive deep (based on query specificity)
- Turn 2-3: Refine based on findings
- Turn 4: MUST call \`finish\` with all relevant code locations
- You MAY call \`finish\` early if confident\u2014but never before at least 1 search turn.

Remember, if the task feels easy to you, it is strongly desirable to call \`finish\` early using fewer turns, but quality over speed.
</workflow>

<tools>
### \`analyse <path> [pattern]\`
Directory tree or file search. Shows structure of a path, optionally filtered by regex pattern.
- \`path\`: Required. Directory or file path (use \`.\` for repo root)
- \`pattern\`: Optional regex to filter results

Examples:
\`\`\`
analyse .
analyse src/api
analyse . ".*\\.ts$"
analyse src "test.*"
\`\`\`

### \`read <path>[:start-end]\`
Read file contents. Line range is 1-based, inclusive.
- Returns numbered lines for easy reference
- Omit range to read entire file

Examples:
\`\`\`
read src/main.py
read src/db/conn.py:10-50
read package.json:1-20
\`\`\`

### \`grep '<pattern>' <path>\`
Ripgrep search. Finds pattern matches across files.
- \`'<pattern>'\`: Required. Regex pattern wrapped in single quotes
- \`<path>\`: Required. Directory or file to search (use \`.\` for repo root)

Examples:
\`\`\`
grep 'class.*Service' src/
grep 'def authenticate' .
grep 'import.*from' src/components/
grep 'TODO' .
\`\`\`

### \`finish <file1:ranges> [file2:ranges ...]\`
Submit final answer with all relevant code locations.
- Include generous line ranges\u2014don't be stingy with context
- Ranges are comma-separated: \`file.py:10-30,50-60\`
- ALWAYS include import statements at the top of files (usually lines 1-20)
- If code spans multiple files, include ALL of them
- Small files can be returned in full

Examples:
\`\`\`
finish src/auth.py:1-15,25-50,75-80 src/models/user.py:1-10,20-45
finish src/index.ts:1-100
\`\`\`
</tools>

<strategy>
**Before your first tool call, classify the query:**

| Query Type | Turn 1 Strategy | Early Finish? |
|------------|-----------------|---------------|
| **Specific** (function name, error string, unique identifier) | 8 parallel greps on likely paths | Often by turn 2 |
| **Conceptual** (how does X work, where is Y handled) | analyse + 2-3 broad greps | Rarely early |
| **Exploratory** (find all tests, list API endpoints) | analyse at multiple depths | Usually needs 3 turns |

**Parallel call patterns:**
- **Shotgun grep**: Same pattern, 8 different directories\u2014fast coverage
- **Variant grep**: 8 pattern variations (synonyms, naming conventions)\u2014catches inconsistent codebases
- **Funnel**: 1 analyse + 7 greps\u2014orient and search simultaneously
- **Deep read**: 8 reads on files you already identified\u2014gather full context fast
</strategy>

<output_format>
EVERY response MUST follow this exact format:

1. First, wrap your reasoning in \`<think>...</think>\` tags containing:
- Query classification (specific/conceptual/exploratory)
- Confidence estimate (can I finish in 1-2 turns?)
- This turn's parallel strategy
- What signals would let me finish early?

2. Then, output tool calls wrapped in \`<tool_call>...</tool_call>\` tags, one per line.

Example:
\`\`\`
<think>
This is a specific query about authentication. I'll grep for auth-related patterns.
High confidence I can finish in 2 turns if I find the auth module.
Strategy: Shotgun grep across likely directories.
</think>
<tool_call>grep 'authenticate' src/</tool_call>
<tool_call>grep 'login' src/</tool_call>
<tool_call>analyse src/auth</tool_call>
\`\`\`

No commentary outside \`<think>\`. No explanations after tool calls.
</output_format>

<finishing_requirements>
When calling \`finish\`:
- Include the import section (typically lines 1-20) of each file
- Include all function/class definitions that are relevant
- Include any type definitions, interfaces, or constants used
- Better to over-include than leave the user missing context
- If unsure about boundaries, include more rather than less
</finishing_requirements>

Begin your exploration now to find code relevant to the query.`;
// Accessor used by the runner; kept as a function so callers never hold a
// mutable reference path to the constant.
function getSystemPrompt() {
  return SYSTEM_PROMPT;
}
1104
+
1105
// tools/warp_grep/agent/parser.ts
// Error type thrown by the command parser so callers can distinguish
// malformed model output from other failures via the `name` field.
var LLMResponseParseError = class extends Error {
  name = "LLMResponseParseError";
};
1112
// Command words the agent is allowed to emit.
var VALID_COMMANDS = ["analyse", "grep", "read", "finish"];
// Extracts candidate command lines from a raw model response:
// 1. strips <think>…</think> reasoning,
// 2. drops a trailing unclosed tool tag (truncated responses),
// 3. collects lines inside <tool_call>/<tool> tags in order,
// 4. falls back to bare lines that start with a known command (deduped).
function preprocessText(text) {
  let processed = text.replace(/<think>[\s\S]*?<\/think>/gi, "");
  const openCount = (processed.match(/<tool_call>|<tool>/gi) || []).length;
  const closeCount = (processed.match(/<\/tool_call>|<\/tool>/gi) || []).length;
  if (openCount > closeCount) {
    // Response likely cut off mid-call: keep text up to the last closing tag.
    const closeRe = /<\/tool_call>|<\/tool>/gi;
    let cutAt = -1;
    for (let m = closeRe.exec(processed); m !== null; m = closeRe.exec(processed)) {
      cutAt = m.index + m[0].length;
    }
    if (cutAt > 0) {
      processed = processed.slice(0, cutAt);
    }
  }
  const collected = [];
  const pairRe = /<tool_call>([\s\S]*?)<\/tool_call>|<tool>([\s\S]*?)<\/tool>/gi;
  for (let m = pairRe.exec(processed); m !== null; m = pairRe.exec(processed)) {
    const inner = (m[1] || m[2] || "").trim();
    if (!inner) continue;
    for (const piece of inner.split(/\r?\n/)) {
      const trimmed = piece.trim();
      if (trimmed) collected.push(trimmed);
    }
  }
  // Fallback: bare command lines outside any tag (skip tag lines themselves).
  for (const rawLine of processed.split(/\r?\n/)) {
    const line = rawLine.trim();
    if (!line || line.startsWith("<")) continue;
    const firstWord = line.split(/\s/)[0];
    if (VALID_COMMANDS.includes(firstWord) && !collected.includes(line)) {
      collected.push(line);
    }
  }
  return collected;
}
1153
// Parses preprocessed model output into structured tool-call commands.
// `analyse`/`grep`/`read` lines each push one command; all `finish` lines in a
// turn are accumulated into a single trailing `finish` command whose per-file
// ranges are sorted by start line.
var LLMResponseParser = class {
  // NOTE(review): this field appears unused anywhere in the visible code —
  // finish tokens are split with String.prototype.split below, not this regex.
  finishSpecSplitRe = /,(?=[^,\s]+:)/;
  // Parses `text` into an array of { name, arguments } commands.
  // Throws TypeError for non-string input, LLMResponseParseError for
  // malformed command lines. Unknown command words are silently ignored.
  parse(text) {
    if (typeof text !== "string") {
      throw new TypeError("Command text must be a string.");
    }
    const lines = preprocessText(text);
    const commands = [];
    let finishAccumulator = null;
    lines.forEach((line, idx) => {
      // Blank lines and "#" comment lines are skipped.
      if (!line || line.startsWith("#")) return;
      const ctx = { lineNumber: idx + 1, raw: line };
      const parts = this.splitLine(line, ctx);
      if (parts.length === 0) return;
      const cmd = parts[0];
      switch (cmd) {
        case "analyse":
          this.handleAnalyse(parts, ctx, commands);
          break;
        case "grep":
          this.handleGrep(parts, ctx, commands);
          break;
        case "read":
          this.handleRead(parts, ctx, commands);
          break;
        case "finish":
          finishAccumulator = this.handleFinish(parts, ctx, finishAccumulator);
          break;
        default:
          break;
      }
    });
    // Emit a single consolidated finish command after all lines are handled.
    // (A Map is always truthy, so a bare `finish` with no tokens still emits
    // a finish command with an empty file list.)
    if (finishAccumulator) {
      const map = finishAccumulator;
      const entries = [...map.entries()];
      const filesPayload = entries.map(([path4, ranges]) => ({
        path: path4,
        lines: [...ranges].sort((a, b) => a[0] - b[0])
      }));
      commands.push({ name: "finish", arguments: { files: filesPayload } });
    }
    return commands;
  }
  // Splits a command line on whitespace, keeping single-quoted segments
  // (with backslash-escaped quotes) intact so grep patterns survive spaces.
  splitLine(line, ctx) {
    try {
      const parts = [];
      let current = "";
      let inSingle = false;
      for (let i = 0; i < line.length; i++) {
        const ch = line[i];
        if (ch === "'" && line[i - 1] !== "\\") {
          inSingle = !inSingle;
          current += ch;
        } else if (!inSingle && /\s/.test(ch)) {
          if (current) {
            parts.push(current);
            current = "";
          }
        } else {
          current += ch;
        }
      }
      if (current) parts.push(current);
      return parts;
    } catch {
      throw new LLMResponseParseError(`Line ${ctx.lineNumber}: Unable to parse line.`);
    }
  }
  // analyse <path> [pattern] — pattern may be double-quoted; quotes stripped.
  handleAnalyse(parts, ctx, commands) {
    if (parts.length < 2) {
      throw new LLMResponseParseError(`Line ${ctx.lineNumber}: analyse requires <path>`);
    }
    const path4 = parts[1];
    const pattern = parts[2]?.replace(/^"|"$/g, "") ?? null;
    commands.push({ name: "analyse", arguments: { path: path4, pattern } });
  }
  // no glob tool in MCP
  // grep '<pattern>' <path> — pattern must be single-quoted; quotes stripped.
  handleGrep(parts, ctx, commands) {
    if (parts.length < 3) {
      throw new LLMResponseParseError(`Line ${ctx.lineNumber}: grep requires '<pattern>' and <path>`);
    }
    const pat = parts[1];
    if (!pat.startsWith("'") || !pat.endsWith("'")) {
      throw new LLMResponseParseError(`Line ${ctx.lineNumber}: grep pattern must be single-quoted`);
    }
    commands.push({ name: "grep", arguments: { pattern: pat.slice(1, -1), path: parts[2] } });
  }
  // read <path>[:start-end] — range is 1-based inclusive.
  // NOTE(review): splits on the FIRST colon, so a path containing a colon
  // (e.g. a Windows drive path) would be mis-parsed — confirm callers only
  // pass repo-relative paths.
  handleRead(parts, ctx, commands) {
    if (parts.length < 2) {
      throw new LLMResponseParseError(`Line ${ctx.lineNumber}: read requires <path> or <path>:<start-end>`);
    }
    const spec = parts[1];
    const rangeIdx = spec.indexOf(":");
    if (rangeIdx === -1) {
      commands.push({ name: "read", arguments: { path: spec } });
      return;
    }
    const path4 = spec.slice(0, rangeIdx);
    const range = spec.slice(rangeIdx + 1);
    const [s, e] = range.split("-").map((v) => parseInt(v, 10));
    if (!Number.isFinite(s) || !Number.isFinite(e)) {
      throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid read range '${range}'`);
    }
    commands.push({ name: "read", arguments: { path: path4, start: s, end: e } });
  }
  // finish <file:ranges> [...] — accumulates path → [start, end][] across
  // multiple finish lines. Throws on tokens missing ranges or with an
  // inverted/unparseable range.
  handleFinish(parts, ctx, acc) {
    const map = acc ?? /* @__PURE__ */ new Map();
    const args = parts.slice(1);
    for (const token of args) {
      // split(":", 2) keeps only the first two segments; any text after a
      // second colon in the token is discarded.
      const [path4, rangesText] = token.split(":", 2);
      if (!path4 || !rangesText) {
        throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid finish token '${token}'`);
      }
      const rangeSpecs = rangesText.split(",").filter(Boolean);
      for (const spec of rangeSpecs) {
        const [s, e] = spec.split("-").map((v) => parseInt(v, 10));
        if (!Number.isFinite(s) || !Number.isFinite(e) || e < s) {
          throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid range '${spec}'`);
        }
        const arr = map.get(path4) ?? [];
        arr.push([s, e]);
        map.set(path4, arr);
      }
    }
    return map;
  }
};
1280
+
1281
// tools/warp_grep/tools/read.ts
// Reads a file (or line range) through the provider and returns the lines
// joined into a single newline-separated string.
async function toolRead(provider, args) {
  const { path, start, end } = args;
  const result = await provider.read({ path, start, end });
  return result.lines.join("\n");
}
1286
+
1287
// tools/warp_grep/tools/analyse.ts
// Lists a directory/file tree through the provider and renders it as an
// indented bullet list ("[D]" directories, "[F]" files). Defaults: no
// pattern, 100 results, depth 2. Returns the literal string "empty" when
// nothing matched.
async function toolAnalyse(provider, args) {
  const entries = await provider.analyse({
    path: args.path,
    pattern: args.pattern ?? null,
    maxResults: args.maxResults ?? 100,
    maxDepth: args.maxDepth ?? 2
  });
  if (entries.length === 0) {
    return "empty";
  }
  const rendered = [];
  for (const entry of entries) {
    const marker = entry.type === "dir" ? "[D]" : "[F]";
    rendered.push(`${" ".repeat(entry.depth)}- ${marker} ${entry.name}`);
  }
  return rendered.join("\n");
}
1298
+
1299
// tools/warp_grep/agent/formatter.ts
// Wraps raw tool output in XML-ish envelopes for the agent transcript.
// Empty, non-error output collapses to "" so no empty tags are emitted.
var ToolOutputFormatter = class {
  format(toolName, args, output, options = {}) {
    const name = (toolName ?? "").trim();
    if (!name) return "";
    // Stringify defensively: output may be null or lack toString/trim.
    const payload = output?.toString?.()?.trim?.() ?? "";
    const isError = Boolean(options.isError);
    if (!payload && !isError) return "";
    const safeArgs = args ?? {};
    if (name === "read") return this.formatRead(safeArgs, payload, isError);
    if (name === "analyse") return this.formatAnalyse(safeArgs, payload, isError);
    if (name === "grep") return this.formatGrep(safeArgs, payload, isError);
    return payload ? `<tool_output>\n${payload}\n</tool_output>` : "";
  }
  // Errors pass through raw; successes get a <file path="..."> wrapper.
  formatRead(args, payload, isError) {
    if (isError) return payload;
    const filePath = this.asString(args.path) || "...";
    return [`<file path="${filePath}">`, payload, "</file>"].join("\n");
  }
  formatAnalyse(args, payload, isError) {
    const dirPath = this.asString(args.path) || ".";
    const status = isError ? ' status="error"' : "";
    return `<analyse_results path="${dirPath}"${status}>\n${payload}\n</analyse_results>`;
  }
  formatGrep(args, payload, isError) {
    const attributes = [];
    const pattern = this.asString(args.pattern);
    const searchPath = this.asString(args.path);
    if (pattern !== void 0) attributes.push(`pattern="${pattern}"`);
    if (searchPath !== void 0) attributes.push(`path="${searchPath}"`);
    if (isError) attributes.push('status="error"');
    const attrText = attributes.length ? ` ${attributes.join(" ")}` : "";
    return `<grep_output${attrText}>\n${payload}\n</grep_output>`;
  }
  // null/undefined → undefined; everything else coerced to string.
  asString(value) {
    return value == null ? void 0 : String(value);
  }
};
var sharedFormatter = new ToolOutputFormatter();
// Module-level convenience wrapper around a shared formatter instance.
function formatAgentToolOutput(toolName, args, output, options = {}) {
  return sharedFormatter.format(toolName, args, output, options);
}
1374
+
1375
// tools/warp_grep/agent/grep_helpers.ts
// Tracks which "path:line" matches have already been shown so repeated greps
// across turns only surface new results.
var GrepState = class {
  seenLines = new Set();
  // True when this path/line pair has not been reported before.
  isNew(filePath, lineNumber) {
    return !this.seenLines.has(this.makeKey(filePath, lineNumber));
  }
  // Marks a path/line pair as reported.
  add(filePath, lineNumber) {
    this.seenLines.add(this.makeKey(filePath, lineNumber));
  }
  makeKey(filePath, lineNumber) {
    return `${filePath}:${lineNumber}`;
  }
};
// Per-turn cap on combined grep output size fed back to the model.
var MAX_GREP_OUTPUT_CHARS_PER_TURN = 6e4;
1390
// Parses one line of ripgrep-style output ("path:line[:column]:content") into
// { path, lineNumber, content }. Returns null for blank lines, provider
// "[error]" lines, malformed entries, non-positive line numbers, or matches
// whose content is empty after trimming.
function extractMatchFields(payload) {
  // Strip a single trailing newline so the content segment stays clean.
  const text = payload.replace(/\r?\n$/, "");
  if (!text || text.startsWith("[error]")) {
    return null;
  }
  const firstSep = text.indexOf(":");
  if (firstSep === -1) {
    return null;
  }
  let filePath = text.slice(0, firstSep).trim();
  if (!filePath) {
    return null;
  }
  // Normalize "./" / ".\" prefixes so dedupe keys are stable across providers.
  if (filePath.startsWith("./") || filePath.startsWith(".\\")) {
    filePath = filePath.slice(2);
  }
  const remainder = text.slice(firstSep + 1);
  const secondSep = remainder.indexOf(":");
  if (secondSep === -1) {
    return null;
  }
  const linePart = remainder.slice(0, secondSep);
  // parseInt is lax here: "12abc" parses as 12. Non-numeric segments yield
  // NaN and are rejected below.
  const lineNumber = Number.parseInt(linePart, 10);
  if (!Number.isInteger(lineNumber) || lineNumber <= 0) {
    return null;
  }
  let contentSegment = remainder.slice(secondSep + 1);
  // Output may include a column ("path:line:col:content"); drop a purely
  // numeric leading segment so only the matched text remains.
  const columnSep = contentSegment.indexOf(":");
  if (columnSep !== -1 && /^\d+$/.test(contentSegment.slice(0, columnSep))) {
    contentSegment = contentSegment.slice(columnSep + 1);
  }
  const content = contentSegment.trim();
  if (!content) {
    return null;
  }
  // NOTE(review): a path containing a colon (e.g. a Windows drive path like
  // "C:\repo\a.ts") would split at the drive colon and be rejected or
  // mis-parsed — confirm providers emit repo-relative paths.
  return { path: filePath, lineNumber, content };
}
1427
// Splits raw grep output into lines, parses each into match fields, and keeps
// only matches not yet seen in `state` (marking them seen as it goes).
// Non-string or blank input yields an empty list.
function parseAndFilterGrepOutput(rawOutput, state) {
  if (typeof rawOutput !== "string" || !rawOutput.trim()) {
    return [];
  }
  const fresh = [];
  for (const line of rawOutput.split(/\r?\n/)) {
    const fields = extractMatchFields(line);
    if (fields && state.isNew(fields.path, fields.lineNumber)) {
      state.add(fields.path, fields.lineNumber);
      fresh.push(fields);
    }
  }
  return fresh;
}
1444
// Truncates `payload` to at most roughly `maxChars` characters, appending a
// "... (output truncated)" note on its own line. Returns the payload
// unchanged when it already fits; returns just the note when maxChars leaves
// no room for content.
//
// Fix: removed the unreachable `if (payload.length <= available)` branch —
// the guard at the top guarantees payload.length > maxChars, and
// available < maxChars, so that condition could never hold.
function truncateOutput(payload, maxChars) {
  if (payload.length <= maxChars) {
    return payload;
  }
  const note = "... (output truncated)";
  // Budget for content: total minus the note and its separating newline.
  const available = maxChars - note.length - 1;
  if (available <= 0) {
    return note;
  }
  // Reserve one char for the ellipsis, then trim trailing whitespace so the
  // ellipsis never dangles after a newline or spaces.
  const core = payload.slice(0, Math.max(0, available - 1));
  const trimmed = core.replace(/\n$/, "").replace(/\s+$/, "");
  const snippet = trimmed ? `${trimmed}\u2026` : "\u2026";
  return `${snippet}\n${note}`;
}
1463
// Renders a turn's new grep matches grouped by file: the file path on its own
// line, then "line:content" entries sorted by line number, with a blank line
// between files and paths in sorted order. The whole rendering is truncated
// to `maxChars`. Returns a fixed sentinel string when there is nothing new.
function formatTurnGrepOutput(matches, maxChars = MAX_GREP_OUTPUT_CHARS_PER_TURN) {
  if (!matches || matches.length === 0) {
    return "No new matches found.";
  }
  const byFile = new Map();
  for (const match of matches) {
    const bucket = byFile.get(match.path);
    if (bucket) {
      bucket.push(match);
    } else {
      byFile.set(match.path, [match]);
    }
  }
  const blocks = [...byFile.keys()].sort().map((filePath) => {
    const body = byFile
      .get(filePath)
      .slice()
      .sort((a, b) => a.lineNumber - b.lineNumber)
      .map((match) => `${match.lineNumber}:${match.content}`);
    return [filePath, ...body].join("\n");
  });
  return truncateOutput(blocks.join("\n\n"), maxChars);
}
1488
+
1489
// tools/warp_grep/tools/finish.ts
// Resolves a finish command's file/range list into file contents: ranges for
// each file are merged, each merged range is read via `reader(path, start,
// end)` (which returns an array of lines), and the chunks are joined with
// newlines. (`repoRoot` is accepted for interface compatibility but unused
// here — the reader encapsulates path resolution.)
async function readFinishFiles(repoRoot, files, reader) {
  const results = [];
  for (const file of files) {
    const ranges = mergeRanges(file.lines);
    const chunks = [];
    for (const [start, end] of ranges) {
      const lines = await reader(file.path, start, end);
      chunks.push(lines.join("\n"));
    }
    results.push({ path: file.path, ranges, content: chunks.join("\n") });
  }
  return results;
}
1503
// Merges [start, end] line ranges: sorts by start, then coalesces ranges that
// overlap or are directly adjacent (gap of one line counts as adjacent).
// Input tuples are never mutated; fresh tuples are returned.
function mergeRanges(ranges) {
  if (!ranges.length) return [];
  const sorted = [...ranges].sort((a, b) => a[0] - b[0]);
  const merged = [sorted[0].slice()];
  for (const [start, end] of sorted.slice(1)) {
    const last = merged[merged.length - 1];
    if (start <= last[1] + 1) {
      // Overlapping or adjacent: extend the current range.
      last[1] = Math.max(last[1], end);
    } else {
      merged.push([start, end]);
    }
  }
  return merged;
}
1521
+
1522
+ // tools/warp_grep/agent/runner.ts
1523
+ var import_path2 = __toESM(require("path"), 1);
1524
+ var import_promises2 = __toESM(require("fs/promises"), 1);
1525
+ var parser = new LLMResponseParser();
1526
// Builds the initial repository context message for the agent: the repo root
// plus up to 50 top-level directory names and 50 top-level file names. Falls
// back to just the repo root tag if the directory cannot be read. (`query` is
// accepted for interface compatibility but not used here.)
async function buildInitialState(repoRoot, query) {
  try {
    const entries = await import_promises2.default.readdir(repoRoot, { withFileTypes: true });
    const firstNames = (predicate) => entries.filter(predicate).map((entry) => entry.name).slice(0, 50);
    const dirNames = firstNames((entry) => entry.isDirectory());
    const fileNames = firstNames((entry) => entry.isFile());
    return [
      `<repo_root>${repoRoot}</repo_root>`,
      `<top_dirs>${dirNames.join(", ")}</top_dirs>`,
      `<top_files>${fileNames.join(", ")}</top_files>`
    ].join("\n");
  } catch {
    // Unreadable root: degrade to the minimal context rather than failing.
    return `<repo_root>${repoRoot}</repo_root>`;
  }
}
1541
// Sends the conversation to the morph-warp-grep chat-completions endpoint and
// returns the assistant message text. The request is retried via
// fetchWithRetry and bounded by AGENT_CONFIG.TIMEOUT_MS. Throws on non-OK
// HTTP responses or when the response lacks string content.
async function callModel(messages, model, apiKey) {
  const endpoint = "https://api.morphllm.com/v1/chat/completions";
  // Explicit key wins; otherwise fall back to the environment.
  const key = apiKey || process.env.MORPH_API_KEY || "";
  const request = fetchWithRetry(
    endpoint,
    {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${key}`
      },
      body: JSON.stringify({
        model,
        temperature: 0,
        max_tokens: 1024,
        messages
      })
    },
    {}
  );
  const resp = await withTimeout(request, AGENT_CONFIG.TIMEOUT_MS, "morph-warp-grep request timed out");
  if (!resp.ok) {
    const body = await resp.text();
    throw new Error(`morph-warp-grep error ${resp.status}: ${body}`);
  }
  const data = await resp.json();
  const content = data?.choices?.[0]?.message?.content;
  if (typeof content !== "string" || !content) {
    throw new Error("Invalid response from model");
  }
  return content;
}
1572
+ async function runWarpGrep(config) {
1573
+ const repoRoot = import_path2.default.resolve(config.repoRoot || process.cwd());
1574
+ const messages = [];
1575
+ const systemMessage = { role: "system", content: getSystemPrompt() };
1576
+ messages.push(systemMessage);
1577
+ const queryContent = `<query>${config.query}</query>`;
1578
+ messages.push({ role: "user", content: queryContent });
1579
+ const initialState = await buildInitialState(repoRoot, config.query);
1580
+ messages.push({ role: "user", content: initialState });
1581
+ const maxRounds = AGENT_CONFIG.MAX_ROUNDS;
1582
+ const model = config.model || DEFAULT_MODEL;
1583
+ const provider = config.provider;
1584
+ const errors = [];
1585
+ const grepState = new GrepState();
1586
+ let finishMeta;
1587
+ let terminationReason = "terminated";
1588
+ for (let round = 1; round <= maxRounds; round += 1) {
1589
+ const assistantContent = await callModel(messages, model, config.apiKey).catch((e) => {
1590
+ errors.push({ message: e instanceof Error ? e.message : String(e) });
1591
+ return "";
1592
+ });
1593
+ if (!assistantContent) break;
1594
+ messages.push({ role: "assistant", content: assistantContent });
1595
+ let toolCalls = [];
1596
+ try {
1597
+ toolCalls = parser.parse(assistantContent);
1598
+ } catch (e) {
1599
+ errors.push({ message: e instanceof Error ? e.message : String(e) });
1600
+ terminationReason = "terminated";
1601
+ break;
1602
+ }
1603
+ if (toolCalls.length === 0) {
1604
+ errors.push({ message: "No tool calls produced by the model." });
1605
+ terminationReason = "terminated";
1606
+ break;
1607
+ }
1608
+ const finishCalls = toolCalls.filter((c) => c.name === "finish");
1609
+ const grepCalls = toolCalls.filter((c) => c.name === "grep");
1610
+ const analyseCalls = toolCalls.filter((c) => c.name === "analyse");
1611
+ const readCalls = toolCalls.filter((c) => c.name === "read");
1612
+ const formatted = [];
1613
+ const otherPromises = [];
1614
+ for (const c of analyseCalls) {
1615
+ const args = c.arguments ?? {};
1616
+ otherPromises.push(
1617
+ toolAnalyse(provider, args).then(
1618
+ (p) => formatAgentToolOutput("analyse", args, p, { isError: false }),
1619
+ (err) => formatAgentToolOutput("analyse", args, String(err), { isError: true })
1620
+ )
1621
+ );
1622
+ }
1623
+ for (const c of readCalls) {
1624
+ const args = c.arguments ?? {};
1625
+ otherPromises.push(
1626
+ toolRead(provider, args).then(
1627
+ (p) => formatAgentToolOutput("read", args, p, { isError: false }),
1628
+ (err) => formatAgentToolOutput("read", args, String(err), { isError: true })
1629
+ )
1630
+ );
1631
+ }
1632
+ const otherResults = await Promise.all(otherPromises);
1633
+ formatted.push(...otherResults);
1634
+ for (const c of grepCalls) {
1635
+ const args = c.arguments ?? {};
1636
+ try {
1637
+ const grepRes = await provider.grep({ pattern: args.pattern, path: args.path });
1638
+ const rawOutput = Array.isArray(grepRes.lines) ? grepRes.lines.join("\n") : "";
1639
+ const newMatches = parseAndFilterGrepOutput(rawOutput, grepState);
1640
+ let formattedPayload = formatTurnGrepOutput(newMatches);
1641
+ if (formattedPayload === "No new matches found.") {
1642
+ formattedPayload = "no new matches";
1643
+ }
1644
+ formatted.push(formatAgentToolOutput("grep", args, formattedPayload, { isError: false }));
1645
+ } catch (err) {
1646
+ formatted.push(formatAgentToolOutput("grep", args, String(err), { isError: true }));
1647
+ }
1648
+ }
1649
+ if (formatted.length > 0) {
1650
+ const turnsUsed = round;
1651
+ const turnsRemaining = 4 - turnsUsed;
1652
+ let turnMessage;
1653
+ if (turnsRemaining === 0) {
1654
+ turnMessage = `
1655
+
1656
+ [Turn ${turnsUsed}/4] This is your LAST turn. You MUST call the finish tool now.`;
1657
+ } else if (turnsRemaining === 1) {
1658
+ turnMessage = `
1659
+
1660
+ [Turn ${turnsUsed}/4] You have 1 turn remaining. Next turn you MUST call the finish tool.`;
1661
+ } else {
1662
+ turnMessage = `
1663
+
1664
+ [Turn ${turnsUsed}/4] You have ${turnsRemaining} turns remaining.`;
1665
+ }
1666
+ messages.push({ role: "user", content: formatted.join("\n") + turnMessage });
1667
+ }
1668
+ if (finishCalls.length) {
1669
+ const fc = finishCalls[0];
1670
+ const files = fc.arguments?.files ?? [];
1671
+ finishMeta = { files };
1672
+ terminationReason = "completed";
1673
+ break;
1674
+ }
1675
+ }
1676
+ if (terminationReason !== "completed" || !finishMeta) {
1677
+ return { terminationReason, messages, errors };
1678
+ }
1679
+ const parts = ["Relevant context found:"];
1680
+ for (const f of finishMeta.files) {
1681
+ const ranges = f.lines.map(([s, e]) => `${s}-${e}`).join(", ");
1682
+ parts.push(`- ${f.path}: ${ranges}`);
1683
+ }
1684
+ const payload = parts.join("\n");
1685
+ const resolved = await readFinishFiles(
1686
+ repoRoot,
1687
+ finishMeta.files,
1688
+ async (p, s, e) => {
1689
+ const rr = await provider.read({ path: p, start: s, end: e });
1690
+ return rr.lines.map((l) => {
1691
+ const idx = l.indexOf("|");
1692
+ return idx >= 0 ? l.slice(idx + 1) : l;
1693
+ });
1694
+ }
1695
+ );
1696
+ return {
1697
+ terminationReason: "completed",
1698
+ messages,
1699
+ finish: { payload, metadata: finishMeta, resolved }
1700
+ };
1701
+ }
1702
+
1703
+ // tools/warp_grep/providers/local.ts
1704
+ var import_promises4 = __toESM(require("fs/promises"), 1);
1705
+ var import_path4 = __toESM(require("path"), 1);
1706
+
1707
+ // tools/warp_grep/utils/ripgrep.ts
1708
+ var import_child_process = require("child_process");
1709
+ var import_ripgrep = require("@vscode/ripgrep");
1710
+ function runRipgrep(args, opts) {
1711
+ return new Promise((resolve2) => {
1712
+ const child = (0, import_child_process.spawn)(import_ripgrep.rgPath, args, {
1713
+ cwd: opts?.cwd,
1714
+ env: { ...process.env, ...opts?.env || {} },
1715
+ stdio: ["ignore", "pipe", "pipe"]
1716
+ });
1717
+ let stdout = "";
1718
+ let stderr = "";
1719
+ child.stdout.on("data", (d) => stdout += d.toString());
1720
+ child.stderr.on("data", (d) => stderr += d.toString());
1721
+ child.on("close", (code) => {
1722
+ resolve2({ stdout, stderr, exitCode: typeof code === "number" ? code : -1 });
1723
+ });
1724
+ child.on("error", () => {
1725
+ resolve2({ stdout: "", stderr: "Failed to spawn ripgrep (rg). Ensure it is installed.", exitCode: -1 });
1726
+ });
1727
+ });
1728
+ }
1729
+
1730
+ // tools/warp_grep/utils/paths.ts
1731
+ var import_fs = __toESM(require("fs"), 1);
1732
+ var import_path3 = __toESM(require("path"), 1);
1733
+ function resolveUnderRepo(repoRoot, targetPath) {
1734
+ const absRoot = import_path3.default.resolve(repoRoot);
1735
+ const resolved = import_path3.default.resolve(absRoot, targetPath);
1736
+ ensureWithinRepo(absRoot, resolved);
1737
+ return resolved;
1738
+ }
1739
+ function ensureWithinRepo(repoRoot, absTarget) {
1740
+ const rel = import_path3.default.relative(import_path3.default.resolve(repoRoot), import_path3.default.resolve(absTarget));
1741
+ if (rel.startsWith("..") || import_path3.default.isAbsolute(rel)) {
1742
+ throw new Error(`Path outside repository root: ${absTarget}`);
1743
+ }
1744
+ }
1745
+ function toRepoRelative(repoRoot, absPath) {
1746
+ return import_path3.default.relative(import_path3.default.resolve(repoRoot), import_path3.default.resolve(absPath));
1747
+ }
1748
+ function isSymlink(p) {
1749
+ try {
1750
+ const st = import_fs.default.lstatSync(p);
1751
+ return st.isSymbolicLink();
1752
+ } catch {
1753
+ return false;
1754
+ }
1755
+ }
1756
+ function isTextualFile(filePath, maxBytes = 2e6) {
1757
+ try {
1758
+ const st = import_fs.default.statSync(filePath);
1759
+ if (!st.isFile()) return false;
1760
+ if (st.size > maxBytes) return false;
1761
+ const fd = import_fs.default.openSync(filePath, "r");
1762
+ const buf = Buffer.alloc(512);
1763
+ const read = import_fs.default.readSync(fd, buf, 0, buf.length, 0);
1764
+ import_fs.default.closeSync(fd);
1765
+ for (let i = 0; i < read; i++) {
1766
+ const c = buf[i];
1767
+ if (c === 0) return false;
1768
+ }
1769
+ return true;
1770
+ } catch {
1771
+ return false;
1772
+ }
1773
+ }
1774
+
1775
+ // tools/warp_grep/utils/files.ts
1776
+ var import_promises3 = __toESM(require("fs/promises"), 1);
1777
+ async function readAllLines(filePath) {
1778
+ const content = await import_promises3.default.readFile(filePath, "utf8");
1779
+ return content.split(/\r?\n/);
1780
+ }
1781
+
1782
+ // tools/warp_grep/providers/local.ts
1783
+ var LocalRipgrepProvider = class {
1784
+ constructor(repoRoot, excludes = DEFAULT_EXCLUDES) {
1785
+ this.repoRoot = repoRoot;
1786
+ this.excludes = excludes;
1787
+ }
1788
+ async grep(params) {
1789
+ const abs = resolveUnderRepo(this.repoRoot, params.path);
1790
+ const stat = await import_promises4.default.stat(abs).catch(() => null);
1791
+ if (!stat) return { lines: [] };
1792
+ const targetArg = abs === import_path4.default.resolve(this.repoRoot) ? "." : toRepoRelative(this.repoRoot, abs);
1793
+ const args = [
1794
+ "--no-config",
1795
+ "--no-heading",
1796
+ "--with-filename",
1797
+ "--line-number",
1798
+ "--color=never",
1799
+ "--trim",
1800
+ "--max-columns=400",
1801
+ ...this.excludes.flatMap((e) => ["-g", `!${e}`]),
1802
+ params.pattern,
1803
+ targetArg || "."
1804
+ ];
1805
+ const res = await runRipgrep(args, { cwd: this.repoRoot });
1806
+ if (res.exitCode === -1) {
1807
+ throw new Error(res.stderr || "ripgrep (rg) execution failed.");
1808
+ }
1809
+ if (res.exitCode !== 0 && res.exitCode !== 1) {
1810
+ throw new Error(res.stderr || `ripgrep failed with code ${res.exitCode}`);
1811
+ }
1812
+ const lines = (res.stdout || "").trim().split(/\r?\n/).filter((l) => l.length > 0);
1813
+ return { lines };
1814
+ }
1815
+ async glob(params) {
1816
+ const abs = resolveUnderRepo(this.repoRoot, params.path);
1817
+ const targetArg = abs === import_path4.default.resolve(this.repoRoot) ? "." : toRepoRelative(this.repoRoot, abs);
1818
+ const args = [
1819
+ "--no-config",
1820
+ "--files",
1821
+ "-g",
1822
+ params.pattern,
1823
+ ...this.excludes.flatMap((e) => ["-g", `!${e}`]),
1824
+ targetArg || "."
1825
+ ];
1826
+ const res = await runRipgrep(args, { cwd: this.repoRoot });
1827
+ if (res.exitCode === -1) {
1828
+ throw new Error(res.stderr || "ripgrep (rg) execution failed.");
1829
+ }
1830
+ const files = (res.stdout || "").trim().split(/\r?\n/).filter((l) => l.length > 0);
1831
+ return { files };
1832
+ }
1833
+ async read(params) {
1834
+ const abs = resolveUnderRepo(this.repoRoot, params.path);
1835
+ const stat = await import_promises4.default.stat(abs).catch(() => null);
1836
+ if (!stat || !stat.isFile()) {
1837
+ throw new Error(`Path is not a file: ${params.path}`);
1838
+ }
1839
+ if (isSymlink(abs)) {
1840
+ throw new Error(`Refusing to read symlink: ${params.path}`);
1841
+ }
1842
+ if (!isTextualFile(abs)) {
1843
+ throw new Error(`Non-text or too-large file: ${params.path}`);
1844
+ }
1845
+ const lines = await readAllLines(abs);
1846
+ const total = lines.length;
1847
+ const s = params.start ?? 1;
1848
+ const e = Math.min(params.end ?? total, total);
1849
+ if (s > total && total > 0) {
1850
+ throw new Error(`start ${s} exceeds file length (${total})`);
1851
+ }
1852
+ const out = [];
1853
+ for (let i = s; i <= e; i += 1) {
1854
+ const content = lines[i - 1] ?? "";
1855
+ out.push(`${i}|${content}`);
1856
+ }
1857
+ return { lines: out };
1858
+ }
1859
+ async analyse(params) {
1860
+ const abs = resolveUnderRepo(this.repoRoot, params.path);
1861
+ const stat = await import_promises4.default.stat(abs).catch(() => null);
1862
+ if (!stat || !stat.isDirectory()) {
1863
+ return [];
1864
+ }
1865
+ const maxResults = params.maxResults ?? 100;
1866
+ const maxDepth = params.maxDepth ?? 2;
1867
+ const regex = params.pattern ? new RegExp(params.pattern) : null;
1868
+ const results = [];
1869
+ async function walk(dir, depth) {
1870
+ if (depth > maxDepth || results.length >= maxResults) return;
1871
+ const entries = await import_promises4.default.readdir(dir, { withFileTypes: true });
1872
+ for (const entry of entries) {
1873
+ const full = import_path4.default.join(dir, entry.name);
1874
+ const rel = toRepoRelative(abs, full).replace(/^[.][/\\]?/, "");
1875
+ if (DEFAULT_EXCLUDES.some((ex) => rel.split(import_path4.default.sep).includes(ex))) continue;
1876
+ if (regex && !regex.test(entry.name)) continue;
1877
+ if (results.length >= maxResults) break;
1878
+ results.push({
1879
+ name: entry.name,
1880
+ path: toRepoRelative(import_path4.default.resolve(""), full),
1881
+ // relative display
1882
+ type: entry.isDirectory() ? "dir" : "file",
1883
+ depth
1884
+ });
1885
+ if (entry.isDirectory()) {
1886
+ await walk(full, depth + 1);
1887
+ }
1888
+ }
1889
+ }
1890
+ await walk(abs, 0);
1891
+ return results;
1892
+ }
1893
+ };
1894
+
1895
+ // tools/warp_grep/core.ts
1896
+ var WarpGrepClient = class {
1897
+ config;
1898
+ constructor(config = {}) {
1899
+ this.config = {
1900
+ apiKey: config.apiKey,
1901
+ debug: config.debug,
1902
+ timeout: config.timeout,
1903
+ retryConfig: config.retryConfig
1904
+ };
1905
+ }
1906
+ /**
1907
+ * Execute a code search query
1908
+ *
1909
+ * @param input - Search parameters including query, repoRoot, and optional provider
1910
+ * @returns Search results with relevant code contexts
1911
+ *
1912
+ * @example
1913
+ * ```typescript
1914
+ * const result = await client.execute({
1915
+ * query: 'Find authentication middleware',
1916
+ * repoRoot: '.'
1917
+ * });
1918
+ *
1919
+ * if (result.success) {
1920
+ * for (const ctx of result.contexts) {
1921
+ * console.log(`File: ${ctx.file}`);
1922
+ * console.log(ctx.content);
1923
+ * }
1924
+ * }
1925
+ * ```
1926
+ */
1927
+ async execute(input) {
1928
+ const provider = input.provider ?? new LocalRipgrepProvider(input.repoRoot, input.excludes);
1929
+ const result = await runWarpGrep({
1930
+ query: input.query,
1931
+ repoRoot: input.repoRoot,
1932
+ provider,
1933
+ excludes: input.excludes,
1934
+ includes: input.includes,
1935
+ debug: input.debug ?? this.config.debug ?? false,
1936
+ apiKey: this.config.apiKey
1937
+ });
1938
+ const finish = result.finish;
1939
+ if (result.terminationReason !== "completed" || !finish?.metadata) {
1940
+ return {
1941
+ success: false,
1942
+ error: "Search did not complete"
1943
+ };
1944
+ }
1945
+ const contexts = (finish.resolved ?? []).map((r) => ({
1946
+ file: r.path,
1947
+ content: r.content
1948
+ }));
1949
+ return {
1950
+ success: true,
1951
+ contexts,
1952
+ summary: finish.payload
1953
+ };
1954
+ }
1955
+ };
1956
+ function formatResult(result) {
1957
+ if (!result.success) {
1958
+ return `Search failed: ${result.error}`;
1959
+ }
1960
+ if (!result.contexts || result.contexts.length === 0) {
1961
+ return "No relevant code found. Try rephrasing your query.";
1962
+ }
1963
+ const lines = [];
1964
+ lines.push(`Found ${result.contexts.length} relevant code sections:
1965
+ `);
1966
+ result.contexts.forEach((ctx, i) => {
1967
+ lines.push(`${i + 1}. ${ctx.file}`);
1968
+ lines.push("```");
1969
+ lines.push(ctx.content);
1970
+ lines.push("```");
1971
+ lines.push("");
1972
+ });
1973
+ if (result.summary) {
1974
+ lines.push(`Summary: ${result.summary}`);
1975
+ }
1976
+ return lines.join("\n");
1977
+ }
1978
+
893
1979
  // git/client.ts
894
1980
  var import_isomorphic_git = __toESM(require("isomorphic-git"), 1);
895
1981
  var import_node = __toESM(require("isomorphic-git/http/node"), 1);
896
- var import_fs = __toESM(require("fs"), 1);
1982
+ var import_fs2 = __toESM(require("fs"), 1);
897
1983
  var DEFAULT_PROXY_URL = "https://repos.morphllm.com";
898
1984
  var MorphGit = class {
899
1985
  apiKey;
@@ -950,12 +2036,12 @@ var MorphGit = class {
950
2036
  throw new Error(`Failed to create repository: ${error}`);
951
2037
  }
952
2038
  await import_isomorphic_git.default.init({
953
- fs: import_fs.default,
2039
+ fs: import_fs2.default,
954
2040
  dir,
955
2041
  defaultBranch
956
2042
  });
957
2043
  await import_isomorphic_git.default.addRemote({
958
- fs: import_fs.default,
2044
+ fs: import_fs2.default,
959
2045
  dir,
960
2046
  remote: "origin",
961
2047
  url: `${this.proxyUrl}/v1/repos/${repoId}`
@@ -976,7 +2062,7 @@ var MorphGit = class {
976
2062
  async clone(options) {
977
2063
  const { repoId, dir, branch = "main", depth, singleBranch = true } = options;
978
2064
  await import_isomorphic_git.default.clone({
979
- fs: import_fs.default,
2065
+ fs: import_fs2.default,
980
2066
  http: import_node.default,
981
2067
  dir,
982
2068
  url: `${this.proxyUrl}/v1/repos/${repoId}`,
@@ -993,42 +2079,65 @@ var MorphGit = class {
993
2079
  * ```ts
994
2080
  * await morphGit.push({
995
2081
  * dir: './my-project',
996
- * branch: 'main' // Required: explicit branch name
2082
+ * branch: 'main', // Required: explicit branch name
2083
+ * index: true // Optional: generate embeddings (default: true)
997
2084
  * });
998
2085
  * ```
999
2086
  */
1000
2087
  async push(options) {
1001
- const { dir, remote = "origin", branch, waitForEmbeddings } = options;
2088
+ const { dir, remote = "origin", branch, waitForEmbeddings, index = true } = options;
1002
2089
  if (!branch) {
1003
2090
  throw new Error(
1004
2091
  'branch is required for push operations. Specify the branch explicitly: { dir: "./my-project", branch: "main" }'
1005
2092
  );
1006
2093
  }
1007
- let commitHash;
2094
+ const commitHash = await import_isomorphic_git.default.resolveRef({ fs: import_fs2.default, dir, ref: "HEAD" });
1008
2095
  let repoId;
1009
- if (waitForEmbeddings) {
1010
- commitHash = await import_isomorphic_git.default.resolveRef({ fs: import_fs.default, dir, ref: "HEAD" });
1011
- const remotes = await import_isomorphic_git.default.listRemotes({ fs: import_fs.default, dir });
1012
- const originRemote = remotes.find((r) => r.remote === remote);
1013
- if (originRemote) {
1014
- const match = originRemote.url.match(/\/repos\/([^\/]+)$/);
1015
- if (match) {
1016
- repoId = match[1];
1017
- }
2096
+ const remotes = await import_isomorphic_git.default.listRemotes({ fs: import_fs2.default, dir });
2097
+ const originRemote = remotes.find((r) => r.remote === remote);
2098
+ if (originRemote) {
2099
+ const match = originRemote.url.match(/\/repos\/([^\/]+)$/);
2100
+ if (match) {
2101
+ repoId = match[1];
1018
2102
  }
1019
2103
  }
1020
2104
  await import_isomorphic_git.default.push({
1021
- fs: import_fs.default,
2105
+ fs: import_fs2.default,
1022
2106
  http: import_node.default,
1023
2107
  dir,
1024
2108
  remote,
1025
2109
  ref: branch,
1026
2110
  onAuth: this.getAuthCallback()
1027
2111
  });
1028
- if (waitForEmbeddings && repoId && commitHash) {
2112
+ if (repoId && commitHash) {
2113
+ await this.configureCommit({ repoId, commitHash, branch, index });
2114
+ }
2115
+ if (waitForEmbeddings && repoId && commitHash && index) {
1029
2116
  await this.waitForEmbeddings({ repoId, commitHash });
1030
2117
  }
1031
2118
  }
2119
+ /**
2120
+ * Configure commit settings on the backend after push.
2121
+ * Sets the index flag to control embedding generation.
2122
+ * @private
2123
+ */
2124
+ async configureCommit(options) {
2125
+ const { repoId, commitHash, branch, index } = options;
2126
+ const response = await fetch(
2127
+ `${this.proxyUrl}/v1/repos/${repoId}/commits/${commitHash}/config`,
2128
+ {
2129
+ method: "POST",
2130
+ headers: {
2131
+ "Authorization": `Bearer ${this.apiKey}`,
2132
+ "Content-Type": "application/json"
2133
+ },
2134
+ body: JSON.stringify({ index, branch })
2135
+ }
2136
+ );
2137
+ if (!response.ok) {
2138
+ console.warn(`Failed to configure commit: ${response.status}`);
2139
+ }
2140
+ }
1032
2141
  /**
1033
2142
  * Pull changes from remote repository
1034
2143
  *
@@ -1048,7 +2157,7 @@ var MorphGit = class {
1048
2157
  );
1049
2158
  }
1050
2159
  await import_isomorphic_git.default.pull({
1051
- fs: import_fs.default,
2160
+ fs: import_fs2.default,
1052
2161
  http: import_node.default,
1053
2162
  dir,
1054
2163
  remote,
@@ -1117,7 +2226,7 @@ var MorphGit = class {
1117
2226
  async add(options) {
1118
2227
  const { dir, filepath } = options;
1119
2228
  await import_isomorphic_git.default.add({
1120
- fs: import_fs.default,
2229
+ fs: import_fs2.default,
1121
2230
  dir,
1122
2231
  filepath
1123
2232
  });
@@ -1136,7 +2245,7 @@ var MorphGit = class {
1136
2245
  async remove(options) {
1137
2246
  const { dir, filepath } = options;
1138
2247
  await import_isomorphic_git.default.remove({
1139
- fs: import_fs.default,
2248
+ fs: import_fs2.default,
1140
2249
  dir,
1141
2250
  filepath
1142
2251
  });
@@ -1153,6 +2262,7 @@ var MorphGit = class {
1153
2262
  * name: 'AI Agent',
1154
2263
  * email: 'ai@example.com'
1155
2264
  * },
2265
+ * metadata: { issueId: 'PROJ-123', source: 'agent' },
1156
2266
  * chatHistory: [
1157
2267
  * { role: 'user', content: 'Please add a new feature' },
1158
2268
  * { role: 'assistant', content: 'I will add that feature' }
@@ -1162,28 +2272,30 @@ var MorphGit = class {
1162
2272
  * ```
1163
2273
  */
1164
2274
  async commit(options) {
1165
- const { dir, message, author, chatHistory, recordingId } = options;
2275
+ const { dir, message, author, metadata, chatHistory, recordingId } = options;
1166
2276
  const commitAuthor = author || {
1167
2277
  name: "Morph SDK",
1168
2278
  email: "sdk@morphllm.com"
1169
2279
  };
1170
2280
  const sha = await import_isomorphic_git.default.commit({
1171
- fs: import_fs.default,
2281
+ fs: import_fs2.default,
1172
2282
  dir,
1173
2283
  message,
1174
2284
  author: commitAuthor
1175
2285
  });
1176
- if (chatHistory || recordingId) {
1177
- const metadata = {
2286
+ if (metadata || chatHistory || recordingId) {
2287
+ const notes = {
2288
+ metadata,
1178
2289
  chatHistory,
1179
- recordingId
2290
+ recordingId,
2291
+ _version: 1
1180
2292
  };
1181
2293
  await import_isomorphic_git.default.addNote({
1182
- fs: import_fs.default,
2294
+ fs: import_fs2.default,
1183
2295
  dir,
1184
2296
  ref: "refs/notes/morph-metadata",
1185
2297
  oid: sha,
1186
- note: JSON.stringify(metadata, null, 2),
2298
+ note: JSON.stringify(notes, null, 2),
1187
2299
  author: commitAuthor
1188
2300
  });
1189
2301
  }
@@ -1207,7 +2319,7 @@ var MorphGit = class {
1207
2319
  throw new Error("filepath is required for status check");
1208
2320
  }
1209
2321
  const status = await import_isomorphic_git.default.status({
1210
- fs: import_fs.default,
2322
+ fs: import_fs2.default,
1211
2323
  dir,
1212
2324
  filepath
1213
2325
  });
@@ -1227,7 +2339,7 @@ var MorphGit = class {
1227
2339
  async log(options) {
1228
2340
  const { dir, depth, ref } = options;
1229
2341
  const commits = await import_isomorphic_git.default.log({
1230
- fs: import_fs.default,
2342
+ fs: import_fs2.default,
1231
2343
  dir,
1232
2344
  depth,
1233
2345
  ref
@@ -1248,7 +2360,7 @@ var MorphGit = class {
1248
2360
  async checkout(options) {
1249
2361
  const { dir, ref } = options;
1250
2362
  await import_isomorphic_git.default.checkout({
1251
- fs: import_fs.default,
2363
+ fs: import_fs2.default,
1252
2364
  dir,
1253
2365
  ref
1254
2366
  });
@@ -1268,7 +2380,7 @@ var MorphGit = class {
1268
2380
  async branch(options) {
1269
2381
  const { dir, name, checkout = false } = options;
1270
2382
  await import_isomorphic_git.default.branch({
1271
- fs: import_fs.default,
2383
+ fs: import_fs2.default,
1272
2384
  dir,
1273
2385
  ref: name,
1274
2386
  checkout
@@ -1287,7 +2399,7 @@ var MorphGit = class {
1287
2399
  async listBranches(options) {
1288
2400
  const { dir } = options;
1289
2401
  const branches = await import_isomorphic_git.default.listBranches({
1290
- fs: import_fs.default,
2402
+ fs: import_fs2.default,
1291
2403
  dir
1292
2404
  });
1293
2405
  return branches;
@@ -1305,7 +2417,7 @@ var MorphGit = class {
1305
2417
  async currentBranch(options) {
1306
2418
  const { dir } = options;
1307
2419
  const branch = await import_isomorphic_git.default.currentBranch({
1308
- fs: import_fs.default,
2420
+ fs: import_fs2.default,
1309
2421
  dir
1310
2422
  });
1311
2423
  return branch || void 0;
@@ -1323,7 +2435,7 @@ var MorphGit = class {
1323
2435
  async statusMatrix(options) {
1324
2436
  const { dir } = options;
1325
2437
  const matrix = await import_isomorphic_git.default.statusMatrix({
1326
- fs: import_fs.default,
2438
+ fs: import_fs2.default,
1327
2439
  dir
1328
2440
  });
1329
2441
  return matrix.map(([filepath, HEADStatus, workdirStatus, stageStatus]) => {
@@ -1365,38 +2477,39 @@ var MorphGit = class {
1365
2477
  async resolveRef(options) {
1366
2478
  const { dir, ref } = options;
1367
2479
  const oid = await import_isomorphic_git.default.resolveRef({
1368
- fs: import_fs.default,
2480
+ fs: import_fs2.default,
1369
2481
  dir,
1370
2482
  ref
1371
2483
  });
1372
2484
  return oid;
1373
2485
  }
1374
2486
  /**
1375
- * Get metadata (chat history, recording ID) attached to a commit
2487
+ * Get notes (metadata, chat history, recording ID) attached to a commit
1376
2488
  *
1377
2489
  * @example
1378
2490
  * ```ts
1379
- * const metadata = await morphGit.getCommitMetadata({
2491
+ * const notes = await morphGit.getCommitMetadata({
1380
2492
  * dir: './my-project',
1381
2493
  * commitSha: 'abc123...'
1382
2494
  * });
1383
2495
  *
1384
- * if (metadata) {
1385
- * console.log('Chat history:', metadata.chatHistory);
1386
- * console.log('Recording ID:', metadata.recordingId);
2496
+ * if (notes) {
2497
+ * console.log('Metadata:', notes.metadata);
2498
+ * console.log('Chat history:', notes.chatHistory);
2499
+ * console.log('Recording ID:', notes.recordingId);
1387
2500
  * }
1388
2501
  * ```
1389
2502
  */
1390
2503
  async getCommitMetadata(options) {
1391
2504
  try {
1392
2505
  const note = await import_isomorphic_git.default.readNote({
1393
- fs: import_fs.default,
2506
+ fs: import_fs2.default,
1394
2507
  dir: options.dir,
1395
2508
  ref: "refs/notes/morph-metadata",
1396
2509
  oid: options.commitSha
1397
2510
  });
1398
- const metadata = JSON.parse(new TextDecoder().decode(note));
1399
- return metadata;
2511
+ const notes = JSON.parse(new TextDecoder().decode(note));
2512
+ return notes;
1400
2513
  } catch (err) {
1401
2514
  return null;
1402
2515
  }
@@ -1607,6 +2720,718 @@ var RawRouter = class extends BaseRouter {
1607
2720
  }
1608
2721
  };
1609
2722
 
2723
+ // tools/warp_grep/prompts.ts
2724
+ var WARP_GREP_DESCRIPTION = "A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.";
2725
+
2726
+ // tools/warp_grep/openai.ts
2727
+ var TOOL_PARAMETERS = {
2728
+ type: "object",
2729
+ properties: {
2730
+ query: { type: "string", description: "Free-form repository question" }
2731
+ },
2732
+ required: ["query"]
2733
+ };
2734
+ async function execute(input, config) {
2735
+ const parsed = typeof input === "string" ? JSON.parse(input) : input;
2736
+ const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
2737
+ const result = await runWarpGrep({
2738
+ query: parsed.query,
2739
+ repoRoot: config.repoRoot,
2740
+ provider,
2741
+ excludes: config.excludes,
2742
+ includes: config.includes,
2743
+ debug: config.debug ?? false,
2744
+ apiKey: config.apiKey
2745
+ });
2746
+ const finish = result.finish;
2747
+ if (result.terminationReason !== "completed" || !finish?.metadata) {
2748
+ return { success: false, error: "Search did not complete" };
2749
+ }
2750
+ const contexts = (finish.resolved ?? []).map((r) => ({
2751
+ file: r.path,
2752
+ content: r.content
2753
+ }));
2754
+ return { success: true, contexts, summary: finish.payload };
2755
+ }
2756
+ function createMorphWarpGrepTool(config) {
2757
+ const tool4 = {
2758
+ type: "function",
2759
+ function: {
2760
+ name: "morph-warp-grep",
2761
+ description: config.description ?? WARP_GREP_DESCRIPTION,
2762
+ parameters: TOOL_PARAMETERS
2763
+ }
2764
+ };
2765
+ return Object.assign(tool4, {
2766
+ execute: async (input) => {
2767
+ return execute(input, config);
2768
+ },
2769
+ formatResult: (result) => {
2770
+ return formatResult(result);
2771
+ },
2772
+ getSystemPrompt: () => {
2773
+ return getSystemPrompt();
2774
+ }
2775
+ });
2776
+ }
2777
+
2778
+ // tools/codebase_search/prompts.ts
2779
+ var CODEBASE_SEARCH_DESCRIPTION = `Semantic search that finds code by meaning, not exact text.
2780
+
2781
+ Use this to explore unfamiliar codebases or ask "how/where/what" questions:
2782
+ - "How does X work?" - Find implementation details
2783
+ - "Where is Y handled?" - Locate specific functionality
2784
+ - "What happens when Z?" - Understand flow
2785
+
2786
+ The tool uses two-stage retrieval (embedding similarity + reranking) to find the most semantically relevant code chunks.
2787
+
2788
+ Returns code chunks with file paths, line ranges, and full content ranked by relevance.`;
2789
+ var CODEBASE_SEARCH_SYSTEM_PROMPT = `You have access to the codebase_search tool that performs semantic code search.
2790
+
2791
+ When searching:
2792
+ - Use natural language queries describing what you're looking for
2793
+ - Be specific about functionality, not variable names
2794
+ - Use target_directories to narrow search if you know the area
2795
+ - Results are ranked by relevance (rerank score is most important)
2796
+
2797
+ The tool returns:
2798
+ - File paths with symbol names (e.g. "src/auth.ts::AuthService@L1-L17")
2799
+ - Line ranges for precise navigation
2800
+ - Full code content for each match
2801
+ - Dual relevance scores: embedding similarity + rerank score
2802
+
2803
+ Use results to understand code or answer questions. The content is provided in full - avoid re-reading unless you need more context.`;
2804
+
2805
+ // tools/codebase_search/openai.ts
2806
+ function createCodebaseSearchTool(config) {
2807
+ const toolDefinition = {
2808
+ type: "function",
2809
+ function: {
2810
+ name: "codebase_search",
2811
+ description: CODEBASE_SEARCH_DESCRIPTION,
2812
+ parameters: {
2813
+ type: "object",
2814
+ properties: {
2815
+ query: {
2816
+ type: "string",
2817
+ description: 'A complete question about what you want to understand. Ask as if talking to a colleague: "How does X work?", "What happens when Y?", "Where is Z handled?"'
2818
+ },
2819
+ target_directories: {
2820
+ type: "array",
2821
+ items: { type: "string" },
2822
+ description: "Prefix directory paths to limit search scope (single directory only, no glob patterns). Use [] to search entire repo."
2823
+ },
2824
+ explanation: {
2825
+ type: "string",
2826
+ description: "One sentence explanation as to why this tool is being used, and how it contributes to the goal."
2827
+ },
2828
+ limit: {
2829
+ type: "number",
2830
+ description: "Maximum results to return (default: 10)"
2831
+ }
2832
+ },
2833
+ required: ["query", "target_directories", "explanation"]
2834
+ }
2835
+ }
2836
+ };
2837
+ return Object.assign(toolDefinition, {
2838
+ execute: async (input) => {
2839
+ const parsedInput = typeof input === "string" ? JSON.parse(input) : input;
2840
+ return executeCodebaseSearch(parsedInput, config);
2841
+ },
2842
+ formatResult: (result) => {
2843
+ return formatResult2(result);
2844
+ },
2845
+ getSystemPrompt: () => {
2846
+ return CODEBASE_SEARCH_SYSTEM_PROMPT;
2847
+ }
2848
+ });
2849
+ }
2850
+ function formatResult2(result) {
2851
+ if (!result.success) {
2852
+ return `Search failed: ${result.error}`;
2853
+ }
2854
+ if (result.results.length === 0) {
2855
+ return "No matching code found. Try rephrasing your query or removing directory filters.";
2856
+ }
2857
+ const lines = [];
2858
+ lines.push(`Found ${result.results.length} relevant code sections (${result.stats.searchTimeMs}ms):
2859
+ `);
2860
+ result.results.forEach((r, i) => {
2861
+ const relevance = (r.rerankScore * 100).toFixed(1);
2862
+ lines.push(`${i + 1}. ${r.filepath} (${relevance}% relevant)`);
2863
+ lines.push(` Symbol: ${r.symbolPath}`);
2864
+ lines.push(` Language: ${r.language}`);
2865
+ lines.push(` Lines: ${r.startLine}-${r.endLine}`);
2866
+ lines.push(` Code:`);
2867
+ const codeLines = r.content.split("\n");
2868
+ codeLines.slice(0, Math.min(codeLines.length, 20)).forEach((line) => {
2869
+ lines.push(` ${line}`);
2870
+ });
2871
+ if (codeLines.length > 20) {
2872
+ lines.push(` ... (${codeLines.length - 20} more lines)`);
2873
+ }
2874
+ lines.push("");
2875
+ });
2876
+ return lines.join("\n");
2877
+ }
2878
+
2879
+ // tools/fastapply/prompts.ts
2880
+ var EDIT_FILE_TOOL_DESCRIPTION = `Use this tool to make an edit to an existing file.
2881
+
2882
+ This will be read by a less intelligent model, which will quickly apply the edit. You should make it clear what the edit is, while also minimizing the unchanged code you write.
2883
+
2884
+ When writing the edit, you should specify each edit in sequence, with the special comment // ... existing code ... to represent unchanged code in between edited lines.
2885
+
2886
+ For example:
2887
+
2888
+ // ... existing code ...
2889
+ FIRST_EDIT
2890
+ // ... existing code ...
2891
+ SECOND_EDIT
2892
+ // ... existing code ...
2893
+ THIRD_EDIT
2894
+ // ... existing code ...
2895
+
2896
+ You should still bias towards repeating as few lines of the original file as possible to convey the change.
2897
+ But, each edit should contain minimally sufficient context of unchanged lines around the code you're editing to resolve ambiguity.
2898
+
2899
+ DO NOT omit spans of pre-existing code (or comments) without using the // ... existing code ... comment to indicate its absence. If you omit the existing code comment, the model may inadvertently delete these lines.
2900
+
2901
+ If you plan on deleting a section, you must provide context before and after to delete it.
2902
+
2903
+ Make sure it is clear what the edit should be, and where it should be applied.
2904
+ Make edits to a file in a single edit_file call instead of multiple edit_file calls to the same file. The apply model can handle many distinct edits at once.`;
2905
+ var EDIT_FILE_SYSTEM_PROMPT = `When the user is asking for edits to their code, use the edit_file tool to highlight the changes necessary and add comments to indicate where unchanged code has been skipped. For example:
2906
+
2907
+ // ... existing code ...
2908
+ {{ edit_1 }}
2909
+ // ... existing code ...
2910
+ {{ edit_2 }}
2911
+ // ... existing code ...
2912
+
2913
+ Often this will mean that the start/end of the file will be skipped, but that's okay! Rewrite the entire file ONLY if specifically requested. Always provide a brief explanation of the updates, unless the user specifically requests only the code.
2914
+
2915
+ These edit codeblocks are also read by a less intelligent language model, colloquially called the apply model, to update the file. To help specify the edit to the apply model, you will be very careful when generating the codeblock to not introduce ambiguity. You will specify all unchanged regions (code and comments) of the file with "// ... existing code ..." comment markers. This will ensure the apply model will not delete existing unchanged code or comments when editing the file.`;
2916
+
2917
+ // tools/fastapply/openai.ts
2918
+ var editFileTool = {
2919
+ type: "function",
2920
+ function: {
2921
+ name: "edit_file",
2922
+ description: EDIT_FILE_TOOL_DESCRIPTION,
2923
+ parameters: {
2924
+ type: "object",
2925
+ properties: {
2926
+ target_filepath: {
2927
+ type: "string",
2928
+ description: "The path of the target file to modify"
2929
+ },
2930
+ instructions: {
2931
+ type: "string",
2932
+ description: "A single sentence describing what you are changing (first person)"
2933
+ },
2934
+ code_edit: {
2935
+ type: "string",
2936
+ description: "The lazy edit with // ... existing code ... markers"
2937
+ }
2938
+ },
2939
+ required: ["target_filepath", "instructions", "code_edit"]
2940
+ }
2941
+ }
2942
+ };
2943
+ async function execute2(input, config) {
2944
+ return executeEditFile(input, config);
2945
+ }
2946
+ function getSystemPrompt2() {
2947
+ return EDIT_FILE_SYSTEM_PROMPT;
2948
+ }
2949
+ function formatResult3(result) {
2950
+ if (!result.success) {
2951
+ return `Error editing file: ${result.error}`;
2952
+ }
2953
+ const { changes } = result;
2954
+ const summary = [
2955
+ changes.linesAdded && `+${changes.linesAdded} lines`,
2956
+ changes.linesRemoved && `-${changes.linesRemoved} lines`,
2957
+ changes.linesModified && `~${changes.linesModified} lines modified`
2958
+ ].filter(Boolean).join(", ");
2959
+ if (result.udiff) {
2960
+ return `Successfully applied changes to ${result.filepath}:
2961
+
2962
+ ${result.udiff}
2963
+
2964
+ Summary: ${summary}`;
2965
+ }
2966
+ return `Successfully applied changes to ${result.filepath}. ${summary}`;
2967
+ }
2968
+ function createEditFileTool(config = {}) {
2969
+ const toolDef = {
2970
+ ...editFileTool,
2971
+ ...config.description && {
2972
+ function: {
2973
+ ...editFileTool.function,
2974
+ description: config.description
2975
+ }
2976
+ }
2977
+ };
2978
+ return Object.assign({}, toolDef, {
2979
+ execute: async (input) => {
2980
+ const parsedInput = typeof input === "string" ? JSON.parse(input) : input;
2981
+ return execute2(parsedInput, config);
2982
+ },
2983
+ formatResult: (result) => {
2984
+ return formatResult3(result);
2985
+ },
2986
+ getSystemPrompt: () => {
2987
+ return getSystemPrompt2();
2988
+ }
2989
+ });
2990
+ }
2991
+
2992
+ // factories/openai.ts
2993
+ var OpenAIToolFactory = class {
2994
+ constructor(config) {
2995
+ this.config = config;
2996
+ }
2997
+ /**
2998
+ * Create an OpenAI-compatible warp grep tool
2999
+ *
3000
+ * @param toolConfig - Tool configuration (apiKey inherited from MorphClient)
3001
+ * @returns OpenAI ChatCompletionTool with execute and formatResult methods
3002
+ */
3003
+ createWarpGrepTool(toolConfig) {
3004
+ return createMorphWarpGrepTool({
3005
+ ...toolConfig,
3006
+ apiKey: this.config.apiKey
3007
+ });
3008
+ }
3009
+ /**
3010
+ * Create an OpenAI-compatible codebase search tool
3011
+ *
3012
+ * @param toolConfig - Tool configuration with repoId (apiKey inherited from MorphClient)
3013
+ * @returns OpenAI ChatCompletionTool with execute and formatResult methods
3014
+ */
3015
+ createCodebaseSearchTool(toolConfig) {
3016
+ return createCodebaseSearchTool({
3017
+ ...toolConfig,
3018
+ apiKey: this.config.apiKey
3019
+ });
3020
+ }
3021
+ /**
3022
+ * Create an OpenAI-compatible edit file tool
3023
+ *
3024
+ * @param toolConfig - Tool configuration (morphApiKey inherited from MorphClient)
3025
+ * @returns OpenAI ChatCompletionTool with execute and formatResult methods
3026
+ */
3027
+ createEditFileTool(toolConfig = {}) {
3028
+ return createEditFileTool({
3029
+ ...toolConfig,
3030
+ morphApiKey: this.config.apiKey
3031
+ });
3032
+ }
3033
+ };
3034
+
3035
+ // tools/warp_grep/anthropic.ts
3036
+ var INPUT_SCHEMA = {
3037
+ type: "object",
3038
+ properties: {
3039
+ query: { type: "string", description: "Free-form repository question" }
3040
+ },
3041
+ required: ["query"]
3042
+ };
3043
+ async function execute3(input, config) {
3044
+ const parsed = typeof input === "string" ? JSON.parse(input) : input;
3045
+ const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
3046
+ const result = await runWarpGrep({
3047
+ query: parsed.query,
3048
+ repoRoot: config.repoRoot,
3049
+ provider,
3050
+ excludes: config.excludes,
3051
+ includes: config.includes,
3052
+ debug: config.debug ?? false,
3053
+ apiKey: config.apiKey
3054
+ });
3055
+ const finish = result.finish;
3056
+ if (result.terminationReason !== "completed" || !finish?.metadata) {
3057
+ return { success: false, error: "Search did not complete" };
3058
+ }
3059
+ const contexts = (finish.resolved ?? []).map((r) => ({
3060
+ file: r.path,
3061
+ content: r.content
3062
+ }));
3063
+ return { success: true, contexts, summary: finish.payload };
3064
+ }
3065
+ function createMorphWarpGrepTool2(config) {
3066
+ const tool4 = {
3067
+ name: "morph-warp-grep",
3068
+ description: config.description ?? WARP_GREP_DESCRIPTION,
3069
+ input_schema: INPUT_SCHEMA
3070
+ };
3071
+ return Object.assign(tool4, {
3072
+ execute: async (input) => {
3073
+ return execute3(input, config);
3074
+ },
3075
+ formatResult: (result) => {
3076
+ return formatResult(result);
3077
+ },
3078
+ getSystemPrompt: () => {
3079
+ return getSystemPrompt();
3080
+ }
3081
+ });
3082
+ }
3083
+
3084
+ // tools/codebase_search/anthropic.ts
3085
+ function createCodebaseSearchTool2(config) {
3086
+ const toolDefinition = {
3087
+ name: "codebase_search",
3088
+ description: CODEBASE_SEARCH_DESCRIPTION,
3089
+ input_schema: {
3090
+ type: "object",
3091
+ properties: {
3092
+ explanation: {
3093
+ type: "string",
3094
+ description: "One sentence explanation as to why this tool is being used, and how it contributes to the goal."
3095
+ },
3096
+ query: {
3097
+ type: "string",
3098
+ description: 'A complete question about what you want to understand. Ask as if talking to a colleague: "How does X work?", "What happens when Y?", "Where is Z handled?"'
3099
+ },
3100
+ target_directories: {
3101
+ type: "array",
3102
+ items: { type: "string" },
3103
+ description: "Prefix directory paths to limit search scope (single directory only, no glob patterns). Use [] to search entire repo."
3104
+ },
3105
+ limit: {
3106
+ type: "number",
3107
+ description: "Maximum results to return (default: 10)"
3108
+ }
3109
+ },
3110
+ required: ["query", "target_directories", "explanation"]
3111
+ },
3112
+ cache_control: { type: "ephemeral" }
3113
+ };
3114
+ return Object.assign(toolDefinition, {
3115
+ execute: async (input) => {
3116
+ return executeCodebaseSearch(input, config);
3117
+ },
3118
+ formatResult: (result) => {
3119
+ return formatResult4(result);
3120
+ },
3121
+ getSystemPrompt: () => {
3122
+ return CODEBASE_SEARCH_SYSTEM_PROMPT;
3123
+ }
3124
+ });
3125
+ }
3126
+ function formatResult4(result) {
3127
+ if (!result.success) {
3128
+ return `Search failed: ${result.error}`;
3129
+ }
3130
+ if (result.results.length === 0) {
3131
+ return "No matching code found. Try rephrasing your query or broadening the search scope.";
3132
+ }
3133
+ const lines = [];
3134
+ lines.push(`Found ${result.results.length} relevant code sections (searched ${result.stats.candidatesRetrieved} candidates in ${result.stats.searchTimeMs}ms):
3135
+ `);
3136
+ result.results.forEach((r, i) => {
3137
+ const relevance = (r.rerankScore * 100).toFixed(1);
3138
+ lines.push(`${i + 1}. ${r.filepath} (${relevance}% relevant)`);
3139
+ lines.push(` Symbol: ${r.symbolPath}`);
3140
+ lines.push(` Language: ${r.language}`);
3141
+ lines.push(` Lines: ${r.startLine}-${r.endLine}`);
3142
+ lines.push(` Code:`);
3143
+ const codeLines = r.content.split("\n");
3144
+ codeLines.slice(0, Math.min(codeLines.length, 20)).forEach((line) => {
3145
+ lines.push(` ${line}`);
3146
+ });
3147
+ if (codeLines.length > 20) {
3148
+ lines.push(` ... (${codeLines.length - 20} more lines)`);
3149
+ }
3150
+ lines.push("");
3151
+ });
3152
+ return lines.join("\n");
3153
+ }
3154
+
3155
+ // tools/fastapply/anthropic.ts
3156
+ var editFileTool2 = {
3157
+ name: "edit_file",
3158
+ description: EDIT_FILE_TOOL_DESCRIPTION,
3159
+ input_schema: {
3160
+ type: "object",
3161
+ properties: {
3162
+ target_filepath: {
3163
+ type: "string",
3164
+ description: "The path of the target file to modify"
3165
+ },
3166
+ instructions: {
3167
+ type: "string",
3168
+ description: "A single sentence describing what you are changing (first person)"
3169
+ },
3170
+ code_edit: {
3171
+ type: "string",
3172
+ description: "The lazy edit with // ... existing code ... markers"
3173
+ }
3174
+ },
3175
+ required: ["target_filepath", "instructions", "code_edit"]
3176
+ }
3177
+ };
3178
+ function formatResult5(result) {
3179
+ if (!result.success) {
3180
+ return `Error editing file: ${result.error}`;
3181
+ }
3182
+ const { changes } = result;
3183
+ const summary = [
3184
+ changes.linesAdded && `+${changes.linesAdded} lines`,
3185
+ changes.linesRemoved && `-${changes.linesRemoved} lines`,
3186
+ changes.linesModified && `~${changes.linesModified} lines modified`
3187
+ ].filter(Boolean).join(", ");
3188
+ if (result.udiff) {
3189
+ return `Successfully applied changes to ${result.filepath}:
3190
+
3191
+ ${result.udiff}
3192
+
3193
+ Summary: ${summary}`;
3194
+ }
3195
+ return `Successfully applied changes to ${result.filepath}. ${summary}`;
3196
+ }
3197
+ function createEditFileTool2(config = {}) {
3198
+ const toolDef = {
3199
+ ...editFileTool2,
3200
+ ...config.description && { description: config.description }
3201
+ };
3202
+ return Object.assign({}, toolDef, {
3203
+ execute: async (input) => {
3204
+ return executeEditFile(input, config);
3205
+ },
3206
+ formatResult: (result) => {
3207
+ return formatResult5(result);
3208
+ },
3209
+ getSystemPrompt: () => {
3210
+ return EDIT_FILE_SYSTEM_PROMPT;
3211
+ }
3212
+ });
3213
+ }
3214
+
3215
+ // factories/anthropic.ts
3216
+ var AnthropicToolFactory = class {
3217
+ constructor(config) {
3218
+ this.config = config;
3219
+ }
3220
+ /**
3221
+ * Create an Anthropic-compatible warp grep tool
3222
+ *
3223
+ * @param toolConfig - Tool configuration (apiKey inherited from MorphClient)
3224
+ * @returns Anthropic Tool with execute and formatResult methods
3225
+ */
3226
+ createWarpGrepTool(toolConfig) {
3227
+ return createMorphWarpGrepTool2({
3228
+ ...toolConfig,
3229
+ apiKey: this.config.apiKey
3230
+ });
3231
+ }
3232
+ /**
3233
+ * Create an Anthropic-compatible codebase search tool
3234
+ *
3235
+ * @param toolConfig - Tool configuration with repoId (apiKey inherited from MorphClient)
3236
+ * @returns Anthropic Tool with execute and formatResult methods
3237
+ */
3238
+ createCodebaseSearchTool(toolConfig) {
3239
+ return createCodebaseSearchTool2({
3240
+ ...toolConfig,
3241
+ apiKey: this.config.apiKey
3242
+ });
3243
+ }
3244
+ /**
3245
+ * Create an Anthropic-compatible edit file tool
3246
+ *
3247
+ * @param toolConfig - Tool configuration (morphApiKey inherited from MorphClient)
3248
+ * @returns Anthropic Tool with execute and formatResult methods
3249
+ */
3250
+ createEditFileTool(toolConfig = {}) {
3251
+ return createEditFileTool2({
3252
+ ...toolConfig,
3253
+ morphApiKey: this.config.apiKey
3254
+ });
3255
+ }
3256
+ };
3257
+
3258
+ // tools/warp_grep/vercel.ts
3259
+ var import_ai = require("ai");
3260
+ var import_zod = require("zod");
3261
+ var warpGrepSchema = import_zod.z.object({
3262
+ query: import_zod.z.string().describe("Free-form repository question")
3263
+ });
3264
+ function createMorphWarpGrepTool3(config) {
3265
+ return (0, import_ai.tool)({
3266
+ description: config.description ?? WARP_GREP_DESCRIPTION,
3267
+ inputSchema: warpGrepSchema,
3268
+ execute: async (params) => {
3269
+ const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
3270
+ const result = await runWarpGrep({
3271
+ query: params.query,
3272
+ repoRoot: config.repoRoot,
3273
+ provider,
3274
+ excludes: config.excludes,
3275
+ includes: config.includes,
3276
+ debug: config.debug ?? false,
3277
+ apiKey: config.apiKey
3278
+ });
3279
+ const finish = result.finish;
3280
+ if (result.terminationReason !== "completed" || !finish?.metadata) {
3281
+ return { success: false, error: "Search did not complete" };
3282
+ }
3283
+ const contexts = (finish.resolved ?? []).map((r) => ({
3284
+ file: r.path,
3285
+ content: r.content
3286
+ }));
3287
+ return { success: true, contexts, summary: finish.payload };
3288
+ }
3289
+ });
3290
+ }
3291
+
3292
+ // tools/codebase_search/vercel.ts
3293
+ var import_ai2 = require("ai");
3294
+ var import_zod2 = require("zod");
3295
+ function createCodebaseSearchTool3(config) {
3296
+ const schema = import_zod2.z.object({
3297
+ query: import_zod2.z.string().describe('A complete question about what you want to understand. Ask as if talking to a colleague: "How does X work?", "What happens when Y?", "Where is Z handled?"'),
3298
+ target_directories: import_zod2.z.array(import_zod2.z.string()).describe("Prefix directory paths to limit search scope (single directory only, no glob patterns). Use [] to search entire repo."),
3299
+ explanation: import_zod2.z.string().describe("One sentence explanation as to why this tool is being used, and how it contributes to the goal."),
3300
+ limit: import_zod2.z.number().optional().describe("Max results to return (default: 10)")
3301
+ });
3302
+ return (0, import_ai2.tool)({
3303
+ description: CODEBASE_SEARCH_DESCRIPTION,
3304
+ inputSchema: schema,
3305
+ execute: async (params) => {
3306
+ const { query, target_directories, explanation, limit } = params;
3307
+ const result = await executeCodebaseSearch(
3308
+ { query, target_directories, explanation, limit },
3309
+ config
3310
+ );
3311
+ if (!result.success) {
3312
+ return {
3313
+ error: result.error,
3314
+ results: []
3315
+ };
3316
+ }
3317
+ return {
3318
+ found: result.results.length,
3319
+ searchTime: `${result.stats.searchTimeMs}ms`,
3320
+ results: result.results.map((r) => ({
3321
+ file: r.filepath,
3322
+ symbol: r.symbolPath,
3323
+ lines: `${r.startLine}-${r.endLine}`,
3324
+ language: r.language,
3325
+ relevance: `${(r.rerankScore * 100).toFixed(1)}%`,
3326
+ code: r.content
3327
+ }))
3328
+ };
3329
+ }
3330
+ });
3331
+ }
3332
+
3333
+ // tools/fastapply/vercel.ts
3334
+ var import_ai3 = require("ai");
3335
+ var import_zod3 = require("zod");
3336
+ var editFileSchema = import_zod3.z.object({
3337
+ target_filepath: import_zod3.z.string().describe("The path of the target file to modify"),
3338
+ instructions: import_zod3.z.string().describe("A single sentence describing what you are changing (first person)"),
3339
+ code_edit: import_zod3.z.string().describe("The lazy edit with // ... existing code ... markers")
3340
+ });
3341
+ var editFileTool3 = (0, import_ai3.tool)({
3342
+ description: EDIT_FILE_TOOL_DESCRIPTION,
3343
+ inputSchema: editFileSchema,
3344
+ execute: async (params) => {
3345
+ const result = await executeEditFile({
3346
+ target_filepath: params.target_filepath,
3347
+ instructions: params.instructions,
3348
+ code_edit: params.code_edit
3349
+ });
3350
+ if (!result.success) {
3351
+ throw new Error(`Failed to edit file: ${result.error}`);
3352
+ }
3353
+ return {
3354
+ success: true,
3355
+ filepath: result.filepath,
3356
+ changes: result.changes,
3357
+ udiff: result.udiff
3358
+ };
3359
+ }
3360
+ });
3361
+ function createEditFileTool3(config = {}) {
3362
+ const schema = import_zod3.z.object({
3363
+ target_filepath: import_zod3.z.string().describe("The path of the target file to modify"),
3364
+ instructions: import_zod3.z.string().describe("A single sentence describing what you are changing (first person)"),
3365
+ code_edit: import_zod3.z.string().describe("The lazy edit with // ... existing code ... markers")
3366
+ });
3367
+ return (0, import_ai3.tool)({
3368
+ description: config.description || EDIT_FILE_TOOL_DESCRIPTION,
3369
+ inputSchema: schema,
3370
+ execute: async (params) => {
3371
+ const result = await executeEditFile(
3372
+ {
3373
+ target_filepath: params.target_filepath,
3374
+ instructions: params.instructions,
3375
+ code_edit: params.code_edit
3376
+ },
3377
+ config
3378
+ );
3379
+ if (!result.success) {
3380
+ throw new Error(`Failed to edit file: ${result.error}`);
3381
+ }
3382
+ return {
3383
+ success: true,
3384
+ filepath: result.filepath,
3385
+ changes: result.changes,
3386
+ udiff: result.udiff
3387
+ };
3388
+ }
3389
+ });
3390
+ }
3391
+
3392
+ // factories/vercel.ts
3393
+ var VercelToolFactory = class {
3394
+ constructor(config) {
3395
+ this.config = config;
3396
+ }
3397
+ /**
3398
+ * Create a Vercel AI SDK-compatible warp grep tool
3399
+ *
3400
+ * @param toolConfig - Tool configuration (apiKey inherited from MorphClient)
3401
+ * @returns Vercel AI SDK tool
3402
+ */
3403
+ createWarpGrepTool(toolConfig) {
3404
+ return createMorphWarpGrepTool3({
3405
+ ...toolConfig,
3406
+ apiKey: this.config.apiKey
3407
+ });
3408
+ }
3409
+ /**
3410
+ * Create a Vercel AI SDK-compatible codebase search tool
3411
+ *
3412
+ * @param toolConfig - Tool configuration with repoId (apiKey inherited from MorphClient)
3413
+ * @returns Vercel AI SDK tool
3414
+ */
3415
+ createCodebaseSearchTool(toolConfig) {
3416
+ return createCodebaseSearchTool3({
3417
+ ...toolConfig,
3418
+ apiKey: this.config.apiKey
3419
+ });
3420
+ }
3421
+ /**
3422
+ * Create a Vercel AI SDK-compatible edit file tool
3423
+ *
3424
+ * @param toolConfig - Tool configuration (morphApiKey inherited from MorphClient)
3425
+ * @returns Vercel AI SDK tool
3426
+ */
3427
+ createEditFileTool(toolConfig = {}) {
3428
+ return createEditFileTool3({
3429
+ ...toolConfig,
3430
+ morphApiKey: this.config.apiKey
3431
+ });
3432
+ }
3433
+ };
3434
+
1610
3435
  // client.ts
1611
3436
  var MorphClient = class {
1612
3437
  /** Client configuration */
@@ -1615,12 +3440,20 @@ var MorphClient = class {
1615
3440
  fastApply;
1616
3441
  /** CodebaseSearch tool for semantic code search */
1617
3442
  codebaseSearch;
3443
+ /** WarpGrep tool for fast code search using ripgrep */
3444
+ warpGrep;
1618
3445
  /** Browser tool for AI-powered browser automation */
1619
3446
  browser;
1620
3447
  /** Git tool for version control operations */
1621
3448
  git;
1622
3449
  /** Model routers for intelligent model selection */
1623
3450
  routers;
3451
+ /** OpenAI-compatible tool factories */
3452
+ openai;
3453
+ /** Anthropic-compatible tool factories */
3454
+ anthropic;
3455
+ /** Vercel AI SDK tool factories */
3456
+ vercel;
1624
3457
  /**
1625
3458
  * Create a new Morph SDK client
1626
3459
  *
@@ -1649,6 +3482,12 @@ var MorphClient = class {
1649
3482
  timeout: config.timeout,
1650
3483
  retryConfig: config.retryConfig
1651
3484
  });
3485
+ this.warpGrep = new WarpGrepClient({
3486
+ apiKey: config.apiKey,
3487
+ debug: config.debug,
3488
+ timeout: config.timeout,
3489
+ retryConfig: config.retryConfig
3490
+ });
1652
3491
  this.browser = new BrowserClient({
1653
3492
  apiKey: config.apiKey,
1654
3493
  debug: config.debug,
@@ -1685,6 +3524,9 @@ var MorphClient = class {
1685
3524
  retryConfig: config.retryConfig
1686
3525
  })
1687
3526
  };
3527
+ this.openai = new OpenAIToolFactory(config);
3528
+ this.anthropic = new AnthropicToolFactory(config);
3529
+ this.vercel = new VercelToolFactory(config);
1688
3530
  }
1689
3531
  };
1690
3532
  // Annotate the CommonJS export names for ESM import in node: