@morphllm/morphsdk 0.2.44 → 0.2.46

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (142)
  1. package/README.md +1 -1
  2. package/dist/{chunk-TVFGHXPE.js → chunk-3FTAIJBH.js} +4 -4
  3. package/dist/chunk-5JTJOQUX.js +283 -0
  4. package/dist/chunk-5JTJOQUX.js.map +1 -0
  5. package/dist/{chunk-ZRLEAPZV.js → chunk-76DJEQEP.js} +4 -4
  6. package/dist/{chunk-W3XLPMV3.js → chunk-7HS6YXA3.js} +21 -5
  7. package/dist/{chunk-W3XLPMV3.js.map → chunk-7HS6YXA3.js.map} +1 -1
  8. package/dist/chunk-7T7YOPJV.js +82 -0
  9. package/dist/chunk-7T7YOPJV.js.map +1 -0
  10. package/dist/chunk-CL45IWIU.js +105 -0
  11. package/dist/chunk-CL45IWIU.js.map +1 -0
  12. package/dist/chunk-D6OD3IST.js +70 -0
  13. package/dist/chunk-D6OD3IST.js.map +1 -0
  14. package/dist/{chunk-PEGZVGG4.js → chunk-G4AWE5A2.js} +4 -4
  15. package/dist/{chunk-OUEJ6XEO.js → chunk-GJU7UOFL.js} +4 -4
  16. package/dist/{chunk-Q7PDN7TS.js → chunk-GZMUGMOZ.js} +1 -1
  17. package/dist/{chunk-Q7PDN7TS.js.map → chunk-GZMUGMOZ.js.map} +1 -1
  18. package/dist/chunk-JYBVRF72.js +1 -0
  19. package/dist/{chunk-EYHXBQQX.js → chunk-LVY5LPEX.js} +70 -10
  20. package/dist/chunk-LVY5LPEX.js.map +1 -0
  21. package/dist/{chunk-GDR65N2J.js → chunk-OXHGFHEU.js} +53 -26
  22. package/dist/chunk-OXHGFHEU.js.map +1 -0
  23. package/dist/{chunk-VBBJGWHY.js → chunk-P2XKFWFD.js} +2 -2
  24. package/dist/chunk-PABIV7X6.js +76 -0
  25. package/dist/chunk-PABIV7X6.js.map +1 -0
  26. package/dist/{chunk-GTOXMAF2.js → chunk-SWQPIKPY.js} +44 -3
  27. package/dist/chunk-SWQPIKPY.js.map +1 -0
  28. package/dist/chunk-TJIUA27P.js +94 -0
  29. package/dist/chunk-TJIUA27P.js.map +1 -0
  30. package/dist/{chunk-O5DA5V5S.js → chunk-UBX7QYBD.js} +4 -4
  31. package/dist/{chunk-X4CQ6D3G.js → chunk-UIZT3KVJ.js} +4 -4
  32. package/dist/{chunk-UYBIKZPM.js → chunk-UXYK7WZX.js} +2 -2
  33. package/dist/chunk-WETRQJGU.js +129 -0
  34. package/dist/chunk-WETRQJGU.js.map +1 -0
  35. package/dist/client-BGctTHu9.d.ts +318 -0
  36. package/dist/client.cjs +1954 -53
  37. package/dist/client.cjs.map +1 -1
  38. package/dist/client.d.ts +14 -110
  39. package/dist/client.js +29 -4
  40. package/dist/core-DxiUwyBe.d.ts +156 -0
  41. package/dist/git/client.cjs +52 -25
  42. package/dist/git/client.cjs.map +1 -1
  43. package/dist/git/client.d.ts +17 -8
  44. package/dist/git/client.js +1 -1
  45. package/dist/git/index.cjs +52 -25
  46. package/dist/git/index.cjs.map +1 -1
  47. package/dist/git/index.d.ts +1 -1
  48. package/dist/git/index.js +2 -2
  49. package/dist/git/types.cjs.map +1 -1
  50. package/dist/git/types.d.ts +20 -2
  51. package/dist/index.cjs +2033 -55
  52. package/dist/index.cjs.map +1 -1
  53. package/dist/index.d.ts +8 -1
  54. package/dist/index.js +48 -6
  55. package/dist/tools/browser/anthropic.cjs +1 -0
  56. package/dist/tools/browser/anthropic.cjs.map +1 -1
  57. package/dist/tools/browser/anthropic.js +1 -1
  58. package/dist/tools/browser/core.cjs +69 -9
  59. package/dist/tools/browser/core.cjs.map +1 -1
  60. package/dist/tools/browser/core.js +1 -1
  61. package/dist/tools/browser/index.cjs +69 -9
  62. package/dist/tools/browser/index.cjs.map +1 -1
  63. package/dist/tools/browser/index.js +1 -1
  64. package/dist/tools/browser/openai.cjs +1 -0
  65. package/dist/tools/browser/openai.cjs.map +1 -1
  66. package/dist/tools/browser/openai.js +1 -1
  67. package/dist/tools/browser/types.cjs.map +1 -1
  68. package/dist/tools/browser/types.d.ts +2 -0
  69. package/dist/tools/browser/vercel.cjs +1 -0
  70. package/dist/tools/browser/vercel.cjs.map +1 -1
  71. package/dist/tools/browser/vercel.js +1 -1
  72. package/dist/tools/codebase_search/anthropic.js +2 -2
  73. package/dist/tools/codebase_search/index.js +9 -9
  74. package/dist/tools/codebase_search/openai.js +2 -2
  75. package/dist/tools/codebase_search/vercel.js +2 -2
  76. package/dist/tools/fastapply/anthropic.js +2 -2
  77. package/dist/tools/fastapply/index.js +7 -7
  78. package/dist/tools/fastapply/openai.js +2 -2
  79. package/dist/tools/fastapply/vercel.js +2 -2
  80. package/dist/tools/index.js +7 -7
  81. package/dist/tools/warp_grep/agent/config.cjs +80 -1
  82. package/dist/tools/warp_grep/agent/config.cjs.map +1 -1
  83. package/dist/tools/warp_grep/agent/config.js +1 -1
  84. package/dist/tools/warp_grep/agent/parser.cjs +43 -2
  85. package/dist/tools/warp_grep/agent/parser.cjs.map +1 -1
  86. package/dist/tools/warp_grep/agent/parser.js +1 -1
  87. package/dist/tools/warp_grep/agent/prompt.cjs +89 -45
  88. package/dist/tools/warp_grep/agent/prompt.cjs.map +1 -1
  89. package/dist/tools/warp_grep/agent/prompt.d.ts +1 -1
  90. package/dist/tools/warp_grep/agent/prompt.js +1 -1
  91. package/dist/tools/warp_grep/agent/runner.cjs +229 -49
  92. package/dist/tools/warp_grep/agent/runner.cjs.map +1 -1
  93. package/dist/tools/warp_grep/agent/runner.js +4 -4
  94. package/dist/tools/warp_grep/agent/types.js +0 -1
  95. package/dist/tools/warp_grep/anthropic.cjs +311 -83
  96. package/dist/tools/warp_grep/anthropic.cjs.map +1 -1
  97. package/dist/tools/warp_grep/anthropic.d.ts +75 -12
  98. package/dist/tools/warp_grep/anthropic.js +21 -8
  99. package/dist/tools/warp_grep/index.cjs +415 -126
  100. package/dist/tools/warp_grep/index.cjs.map +1 -1
  101. package/dist/tools/warp_grep/index.d.ts +17 -4
  102. package/dist/tools/warp_grep/index.js +29 -21
  103. package/dist/tools/warp_grep/openai.cjs +314 -83
  104. package/dist/tools/warp_grep/openai.cjs.map +1 -1
  105. package/dist/tools/warp_grep/openai.d.ts +73 -29
  106. package/dist/tools/warp_grep/openai.js +21 -8
  107. package/dist/tools/warp_grep/providers/command.cjs +80 -1
  108. package/dist/tools/warp_grep/providers/command.cjs.map +1 -1
  109. package/dist/tools/warp_grep/providers/command.js +2 -2
  110. package/dist/tools/warp_grep/providers/local.cjs +80 -1
  111. package/dist/tools/warp_grep/providers/local.cjs.map +1 -1
  112. package/dist/tools/warp_grep/providers/local.js +2 -2
  113. package/dist/tools/warp_grep/vercel.cjs +291 -57
  114. package/dist/tools/warp_grep/vercel.cjs.map +1 -1
  115. package/dist/tools/warp_grep/vercel.d.ts +40 -19
  116. package/dist/tools/warp_grep/vercel.js +17 -8
  117. package/package.json +1 -1
  118. package/dist/chunk-AFEPUNAO.js +0 -15
  119. package/dist/chunk-AFEPUNAO.js.map +0 -1
  120. package/dist/chunk-EYHXBQQX.js.map +0 -1
  121. package/dist/chunk-GDR65N2J.js.map +0 -1
  122. package/dist/chunk-GTOXMAF2.js.map +0 -1
  123. package/dist/chunk-HKZB23U7.js +0 -85
  124. package/dist/chunk-HKZB23U7.js.map +0 -1
  125. package/dist/chunk-IQHKEIQX.js +0 -54
  126. package/dist/chunk-IQHKEIQX.js.map +0 -1
  127. package/dist/chunk-JKFVDM62.js +0 -45
  128. package/dist/chunk-JKFVDM62.js.map +0 -1
  129. package/dist/chunk-KL4YVZRF.js +0 -57
  130. package/dist/chunk-KL4YVZRF.js.map +0 -1
  131. package/dist/chunk-SMR2T5BT.js +0 -104
  132. package/dist/chunk-SMR2T5BT.js.map +0 -1
  133. package/dist/chunk-XYPMN4A3.js +0 -1
  134. /package/dist/{chunk-TVFGHXPE.js.map → chunk-3FTAIJBH.js.map} +0 -0
  135. /package/dist/{chunk-ZRLEAPZV.js.map → chunk-76DJEQEP.js.map} +0 -0
  136. /package/dist/{chunk-PEGZVGG4.js.map → chunk-G4AWE5A2.js.map} +0 -0
  137. /package/dist/{chunk-OUEJ6XEO.js.map → chunk-GJU7UOFL.js.map} +0 -0
  138. /package/dist/{chunk-XYPMN4A3.js.map → chunk-JYBVRF72.js.map} +0 -0
  139. /package/dist/{chunk-VBBJGWHY.js.map → chunk-P2XKFWFD.js.map} +0 -0
  140. /package/dist/{chunk-O5DA5V5S.js.map → chunk-UBX7QYBD.js.map} +0 -0
  141. /package/dist/{chunk-X4CQ6D3G.js.map → chunk-UIZT3KVJ.js.map} +0 -0
  142. /package/dist/{chunk-UYBIKZPM.js.map → chunk-UXYK7WZX.js.map} +0 -0
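The headline change in this release is the new warp_grep code-search surface: `WarpGrepClient`, `LocalRipgrepProvider`, `CommandExecProvider`, and the `AnthropicToolFactory`/`OpenAIToolFactory`/`VercelToolFactory` exports are added to the package root (see the `index_exports` hunk below). A minimal usage sketch follows, adapted from the JSDoc example embedded in the bundled `WarpGrepClient.execute` code further down; the import specifier and constructor options shown here are assumptions, not confirmed API:

```typescript
// Sketch only: adapted from the WarpGrepClient JSDoc example in the bundled
// code below. The import specifier and constructor options are assumptions.
import { WarpGrepClient } from "@morphllm/morphsdk";

async function main() {
  const client = new WarpGrepClient({ apiKey: process.env.MORPH_API_KEY });

  // execute() runs the ripgrep-backed search agent against a local repo root.
  const result = await client.execute({
    query: "Find authentication middleware",
    repoRoot: ".",
  });

  if (result.success) {
    for (const ctx of result.contexts ?? []) {
      console.log(`File: ${ctx.file}`);
      console.log(ctx.content);
    }
  } else {
    console.error(result.error);
  }
}

main().catch(console.error);
```

By default the search runs through `LocalRipgrepProvider`, which shells out to `rg`, so ripgrep must be installed locally (the bundled code otherwise surfaces "Failed to spawn ripgrep (rg). Ensure it is installed.").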
package/dist/index.cjs CHANGED
@@ -31,14 +31,20 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
31
31
  var index_exports = {};
32
32
  __export(index_exports, {
33
33
  AnthropicRouter: () => AnthropicRouter,
34
+ AnthropicToolFactory: () => AnthropicToolFactory,
34
35
  BrowserClient: () => BrowserClient,
35
36
  CodebaseSearchClient: () => CodebaseSearchClient,
37
+ CommandExecProvider: () => CommandExecProvider,
36
38
  FastApplyClient: () => FastApplyClient,
37
39
  GeminiRouter: () => GeminiRouter,
40
+ LocalRipgrepProvider: () => LocalRipgrepProvider,
38
41
  MorphClient: () => MorphClient,
39
42
  MorphGit: () => MorphGit,
40
43
  OpenAIRouter: () => OpenAIRouter,
41
- RawRouter: () => RawRouter
44
+ OpenAIToolFactory: () => OpenAIToolFactory,
45
+ RawRouter: () => RawRouter,
46
+ VercelToolFactory: () => VercelToolFactory,
47
+ WarpGrepClient: () => WarpGrepClient
42
48
  });
43
49
  module.exports = __toCommonJS(index_exports);
44
50
 
@@ -476,15 +482,46 @@ var BrowserClient = class {
476
482
  return executeBrowserTask(input, this.config);
477
483
  }
478
484
  async createTask(input) {
485
+ const apiUrl = this.config.apiUrl || DEFAULT_CONFIG2.apiUrl;
486
+ const debug = this.config.debug || false;
487
+ if (debug) {
488
+ console.log(`[Browser] createTask: "${input.task.slice(0, 60)}..." url=${input.url || "none"}`);
489
+ console.log(`[Browser] Calling async endpoint: ${apiUrl}/browser-task/async`);
490
+ }
491
+ const headers = { "Content-Type": "application/json" };
492
+ if (this.config.apiKey) headers["Authorization"] = `Bearer ${this.config.apiKey}`;
493
+ const response = await fetch(`${apiUrl}/browser-task/async`, {
494
+ method: "POST",
495
+ headers,
496
+ body: JSON.stringify({
497
+ task: input.task,
498
+ url: input.url,
499
+ max_steps: input.max_steps ?? 10,
500
+ model: input.model ?? "morph-computer-use-v0",
501
+ viewport_width: input.viewport_width ?? 1280,
502
+ viewport_height: input.viewport_height ?? 720,
503
+ external_id: input.external_id,
504
+ repo_id: input.repo_id,
505
+ commit_id: input.commit_id,
506
+ record_video: input.record_video ?? false,
507
+ video_width: input.video_width ?? input.viewport_width ?? 1280,
508
+ video_height: input.video_height ?? input.viewport_height ?? 720,
509
+ allow_resizing: input.allow_resizing ?? false,
510
+ structured_output: "schema" in input ? stringifyStructuredOutput(input.schema) : void 0
511
+ })
512
+ });
513
+ if (!response.ok) {
514
+ const errorText = await response.text().catch(() => response.statusText);
515
+ if (debug) console.error(`[Browser] Error: ${response.status} - ${errorText}`);
516
+ throw new Error(`HTTP ${response.status}: ${errorText}`);
517
+ }
518
+ const result = await response.json();
519
+ if (debug) {
520
+ console.log(`[Browser] \u2705 Task created: recording_id=${result.recording_id ?? "none"} debug_url=${result.debugUrl ? "available" : "none"}`);
521
+ }
479
522
  if ("schema" in input) {
480
- const taskInput = {
481
- ...input,
482
- structured_output: stringifyStructuredOutput(input.schema)
483
- };
484
- const result = await executeBrowserTask(taskInput, this.config);
485
523
  return wrapTaskResponseWithSchema(result, this.config, input.schema);
486
524
  } else {
487
- const result = await executeBrowserTask(input, this.config);
488
525
  return wrapTaskResponse(result, this.config);
489
526
  }
490
527
  }
@@ -561,6 +598,7 @@ async function executeBrowserTask(input, config = {}) {
561
598
  record_video: input.record_video ?? false,
562
599
  video_width: input.video_width ?? input.viewport_width ?? 1280,
563
600
  video_height: input.video_height ?? input.viewport_height ?? 720,
601
+ allow_resizing: input.allow_resizing ?? false,
564
602
  structured_output: input.structured_output
565
603
  })
566
604
  },
@@ -754,7 +792,21 @@ function wrapTaskResponse(result, config) {
754
792
  task_id: result.task_id || "",
755
793
  liveUrl: result.task_id ? generateLiveUrl(result.task_id, config) : result.debugUrl || "",
756
794
  complete: async (pollConfig) => {
757
- return pollTaskUntilComplete(result.task_id, config, pollConfig);
795
+ if (result.task_id) {
796
+ return pollTaskUntilComplete(result.task_id, config, pollConfig);
797
+ }
798
+ if (result.recording_id) {
799
+ const recording = await waitForRecording(
800
+ result.recording_id,
801
+ config,
802
+ pollConfig
803
+ );
804
+ return {
805
+ ...result,
806
+ recording_status: recording.status
807
+ };
808
+ }
809
+ throw new Error("Cannot poll completion: no task_id or recording_id available");
758
810
  },
759
811
  // Add Steel live session helpers - either functional or error-throwing
760
812
  getLiveUrl: result.debugUrl ? (options) => buildLiveUrl(result.debugUrl, options) : () => {
@@ -785,8 +837,22 @@ function wrapTaskResponseWithSchema(result, config, schema) {
785
837
  task_id: result.task_id || "",
786
838
  liveUrl: result.task_id ? generateLiveUrl(result.task_id, config) : result.debugUrl || "",
787
839
  complete: async (pollConfig) => {
788
- const finalResult = await pollTaskUntilComplete(result.task_id, config, pollConfig);
789
- return parseStructuredTaskOutput(finalResult, schema);
840
+ if (result.task_id) {
841
+ const finalResult = await pollTaskUntilComplete(result.task_id, config, pollConfig);
842
+ return parseStructuredTaskOutput(finalResult, schema);
843
+ }
844
+ if (result.recording_id) {
845
+ const recording = await waitForRecording(
846
+ result.recording_id,
847
+ config,
848
+ pollConfig
849
+ );
850
+ return {
851
+ ...parsed,
852
+ recording_status: recording.status
853
+ };
854
+ }
855
+ throw new Error("Cannot poll completion: no task_id or recording_id available");
790
856
  },
791
857
  // Add Steel live session helpers - either functional or error-throwing
792
858
  getLiveUrl: result.debugUrl ? (options) => buildLiveUrl(result.debugUrl, options) : () => {
@@ -838,10 +904,1095 @@ async function checkHealth(config = {}) {
838
904
  }
839
905
  }
840
906
 
907
+ // tools/warp_grep/agent/config.ts
908
+ var AGENT_CONFIG = {
909
+ // Give the model freedom; failsafe cap to prevent infinite loops
910
+ MAX_ROUNDS: 10,
911
+ TIMEOUT_MS: 3e4
912
+ };
913
+ var BUILTIN_EXCLUDES = [
914
+ // Version control
915
+ ".git",
916
+ ".svn",
917
+ ".hg",
918
+ ".bzr",
919
+ // Dependencies
920
+ "node_modules",
921
+ "bower_components",
922
+ ".pnpm",
923
+ ".yarn",
924
+ "vendor",
925
+ "packages",
926
+ "Pods",
927
+ ".bundle",
928
+ // Python
929
+ "__pycache__",
930
+ ".pytest_cache",
931
+ ".mypy_cache",
932
+ ".ruff_cache",
933
+ ".venv",
934
+ "venv",
935
+ ".tox",
936
+ ".nox",
937
+ ".eggs",
938
+ "*.egg-info",
939
+ // Build outputs
940
+ "dist",
941
+ "build",
942
+ "out",
943
+ "output",
944
+ "target",
945
+ "_build",
946
+ ".next",
947
+ ".nuxt",
948
+ ".output",
949
+ ".vercel",
950
+ ".netlify",
951
+ // Cache directories
952
+ ".cache",
953
+ ".parcel-cache",
954
+ ".turbo",
955
+ ".nx",
956
+ ".gradle",
957
+ // IDE/Editor
958
+ ".idea",
959
+ ".vscode",
960
+ ".vs",
961
+ // Coverage
962
+ "coverage",
963
+ ".coverage",
964
+ "htmlcov",
965
+ ".nyc_output",
966
+ // Temporary
967
+ "tmp",
968
+ "temp",
969
+ ".tmp",
970
+ ".temp",
971
+ // Lock files
972
+ "package-lock.json",
973
+ "yarn.lock",
974
+ "pnpm-lock.yaml",
975
+ "bun.lockb",
976
+ "Cargo.lock",
977
+ "Gemfile.lock",
978
+ "poetry.lock",
979
+ // Binary/minified
980
+ "*.min.js",
981
+ "*.min.css",
982
+ "*.bundle.js",
983
+ "*.wasm",
984
+ "*.so",
985
+ "*.dll",
986
+ "*.pyc",
987
+ "*.map",
988
+ "*.js.map",
989
+ // Hidden directories catch-all
990
+ ".*"
991
+ ];
992
+ var DEFAULT_EXCLUDES = (process.env.MORPH_WARP_GREP_EXCLUDE || "").split(",").map((s) => s.trim()).filter(Boolean).concat(BUILTIN_EXCLUDES);
993
+ var DEFAULT_MODEL = "morph-warp-grep";
994
+
995
+ // tools/warp_grep/agent/prompt.ts
996
+ var SYSTEM_PROMPT = `You are a code search agent. Your task is to find all relevant code for a given query.
997
+
998
+ <workflow>
999
+ You have exactly 4 turns. The 4th turn MUST be a \`finish\` call. Each turn allows up to 8 parallel tool calls.
1000
+
1001
+ - Turn 1: Map the territory OR dive deep (based on query specificity)
1002
+ - Turn 2-3: Refine based on findings
1003
+ - Turn 4: MUST call \`finish\` with all relevant code locations
1004
+ - You MAY call \`finish\` early if confident\u2014but never before at least 1 search turn.
1005
+
1006
+ Remember, if the task feels easy to you, it is strongly desirable to call \`finish\` early using fewer turns, but quality over speed.
1007
+ </workflow>
1008
+
1009
+ <tools>
1010
+ ### \`analyse <path> [pattern]\`
1011
+ Directory tree or file search. Shows structure of a path, optionally filtered by regex pattern.
1012
+ - \`path\`: Required. Directory or file path (use \`.\` for repo root)
1013
+ - \`pattern\`: Optional regex to filter results
1014
+
1015
+ Examples:
1016
+ \`\`\`
1017
+ analyse .
1018
+ analyse src/api
1019
+ analyse . ".*\\.ts$"
1020
+ analyse src "test.*"
1021
+ \`\`\`
1022
+
1023
+ ### \`read <path>[:start-end]\`
1024
+ Read file contents. Line range is 1-based, inclusive.
1025
+ - Returns numbered lines for easy reference
1026
+ - Omit range to read entire file
1027
+
1028
+ Examples:
1029
+ \`\`\`
1030
+ read src/main.py
1031
+ read src/db/conn.py:10-50
1032
+ read package.json:1-20
1033
+ \`\`\`
1034
+
1035
+ ### \`grep '<pattern>' <path>\`
1036
+ Ripgrep search. Finds pattern matches across files.
1037
+ - \`'<pattern>'\`: Required. Regex pattern wrapped in single quotes
1038
+ - \`<path>\`: Required. Directory or file to search (use \`.\` for repo root)
1039
+
1040
+ Examples:
1041
+ \`\`\`
1042
+ grep 'class.*Service' src/
1043
+ grep 'def authenticate' .
1044
+ grep 'import.*from' src/components/
1045
+ grep 'TODO' .
1046
+ \`\`\`
1047
+
1048
+ ### \`finish <file1:ranges> [file2:ranges ...]\`
1049
+ Submit final answer with all relevant code locations.
1050
+ - Include generous line ranges\u2014don't be stingy with context
1051
+ - Ranges are comma-separated: \`file.py:10-30,50-60\`
1052
+ - ALWAYS include import statements at the top of files (usually lines 1-20)
1053
+ - If code spans multiple files, include ALL of them
1054
+ - Small files can be returned in full
1055
+
1056
+ Examples:
1057
+ \`\`\`
1058
+ finish src/auth.py:1-15,25-50,75-80 src/models/user.py:1-10,20-45
1059
+ finish src/index.ts:1-100
1060
+ \`\`\`
1061
+ </tools>
1062
+
1063
+ <strategy>
1064
+ **Before your first tool call, classify the query:**
1065
+
1066
+ | Query Type | Turn 1 Strategy | Early Finish? |
1067
+ |------------|-----------------|---------------|
1068
+ | **Specific** (function name, error string, unique identifier) | 8 parallel greps on likely paths | Often by turn 2 |
1069
+ | **Conceptual** (how does X work, where is Y handled) | analyse + 2-3 broad greps | Rarely early |
1070
+ | **Exploratory** (find all tests, list API endpoints) | analyse at multiple depths | Usually needs 3 turns |
1071
+
1072
+ **Parallel call patterns:**
1073
+ - **Shotgun grep**: Same pattern, 8 different directories\u2014fast coverage
1074
+ - **Variant grep**: 8 pattern variations (synonyms, naming conventions)\u2014catches inconsistent codebases
1075
+ - **Funnel**: 1 analyse + 7 greps\u2014orient and search simultaneously
1076
+ - **Deep read**: 8 reads on files you already identified\u2014gather full context fast
1077
+ </strategy>
1078
+
1079
+ <output_format>
1080
+ EVERY response MUST follow this exact format:
1081
+
1082
+ 1. First, wrap your reasoning in \`<think>...</think>\` tags containing:
1083
+ - Query classification (specific/conceptual/exploratory)
1084
+ - Confidence estimate (can I finish in 1-2 turns?)
1085
+ - This turn's parallel strategy
1086
+ - What signals would let me finish early?
1087
+
1088
+ 2. Then, output tool calls wrapped in \`<tool_call>...</tool_call>\` tags, one per line.
1089
+
1090
+ Example:
1091
+ \`\`\`
1092
+ <think>
1093
+ This is a specific query about authentication. I'll grep for auth-related patterns.
1094
+ High confidence I can finish in 2 turns if I find the auth module.
1095
+ Strategy: Shotgun grep across likely directories.
1096
+ </think>
1097
+ <tool_call>grep 'authenticate' src/</tool_call>
1098
+ <tool_call>grep 'login' src/</tool_call>
1099
+ <tool_call>analyse src/auth</tool_call>
1100
+ \`\`\`
1101
+
1102
+ No commentary outside \`<think>\`. No explanations after tool calls.
1103
+ </output_format>
1104
+
1105
+ <finishing_requirements>
1106
+ When calling \`finish\`:
1107
+ - Include the import section (typically lines 1-20) of each file
1108
+ - Include all function/class definitions that are relevant
1109
+ - Include any type definitions, interfaces, or constants used
1110
+ - Better to over-include than leave the user missing context
1111
+ - If unsure about boundaries, include more rather than less
1112
+ </finishing_requirements>
1113
+
1114
+ Begin your exploration now to find code relevant to the query.`;
1115
+ function getSystemPrompt() {
1116
+ return SYSTEM_PROMPT;
1117
+ }
1118
+
1119
+ // tools/warp_grep/agent/parser.ts
1120
+ var LLMResponseParseError = class extends Error {
1121
+ constructor(message) {
1122
+ super(message);
1123
+ this.name = "LLMResponseParseError";
1124
+ }
1125
+ };
1126
+ var VALID_COMMANDS = ["analyse", "grep", "read", "finish"];
1127
+ function preprocessText(text) {
1128
+ let processed = text.replace(/<think>[\s\S]*?<\/think>/gi, "");
1129
+ const openingTagRegex = /<tool_call>|<tool>/gi;
1130
+ const closingTagRegex = /<\/tool_call>|<\/tool>/gi;
1131
+ const openingMatches = processed.match(openingTagRegex) || [];
1132
+ const closingMatches = processed.match(closingTagRegex) || [];
1133
+ if (openingMatches.length > closingMatches.length) {
1134
+ const lastClosingMatch = /<\/tool_call>|<\/tool>/gi;
1135
+ let lastClosingIndex = -1;
1136
+ let match;
1137
+ while ((match = lastClosingMatch.exec(processed)) !== null) {
1138
+ lastClosingIndex = match.index + match[0].length;
1139
+ }
1140
+ if (lastClosingIndex > 0) {
1141
+ processed = processed.slice(0, lastClosingIndex);
1142
+ }
1143
+ }
1144
+ const toolCallLines = [];
1145
+ const toolTagRegex = /<tool_call>([\s\S]*?)<\/tool_call>|<tool>([\s\S]*?)<\/tool>/gi;
1146
+ let tagMatch;
1147
+ while ((tagMatch = toolTagRegex.exec(processed)) !== null) {
1148
+ const content = (tagMatch[1] || tagMatch[2] || "").trim();
1149
+ if (content) {
1150
+ const lines = content.split(/\r?\n/).map((l) => l.trim()).filter((l) => l);
1151
+ toolCallLines.push(...lines);
1152
+ }
1153
+ }
1154
+ const allLines = processed.split(/\r?\n/).map((l) => l.trim());
1155
+ for (const line of allLines) {
1156
+ if (!line) continue;
1157
+ if (line.startsWith("<")) continue;
1158
+ const firstWord = line.split(/\s/)[0];
1159
+ if (VALID_COMMANDS.includes(firstWord)) {
1160
+ if (!toolCallLines.includes(line)) {
1161
+ toolCallLines.push(line);
1162
+ }
1163
+ }
1164
+ }
1165
+ return toolCallLines;
1166
+ }
1167
+ var LLMResponseParser = class {
1168
+ finishSpecSplitRe = /,(?=[^,\s]+:)/;
1169
+ parse(text) {
1170
+ if (typeof text !== "string") {
1171
+ throw new TypeError("Command text must be a string.");
1172
+ }
1173
+ const lines = preprocessText(text);
1174
+ const commands = [];
1175
+ let finishAccumulator = null;
1176
+ lines.forEach((line, idx) => {
1177
+ if (!line || line.startsWith("#")) return;
1178
+ const ctx = { lineNumber: idx + 1, raw: line };
1179
+ const parts = this.splitLine(line, ctx);
1180
+ if (parts.length === 0) return;
1181
+ const cmd = parts[0];
1182
+ switch (cmd) {
1183
+ case "analyse":
1184
+ this.handleAnalyse(parts, ctx, commands);
1185
+ break;
1186
+ case "grep":
1187
+ this.handleGrep(parts, ctx, commands);
1188
+ break;
1189
+ case "read":
1190
+ this.handleRead(parts, ctx, commands);
1191
+ break;
1192
+ case "finish":
1193
+ finishAccumulator = this.handleFinish(parts, ctx, finishAccumulator);
1194
+ break;
1195
+ default:
1196
+ break;
1197
+ }
1198
+ });
1199
+ if (finishAccumulator) {
1200
+ const map = finishAccumulator;
1201
+ const entries = [...map.entries()];
1202
+ const filesPayload = entries.map(([path4, ranges]) => ({
1203
+ path: path4,
1204
+ lines: [...ranges].sort((a, b) => a[0] - b[0])
1205
+ }));
1206
+ commands.push({ name: "finish", arguments: { files: filesPayload } });
1207
+ }
1208
+ return commands;
1209
+ }
1210
+ splitLine(line, ctx) {
1211
+ try {
1212
+ const parts = [];
1213
+ let current = "";
1214
+ let inSingle = false;
1215
+ for (let i = 0; i < line.length; i++) {
1216
+ const ch = line[i];
1217
+ if (ch === "'" && line[i - 1] !== "\\") {
1218
+ inSingle = !inSingle;
1219
+ current += ch;
1220
+ } else if (!inSingle && /\s/.test(ch)) {
1221
+ if (current) {
1222
+ parts.push(current);
1223
+ current = "";
1224
+ }
1225
+ } else {
1226
+ current += ch;
1227
+ }
1228
+ }
1229
+ if (current) parts.push(current);
1230
+ return parts;
1231
+ } catch {
1232
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: Unable to parse line.`);
1233
+ }
1234
+ }
1235
+ handleAnalyse(parts, ctx, commands) {
1236
+ if (parts.length < 2) {
1237
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: analyse requires <path>`);
1238
+ }
1239
+ const path4 = parts[1];
1240
+ const pattern = parts[2]?.replace(/^"|"$/g, "") ?? null;
1241
+ commands.push({ name: "analyse", arguments: { path: path4, pattern } });
1242
+ }
1243
+ // no glob tool in MCP
1244
+ handleGrep(parts, ctx, commands) {
1245
+ if (parts.length < 3) {
1246
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: grep requires '<pattern>' and <path>`);
1247
+ }
1248
+ const pat = parts[1];
1249
+ if (!pat.startsWith("'") || !pat.endsWith("'")) {
1250
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: grep pattern must be single-quoted`);
1251
+ }
1252
+ commands.push({ name: "grep", arguments: { pattern: pat.slice(1, -1), path: parts[2] } });
1253
+ }
1254
+ handleRead(parts, ctx, commands) {
1255
+ if (parts.length < 2) {
1256
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: read requires <path> or <path>:<start-end>`);
1257
+ }
1258
+ const spec = parts[1];
1259
+ const rangeIdx = spec.indexOf(":");
1260
+ if (rangeIdx === -1) {
1261
+ commands.push({ name: "read", arguments: { path: spec } });
1262
+ return;
1263
+ }
1264
+ const path4 = spec.slice(0, rangeIdx);
1265
+ const range = spec.slice(rangeIdx + 1);
1266
+ const [s, e] = range.split("-").map((v) => parseInt(v, 10));
1267
+ if (!Number.isFinite(s) || !Number.isFinite(e)) {
1268
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid read range '${range}'`);
1269
+ }
1270
+ commands.push({ name: "read", arguments: { path: path4, start: s, end: e } });
1271
+ }
1272
+ handleFinish(parts, ctx, acc) {
1273
+ const map = acc ?? /* @__PURE__ */ new Map();
1274
+ const args = parts.slice(1);
1275
+ for (const token of args) {
1276
+ const [path4, rangesText] = token.split(":", 2);
1277
+ if (!path4 || !rangesText) {
1278
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid finish token '${token}'`);
1279
+ }
1280
+ const rangeSpecs = rangesText.split(",").filter(Boolean);
1281
+ for (const spec of rangeSpecs) {
1282
+ const [s, e] = spec.split("-").map((v) => parseInt(v, 10));
1283
+ if (!Number.isFinite(s) || !Number.isFinite(e) || e < s) {
1284
+ throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid range '${spec}'`);
1285
+ }
1286
+ const arr = map.get(path4) ?? [];
1287
+ arr.push([s, e]);
1288
+ map.set(path4, arr);
1289
+ }
1290
+ }
1291
+ return map;
1292
+ }
1293
+ };
1294
+
1295
+ // tools/warp_grep/tools/read.ts
1296
+ async function toolRead(provider, args) {
1297
+ const res = await provider.read({ path: args.path, start: args.start, end: args.end });
1298
+ return res.lines.join("\n");
1299
+ }
1300
+
1301
+ // tools/warp_grep/tools/analyse.ts
1302
+ async function toolAnalyse(provider, args) {
1303
+ const list = await provider.analyse({
1304
+ path: args.path,
1305
+ pattern: args.pattern ?? null,
1306
+ maxResults: args.maxResults ?? 100,
1307
+ maxDepth: args.maxDepth ?? 2
1308
+ });
1309
+ if (!list.length) return "empty";
1310
+ return list.map((e) => `${" ".repeat(e.depth)}- ${e.type === "dir" ? "[D]" : "[F]"} ${e.name}`).join("\n");
1311
+ }
1312
+
1313
+ // tools/warp_grep/agent/formatter.ts
1314
+ var ToolOutputFormatter = class {
1315
+ format(toolName, args, output, options = {}) {
1316
+ const name = (toolName ?? "").trim();
1317
+ if (!name) {
1318
+ return "";
1319
+ }
1320
+ const payload = output?.toString?.()?.trim?.() ?? "";
1321
+ const isError = Boolean(options.isError);
1322
+ const safeArgs = args ?? {};
1323
+ if (!payload && !isError) {
1324
+ return "";
1325
+ }
1326
+ switch (name) {
1327
+ case "read":
1328
+ return this.formatRead(safeArgs, payload, isError);
1329
+ case "analyse":
1330
+ return this.formatAnalyse(safeArgs, payload, isError);
1331
+ case "grep":
1332
+ return this.formatGrep(safeArgs, payload, isError);
1333
+ default:
1334
+ return payload ? `<tool_output>
1335
+ ${payload}
1336
+ </tool_output>` : "";
1337
+ }
1338
+ }
1339
+ formatRead(args, payload, isError) {
1340
+ if (isError) {
1341
+ return payload;
1342
+ }
1343
+ const path4 = this.asString(args.path) || "...";
1344
+ return `<file path="${path4}">
1345
+ ${payload}
1346
+ </file>`;
1347
+ }
1348
+ formatAnalyse(args, payload, isError) {
1349
+ const path4 = this.asString(args.path) || ".";
1350
+ if (isError) {
1351
+ return `<analyse_results path="${path4}" status="error">
1352
+ ${payload}
1353
+ </analyse_results>`;
1354
+ }
1355
+ return `<analyse_results path="${path4}">
1356
+ ${payload}
1357
+ </analyse_results>`;
1358
+ }
1359
+ formatGrep(args, payload, isError) {
1360
+ const pattern = this.asString(args.pattern);
1361
+ const path4 = this.asString(args.path);
1362
+ const attributes = [];
1363
+ if (pattern !== void 0) {
1364
+ attributes.push(`pattern="${pattern}"`);
1365
+ }
1366
+ if (path4 !== void 0) {
1367
+ attributes.push(`path="${path4}"`);
1368
+ }
1369
+ if (isError) {
1370
+ attributes.push('status="error"');
1371
+ }
1372
+ const attrText = attributes.length ? ` ${attributes.join(" ")}` : "";
1373
+ return `<grep_output${attrText}>
1374
+ ${payload}
1375
+ </grep_output>`;
1376
+ }
1377
+ asString(value) {
1378
+ if (value === null || value === void 0) {
1379
+ return void 0;
1380
+ }
1381
+ return String(value);
1382
+ }
1383
+ };
1384
+ var sharedFormatter = new ToolOutputFormatter();
1385
+ function formatAgentToolOutput(toolName, args, output, options = {}) {
1386
+ return sharedFormatter.format(toolName, args, output, options);
1387
+ }
1388
+
1389
+ // tools/warp_grep/agent/grep_helpers.ts
1390
+ var GrepState = class {
1391
+ seenLines = /* @__PURE__ */ new Set();
1392
+ isNew(path4, lineNumber) {
1393
+ const key = this.makeKey(path4, lineNumber);
1394
+ return !this.seenLines.has(key);
1395
+ }
1396
+ add(path4, lineNumber) {
1397
+ this.seenLines.add(this.makeKey(path4, lineNumber));
1398
+ }
1399
+ makeKey(path4, lineNumber) {
1400
+ return `${path4}:${lineNumber}`;
1401
+ }
1402
+ };
1403
+ var MAX_GREP_OUTPUT_CHARS_PER_TURN = 6e4;
1404
+ function extractMatchFields(payload) {
1405
+ const text = payload.replace(/\r?\n$/, "");
1406
+ if (!text || text.startsWith("[error]")) {
1407
+ return null;
1408
+ }
1409
+ const firstSep = text.indexOf(":");
1410
+ if (firstSep === -1) {
1411
+ return null;
1412
+ }
1413
+ let filePath = text.slice(0, firstSep).trim();
1414
+ if (!filePath) {
1415
+ return null;
1416
+ }
1417
+ if (filePath.startsWith("./") || filePath.startsWith(".\\")) {
1418
+ filePath = filePath.slice(2);
1419
+ }
1420
+ const remainder = text.slice(firstSep + 1);
1421
+ const secondSep = remainder.indexOf(":");
1422
+ if (secondSep === -1) {
1423
+ return null;
1424
+ }
1425
+ const linePart = remainder.slice(0, secondSep);
1426
+ const lineNumber = Number.parseInt(linePart, 10);
1427
+ if (!Number.isInteger(lineNumber) || lineNumber <= 0) {
1428
+ return null;
1429
+ }
1430
+ let contentSegment = remainder.slice(secondSep + 1);
1431
+ const columnSep = contentSegment.indexOf(":");
1432
+ if (columnSep !== -1 && /^\d+$/.test(contentSegment.slice(0, columnSep))) {
1433
+ contentSegment = contentSegment.slice(columnSep + 1);
1434
+ }
1435
+ const content = contentSegment.trim();
1436
+ if (!content) {
1437
+ return null;
1438
+ }
1439
+ return { path: filePath, lineNumber, content };
1440
+ }
1441
+ function parseAndFilterGrepOutput(rawOutput, state) {
1442
+ const matches = [];
1443
+ if (typeof rawOutput !== "string" || !rawOutput.trim()) {
1444
+ return matches;
1445
+ }
1446
+ for (const line of rawOutput.split(/\r?\n/)) {
1447
+ const fields = extractMatchFields(line);
1448
+ if (!fields) {
1449
+ continue;
1450
+ }
1451
+ if (state.isNew(fields.path, fields.lineNumber)) {
1452
+ matches.push(fields);
1453
+ state.add(fields.path, fields.lineNumber);
1454
+ }
1455
+ }
1456
+ return matches;
1457
+ }
1458
+ function truncateOutput(payload, maxChars) {
1459
+ if (payload.length <= maxChars) {
1460
+ return payload;
1461
+ }
1462
+ const note = "... (output truncated)";
1463
+ const available = maxChars - note.length - 1;
1464
+ if (available <= 0) {
1465
+ return note;
1466
+ }
1467
+ if (payload.length <= available) {
1468
+ return `${payload.slice(0, available).replace(/\n$/, "")}
1469
+ ${note}`;
1470
+ }
1471
+ const core = payload.slice(0, Math.max(0, available - 1));
1472
+ const trimmed = core.replace(/\n$/, "").replace(/\s+$/, "");
1473
+ const snippet = trimmed ? `${trimmed}\u2026` : "\u2026";
1474
+ return `${snippet}
1475
+ ${note}`;
1476
+ }
1477
+ function formatTurnGrepOutput(matches, maxChars = MAX_GREP_OUTPUT_CHARS_PER_TURN) {
1478
+ if (!matches || matches.length === 0) {
1479
+ return "No new matches found.";
1480
+ }
1481
+ const matchesByFile = /* @__PURE__ */ new Map();
1482
+ for (const match of matches) {
1483
+ if (!matchesByFile.has(match.path)) {
1484
+ matchesByFile.set(match.path, []);
1485
+ }
1486
+ matchesByFile.get(match.path).push(match);
1487
+ }
1488
+ const lines = [];
1489
+ const sortedPaths = Array.from(matchesByFile.keys()).sort();
1490
+ sortedPaths.forEach((filePath, index) => {
1491
+ if (index > 0) {
1492
+ lines.push("");
1493
+ }
1494
+ lines.push(filePath);
1495
+ const sortedMatches = matchesByFile.get(filePath).slice().sort((a, b) => a.lineNumber - b.lineNumber);
1496
+ for (const match of sortedMatches) {
1497
+ lines.push(`${match.lineNumber}:${match.content}`);
1498
+ }
1499
+ });
1500
+ return truncateOutput(lines.join("\n"), maxChars);
1501
+ }
1502
+
1503
+ // tools/warp_grep/tools/finish.ts
1504
+ async function readFinishFiles(repoRoot, files, reader) {
1505
+ const out = [];
1506
+ for (const f of files) {
1507
+ const ranges = mergeRanges(f.lines);
1508
+ const chunks = [];
1509
+ for (const [s, e] of ranges) {
1510
+ const lines = await reader(f.path, s, e);
1511
+ chunks.push(lines.join("\n"));
1512
+ }
1513
+ out.push({ path: f.path, ranges, content: chunks.join("\n") });
1514
+ }
1515
+ return out;
1516
+ }
1517
+ function mergeRanges(ranges) {
1518
+ if (!ranges.length) return [];
1519
+ const sorted = [...ranges].sort((a, b) => a[0] - b[0]);
1520
+ const merged = [];
1521
+ let [cs, ce] = sorted[0];
1522
+ for (let i = 1; i < sorted.length; i++) {
1523
+ const [s, e] = sorted[i];
1524
+ if (s <= ce + 1) {
1525
+ ce = Math.max(ce, e);
1526
+ } else {
1527
+ merged.push([cs, ce]);
1528
+ cs = s;
1529
+ ce = e;
1530
+ }
1531
+ }
1532
+ merged.push([cs, ce]);
1533
+ return merged;
1534
+ }
1535
+
1536
+ // tools/warp_grep/agent/runner.ts
1537
+ var import_path2 = __toESM(require("path"), 1);
1538
+ var import_promises2 = __toESM(require("fs/promises"), 1);
1539
+ var parser = new LLMResponseParser();
1540
+ async function buildInitialState(repoRoot, query) {
1541
+ try {
1542
+ const entries = await import_promises2.default.readdir(repoRoot, { withFileTypes: true });
1543
+ const dirs = entries.filter((e) => e.isDirectory()).map((d) => d.name).slice(0, 50);
1544
+ const files = entries.filter((e) => e.isFile()).map((f) => f.name).slice(0, 50);
1545
+ const parts = [
1546
+ `<repo_root>${repoRoot}</repo_root>`,
1547
+ `<top_dirs>${dirs.join(", ")}</top_dirs>`,
1548
+ `<top_files>${files.join(", ")}</top_files>`
1549
+ ];
1550
+ return parts.join("\n");
1551
+ } catch {
1552
+ return `<repo_root>${repoRoot}</repo_root>`;
1553
+ }
1554
+ }
1555
+ async function callModel(messages, model, apiKey) {
1556
+ const api = "https://api.morphllm.com/v1/chat/completions";
1557
+ const fetchPromise = fetchWithRetry(
1558
+ api,
1559
+ {
1560
+ method: "POST",
1561
+ headers: {
1562
+ "Content-Type": "application/json",
1563
+ Authorization: `Bearer ${apiKey || process.env.MORPH_API_KEY || ""}`
1564
+ },
1565
+ body: JSON.stringify({
1566
+ model,
1567
+ temperature: 0,
1568
+ max_tokens: 1024,
1569
+ messages
1570
+ })
1571
+ },
1572
+ {}
1573
+ );
1574
+ const resp = await withTimeout(fetchPromise, AGENT_CONFIG.TIMEOUT_MS, "morph-warp-grep request timed out");
1575
+ if (!resp.ok) {
1576
+ const t = await resp.text();
1577
+ throw new Error(`morph-warp-grep error ${resp.status}: ${t}`);
1578
+ }
1579
+ const data = await resp.json();
1580
+ const content = data?.choices?.[0]?.message?.content;
1581
+ if (!content || typeof content !== "string") {
1582
+ throw new Error("Invalid response from model");
1583
+ }
1584
+ return content;
1585
+ }
1586
+ async function runWarpGrep(config) {
1587
+ const repoRoot = import_path2.default.resolve(config.repoRoot || process.cwd());
1588
+ const messages = [];
1589
+ const systemMessage = { role: "system", content: getSystemPrompt() };
1590
+ messages.push(systemMessage);
1591
+ const queryContent = `<query>${config.query}</query>`;
1592
+ messages.push({ role: "user", content: queryContent });
1593
+ const initialState = await buildInitialState(repoRoot, config.query);
1594
+ messages.push({ role: "user", content: initialState });
1595
+ const maxRounds = AGENT_CONFIG.MAX_ROUNDS;
1596
+ const model = config.model || DEFAULT_MODEL;
1597
+ const provider = config.provider;
1598
+ const errors = [];
1599
+ const grepState = new GrepState();
1600
+ let finishMeta;
1601
+ let terminationReason = "terminated";
1602
+ for (let round = 1; round <= maxRounds; round += 1) {
1603
+ const assistantContent = await callModel(messages, model, config.apiKey).catch((e) => {
1604
+ errors.push({ message: e instanceof Error ? e.message : String(e) });
1605
+ return "";
1606
+ });
1607
+ if (!assistantContent) break;
1608
+ messages.push({ role: "assistant", content: assistantContent });
1609
+ let toolCalls = [];
1610
+ try {
1611
+ toolCalls = parser.parse(assistantContent);
1612
+ } catch (e) {
1613
+ errors.push({ message: e instanceof Error ? e.message : String(e) });
1614
+ terminationReason = "terminated";
1615
+ break;
1616
+ }
1617
+ if (toolCalls.length === 0) {
1618
+ errors.push({ message: "No tool calls produced by the model." });
1619
+ terminationReason = "terminated";
1620
+ break;
1621
+ }
1622
+ const finishCalls = toolCalls.filter((c) => c.name === "finish");
1623
+ const grepCalls = toolCalls.filter((c) => c.name === "grep");
1624
+ const analyseCalls = toolCalls.filter((c) => c.name === "analyse");
1625
+ const readCalls = toolCalls.filter((c) => c.name === "read");
1626
+ const formatted = [];
1627
+ const otherPromises = [];
1628
+ for (const c of analyseCalls) {
1629
+ const args = c.arguments ?? {};
1630
+ otherPromises.push(
1631
+ toolAnalyse(provider, args).then(
1632
+ (p) => formatAgentToolOutput("analyse", args, p, { isError: false }),
1633
+ (err) => formatAgentToolOutput("analyse", args, String(err), { isError: true })
1634
+ )
1635
+ );
1636
+ }
1637
+ for (const c of readCalls) {
1638
+ const args = c.arguments ?? {};
1639
+ otherPromises.push(
1640
+ toolRead(provider, args).then(
1641
+ (p) => formatAgentToolOutput("read", args, p, { isError: false }),
1642
+ (err) => formatAgentToolOutput("read", args, String(err), { isError: true })
1643
+ )
1644
+ );
1645
+ }
1646
+ const otherResults = await Promise.all(otherPromises);
1647
+ formatted.push(...otherResults);
1648
+ for (const c of grepCalls) {
1649
+ const args = c.arguments ?? {};
1650
+ try {
1651
+ const grepRes = await provider.grep({ pattern: args.pattern, path: args.path });
1652
+ const rawOutput = Array.isArray(grepRes.lines) ? grepRes.lines.join("\n") : "";
1653
+ const newMatches = parseAndFilterGrepOutput(rawOutput, grepState);
1654
+ let formattedPayload = formatTurnGrepOutput(newMatches);
1655
+ if (formattedPayload === "No new matches found.") {
1656
+ formattedPayload = "no new matches";
1657
+ }
1658
+ formatted.push(formatAgentToolOutput("grep", args, formattedPayload, { isError: false }));
1659
+ } catch (err) {
1660
+ formatted.push(formatAgentToolOutput("grep", args, String(err), { isError: true }));
1661
+ }
1662
+ }
1663
+ if (formatted.length > 0) {
1664
+ const turnsUsed = round;
1665
+ const turnsRemaining = 4 - turnsUsed;
1666
+ let turnMessage;
1667
+ if (turnsRemaining === 0) {
1668
+ turnMessage = `
1669
+
1670
+ [Turn ${turnsUsed}/4] This is your LAST turn. You MUST call the finish tool now.`;
1671
+ } else if (turnsRemaining === 1) {
1672
+ turnMessage = `
1673
+
1674
+ [Turn ${turnsUsed}/4] You have 1 turn remaining. Next turn you MUST call the finish tool.`;
1675
+ } else {
1676
+ turnMessage = `
1677
+
1678
+ [Turn ${turnsUsed}/4] You have ${turnsRemaining} turns remaining.`;
1679
+ }
1680
+ messages.push({ role: "user", content: formatted.join("\n") + turnMessage });
1681
+ }
1682
+ if (finishCalls.length) {
1683
+ const fc = finishCalls[0];
1684
+ const files = fc.arguments?.files ?? [];
1685
+ finishMeta = { files };
1686
+ terminationReason = "completed";
1687
+ break;
1688
+ }
1689
+ }
1690
+ if (terminationReason !== "completed" || !finishMeta) {
1691
+ return { terminationReason, messages, errors };
1692
+ }
1693
+ const parts = ["Relevant context found:"];
1694
+ for (const f of finishMeta.files) {
1695
+ const ranges = f.lines.map(([s, e]) => `${s}-${e}`).join(", ");
1696
+ parts.push(`- ${f.path}: ${ranges}`);
1697
+ }
1698
+ const payload = parts.join("\n");
1699
+ const resolved = await readFinishFiles(
1700
+ repoRoot,
1701
+ finishMeta.files,
1702
+ async (p, s, e) => {
1703
+ const rr = await provider.read({ path: p, start: s, end: e });
1704
+ return rr.lines.map((l) => {
1705
+ const idx = l.indexOf("|");
1706
+ return idx >= 0 ? l.slice(idx + 1) : l;
1707
+ });
1708
+ }
1709
+ );
1710
+ return {
1711
+ terminationReason: "completed",
1712
+ messages,
1713
+ finish: { payload, metadata: finishMeta, resolved }
1714
+ };
1715
+ }
1716
+
1717
+ // tools/warp_grep/providers/local.ts
1718
+ var import_promises4 = __toESM(require("fs/promises"), 1);
1719
+ var import_path4 = __toESM(require("path"), 1);
1720
+
1721
+ // tools/warp_grep/utils/ripgrep.ts
1722
+ var import_child_process = require("child_process");
1723
+ function runRipgrep(args, opts) {
1724
+ return new Promise((resolve2) => {
1725
+ const child = (0, import_child_process.spawn)("rg", args, {
1726
+ cwd: opts?.cwd,
1727
+ env: { ...process.env, ...opts?.env || {} },
1728
+ stdio: ["ignore", "pipe", "pipe"]
1729
+ });
1730
+ let stdout = "";
1731
+ let stderr = "";
1732
+ child.stdout.on("data", (d) => stdout += d.toString());
1733
+ child.stderr.on("data", (d) => stderr += d.toString());
1734
+ child.on("close", (code) => {
1735
+ resolve2({ stdout, stderr, exitCode: typeof code === "number" ? code : -1 });
1736
+ });
1737
+ child.on("error", () => {
1738
+ resolve2({ stdout: "", stderr: "Failed to spawn ripgrep (rg). Ensure it is installed.", exitCode: -1 });
1739
+ });
1740
+ });
1741
+ }
1742
+
1743
+ // tools/warp_grep/utils/paths.ts
1744
+ var import_fs = __toESM(require("fs"), 1);
1745
+ var import_path3 = __toESM(require("path"), 1);
1746
+ function resolveUnderRepo(repoRoot, targetPath) {
1747
+ const absRoot = import_path3.default.resolve(repoRoot);
1748
+ const resolved = import_path3.default.resolve(absRoot, targetPath);
1749
+ ensureWithinRepo(absRoot, resolved);
1750
+ return resolved;
1751
+ }
1752
+ function ensureWithinRepo(repoRoot, absTarget) {
1753
+ const rel = import_path3.default.relative(import_path3.default.resolve(repoRoot), import_path3.default.resolve(absTarget));
1754
+ if (rel.startsWith("..") || import_path3.default.isAbsolute(rel)) {
1755
+ throw new Error(`Path outside repository root: ${absTarget}`);
1756
+ }
1757
+ }
1758
+ function toRepoRelative(repoRoot, absPath) {
1759
+ return import_path3.default.relative(import_path3.default.resolve(repoRoot), import_path3.default.resolve(absPath));
1760
+ }
1761
+ function isSymlink(p) {
1762
+ try {
1763
+ const st = import_fs.default.lstatSync(p);
1764
+ return st.isSymbolicLink();
1765
+ } catch {
1766
+ return false;
1767
+ }
1768
+ }
1769
+ function isTextualFile(filePath, maxBytes = 2e6) {
1770
+ try {
1771
+ const st = import_fs.default.statSync(filePath);
1772
+ if (!st.isFile()) return false;
1773
+ if (st.size > maxBytes) return false;
1774
+ const fd = import_fs.default.openSync(filePath, "r");
1775
+ const buf = Buffer.alloc(512);
1776
+ const read = import_fs.default.readSync(fd, buf, 0, buf.length, 0);
1777
+ import_fs.default.closeSync(fd);
1778
+ for (let i = 0; i < read; i++) {
1779
+ const c = buf[i];
1780
+ if (c === 0) return false;
1781
+ }
1782
+ return true;
1783
+ } catch {
1784
+ return false;
1785
+ }
1786
+ }
1787
+
1788
+ // tools/warp_grep/utils/files.ts
1789
+ var import_promises3 = __toESM(require("fs/promises"), 1);
1790
+ async function readAllLines(filePath) {
1791
+ const content = await import_promises3.default.readFile(filePath, "utf8");
1792
+ return content.split(/\r?\n/);
1793
+ }
1794
+
1795
+ // tools/warp_grep/providers/local.ts
1796
+ var LocalRipgrepProvider = class {
1797
+ constructor(repoRoot, excludes = DEFAULT_EXCLUDES) {
1798
+ this.repoRoot = repoRoot;
1799
+ this.excludes = excludes;
1800
+ }
1801
+ async grep(params) {
1802
+ const abs = resolveUnderRepo(this.repoRoot, params.path);
1803
+ const stat = await import_promises4.default.stat(abs).catch(() => null);
1804
+ if (!stat) return { lines: [] };
1805
+ const targetArg = abs === import_path4.default.resolve(this.repoRoot) ? "." : toRepoRelative(this.repoRoot, abs);
1806
+ const args = [
1807
+ "--no-config",
1808
+ "--no-heading",
1809
+ "--with-filename",
1810
+ "--line-number",
1811
+ "--color=never",
1812
+ "--trim",
1813
+ "--max-columns=400",
1814
+ ...this.excludes.flatMap((e) => ["-g", `!${e}`]),
1815
+ params.pattern,
1816
+ targetArg || "."
1817
+ ];
1818
+ const res = await runRipgrep(args, { cwd: this.repoRoot });
1819
+ if (res.exitCode === -1) {
1820
+ throw new Error(res.stderr || "ripgrep (rg) execution failed.");
1821
+ }
1822
+ if (res.exitCode !== 0 && res.exitCode !== 1) {
1823
+ throw new Error(res.stderr || `ripgrep failed with code ${res.exitCode}`);
1824
+ }
1825
+ const lines = (res.stdout || "").trim().split(/\r?\n/).filter((l) => l.length > 0);
1826
+ return { lines };
1827
+ }
1828
+ async glob(params) {
1829
+ const abs = resolveUnderRepo(this.repoRoot, params.path);
1830
+ const targetArg = abs === import_path4.default.resolve(this.repoRoot) ? "." : toRepoRelative(this.repoRoot, abs);
1831
+ const args = [
1832
+ "--no-config",
1833
+ "--files",
1834
+ "-g",
1835
+ params.pattern,
1836
+ ...this.excludes.flatMap((e) => ["-g", `!${e}`]),
1837
+ targetArg || "."
1838
+ ];
1839
+ const res = await runRipgrep(args, { cwd: this.repoRoot });
1840
+ if (res.exitCode === -1) {
1841
+ throw new Error(res.stderr || "ripgrep (rg) execution failed.");
1842
+ }
1843
+ const files = (res.stdout || "").trim().split(/\r?\n/).filter((l) => l.length > 0);
1844
+ return { files };
1845
+ }
1846
+ async read(params) {
1847
+ const abs = resolveUnderRepo(this.repoRoot, params.path);
1848
+ const stat = await import_promises4.default.stat(abs).catch(() => null);
1849
+ if (!stat || !stat.isFile()) {
1850
+ throw new Error(`Path is not a file: ${params.path}`);
1851
+ }
1852
+ if (isSymlink(abs)) {
1853
+ throw new Error(`Refusing to read symlink: ${params.path}`);
1854
+ }
1855
+ if (!isTextualFile(abs)) {
1856
+ throw new Error(`Non-text or too-large file: ${params.path}`);
1857
+ }
1858
+ const lines = await readAllLines(abs);
1859
+ const total = lines.length;
1860
+ const s = params.start ?? 1;
1861
+ const e = Math.min(params.end ?? total, total);
1862
+ if (s > total && total > 0) {
1863
+ throw new Error(`start ${s} exceeds file length (${total})`);
1864
+ }
1865
+ const out = [];
1866
+ for (let i = s; i <= e; i += 1) {
1867
+ const content = lines[i - 1] ?? "";
1868
+ out.push(`${i}|${content}`);
1869
+ }
1870
+ return { lines: out };
1871
+ }
1872
+ async analyse(params) {
1873
+ const abs = resolveUnderRepo(this.repoRoot, params.path);
1874
+ const stat = await import_promises4.default.stat(abs).catch(() => null);
1875
+ if (!stat || !stat.isDirectory()) {
1876
+ return [];
1877
+ }
1878
+ const maxResults = params.maxResults ?? 100;
1879
+ const maxDepth = params.maxDepth ?? 2;
1880
+ const regex = params.pattern ? new RegExp(params.pattern) : null;
1881
+ const results = [];
1882
+ async function walk(dir, depth) {
1883
+ if (depth > maxDepth || results.length >= maxResults) return;
1884
+ const entries = await import_promises4.default.readdir(dir, { withFileTypes: true });
1885
+ for (const entry of entries) {
1886
+ const full = import_path4.default.join(dir, entry.name);
1887
+ const rel = toRepoRelative(abs, full).replace(/^[.][/\\]?/, "");
1888
+ if (DEFAULT_EXCLUDES.some((ex) => rel.split(import_path4.default.sep).includes(ex))) continue;
1889
+ if (regex && !regex.test(entry.name)) continue;
1890
+ if (results.length >= maxResults) break;
1891
+ results.push({
1892
+ name: entry.name,
1893
+ path: toRepoRelative(import_path4.default.resolve(""), full),
1894
+ // relative display
1895
+ type: entry.isDirectory() ? "dir" : "file",
1896
+ depth
1897
+ });
1898
+ if (entry.isDirectory()) {
1899
+ await walk(full, depth + 1);
1900
+ }
1901
+ }
1902
+ }
1903
+ await walk(abs, 0);
1904
+ return results;
1905
+ }
1906
+ };
1907
+
1908
+ // tools/warp_grep/core.ts
1909
+ var WarpGrepClient = class {
1910
+ config;
1911
+ constructor(config = {}) {
1912
+ this.config = {
1913
+ apiKey: config.apiKey,
1914
+ debug: config.debug,
1915
+ timeout: config.timeout,
1916
+ retryConfig: config.retryConfig
1917
+ };
1918
+ }
1919
+ /**
1920
+ * Execute a code search query
1921
+ *
1922
+ * @param input - Search parameters including query, repoRoot, and optional provider
1923
+ * @returns Search results with relevant code contexts
1924
+ *
1925
+ * @example
1926
+ * ```typescript
1927
+ * const result = await client.execute({
1928
+ * query: 'Find authentication middleware',
1929
+ * repoRoot: '.'
1930
+ * });
1931
+ *
1932
+ * if (result.success) {
1933
+ * for (const ctx of result.contexts) {
1934
+ * console.log(`File: ${ctx.file}`);
1935
+ * console.log(ctx.content);
1936
+ * }
1937
+ * }
1938
+ * ```
1939
+ */
1940
+ async execute(input) {
1941
+ const provider = input.provider ?? new LocalRipgrepProvider(input.repoRoot, input.excludes);
1942
+ const result = await runWarpGrep({
1943
+ query: input.query,
1944
+ repoRoot: input.repoRoot,
1945
+ provider,
1946
+ excludes: input.excludes,
1947
+ includes: input.includes,
1948
+ debug: input.debug ?? this.config.debug ?? false,
1949
+ apiKey: this.config.apiKey
1950
+ });
1951
+ const finish = result.finish;
1952
+ if (result.terminationReason !== "completed" || !finish?.metadata) {
1953
+ return {
1954
+ success: false,
1955
+ error: "Search did not complete"
1956
+ };
1957
+ }
1958
+ const contexts = (finish.resolved ?? []).map((r) => ({
1959
+ file: r.path,
1960
+ content: r.content
1961
+ }));
1962
+ return {
1963
+ success: true,
1964
+ contexts,
1965
+ summary: finish.payload
1966
+ };
1967
+ }
1968
+ };
1969
+ function formatResult(result) {
1970
+ if (!result.success) {
1971
+ return `Search failed: ${result.error}`;
1972
+ }
1973
+ if (!result.contexts || result.contexts.length === 0) {
1974
+ return "No relevant code found. Try rephrasing your query.";
1975
+ }
1976
+ const lines = [];
1977
+ lines.push(`Found ${result.contexts.length} relevant code sections:
1978
+ `);
1979
+ result.contexts.forEach((ctx, i) => {
1980
+ lines.push(`${i + 1}. ${ctx.file}`);
1981
+ lines.push("```");
1982
+ lines.push(ctx.content);
1983
+ lines.push("```");
1984
+ lines.push("");
1985
+ });
1986
+ if (result.summary) {
1987
+ lines.push(`Summary: ${result.summary}`);
1988
+ }
1989
+ return lines.join("\n");
1990
+ }
1991
+
841
1992
  // git/client.ts
842
1993
  var import_isomorphic_git = __toESM(require("isomorphic-git"), 1);
843
1994
  var import_node = __toESM(require("isomorphic-git/http/node"), 1);
844
- var import_fs = __toESM(require("fs"), 1);
1995
+ var import_fs2 = __toESM(require("fs"), 1);
845
1996
  var DEFAULT_PROXY_URL = "https://repos.morphllm.com";
846
1997
  var MorphGit = class {
847
1998
  apiKey;
@@ -898,12 +2049,12 @@ var MorphGit = class {
898
2049
  throw new Error(`Failed to create repository: ${error}`);
899
2050
  }
900
2051
  await import_isomorphic_git.default.init({
901
- fs: import_fs.default,
2052
+ fs: import_fs2.default,
902
2053
  dir,
903
2054
  defaultBranch
904
2055
  });
905
2056
  await import_isomorphic_git.default.addRemote({
906
- fs: import_fs.default,
2057
+ fs: import_fs2.default,
907
2058
  dir,
908
2059
  remote: "origin",
909
2060
  url: `${this.proxyUrl}/v1/repos/${repoId}`
@@ -924,7 +2075,7 @@ var MorphGit = class {
924
2075
  async clone(options) {
925
2076
  const { repoId, dir, branch = "main", depth, singleBranch = true } = options;
926
2077
  await import_isomorphic_git.default.clone({
927
- fs: import_fs.default,
2078
+ fs: import_fs2.default,
928
2079
  http: import_node.default,
929
2080
  dir,
930
2081
  url: `${this.proxyUrl}/v1/repos/${repoId}`,
@@ -941,42 +2092,65 @@ var MorphGit = class {
   * ```ts
   * await morphGit.push({
   *   dir: './my-project',
-  *   branch: 'main' // Required: explicit branch name
+  *   branch: 'main', // Required: explicit branch name
+  *   index: true // Optional: generate embeddings (default: true)
   * });
   * ```
   */
  async push(options) {
-    const { dir, remote = "origin", branch, waitForEmbeddings } = options;
+    const { dir, remote = "origin", branch, waitForEmbeddings, index = true } = options;
    if (!branch) {
      throw new Error(
        'branch is required for push operations. Specify the branch explicitly: { dir: "./my-project", branch: "main" }'
      );
    }
-    let commitHash;
+    const commitHash = await import_isomorphic_git.default.resolveRef({ fs: import_fs2.default, dir, ref: "HEAD" });
    let repoId;
-    if (waitForEmbeddings) {
-      commitHash = await import_isomorphic_git.default.resolveRef({ fs: import_fs.default, dir, ref: "HEAD" });
-      const remotes = await import_isomorphic_git.default.listRemotes({ fs: import_fs.default, dir });
-      const originRemote = remotes.find((r) => r.remote === remote);
-      if (originRemote) {
-        const match = originRemote.url.match(/\/repos\/([^\/]+)$/);
-        if (match) {
-          repoId = match[1];
-        }
+    const remotes = await import_isomorphic_git.default.listRemotes({ fs: import_fs2.default, dir });
+    const originRemote = remotes.find((r) => r.remote === remote);
+    if (originRemote) {
+      const match = originRemote.url.match(/\/repos\/([^\/]+)$/);
+      if (match) {
+        repoId = match[1];
      }
    }
    await import_isomorphic_git.default.push({
-      fs: import_fs.default,
+      fs: import_fs2.default,
      http: import_node.default,
      dir,
      remote,
      ref: branch,
      onAuth: this.getAuthCallback()
    });
-    if (waitForEmbeddings && repoId && commitHash) {
+    if (repoId && commitHash) {
+      await this.configureCommit({ repoId, commitHash, branch, index });
+    }
+    if (waitForEmbeddings && repoId && commitHash && index) {
      await this.waitForEmbeddings({ repoId, commitHash });
    }
  }
+  /**
+   * Configure commit settings on the backend after push.
+   * Sets the index flag to control embedding generation.
+   * @private
+   */
+  async configureCommit(options) {
+    const { repoId, commitHash, branch, index } = options;
+    const response = await fetch(
+      `${this.proxyUrl}/v1/repos/${repoId}/commits/${commitHash}/config`,
+      {
+        method: "POST",
+        headers: {
+          "Authorization": `Bearer ${this.apiKey}`,
+          "Content-Type": "application/json"
+        },
+        body: JSON.stringify({ index, branch })
+      }
+    );
+    if (!response.ok) {
+      console.warn(`Failed to configure commit: ${response.status}`);
+    }
+  }
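A minimal usage sketch of the new `index` flag, following the updated JSDoc above. The import path and MorphGit constructor options are assumptions not shown in this hunk, and the directory and branch names are placeholders.

```ts
import { MorphGit } from "@morphllm/morphsdk/git";

const morphGit = new MorphGit({ apiKey: process.env.MORPH_API_KEY! });

// Default behaviour: configureCommit() is called with index: true and embeddings are generated.
await morphGit.push({ dir: "./my-project", branch: "main" });

// Opt out of indexing: the commit is still configured on the backend (index: false),
// and waitForEmbeddings is skipped even when requested.
await morphGit.push({
  dir: "./my-project",
  branch: "main",
  index: false,
  waitForEmbeddings: true
});
```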
  /**
   * Pull changes from remote repository
   *
@@ -996,7 +2170,7 @@ var MorphGit = class {
      );
    }
    await import_isomorphic_git.default.pull({
-      fs: import_fs.default,
+      fs: import_fs2.default,
      http: import_node.default,
      dir,
      remote,
@@ -1065,7 +2239,7 @@ var MorphGit = class {
  async add(options) {
    const { dir, filepath } = options;
    await import_isomorphic_git.default.add({
-      fs: import_fs.default,
+      fs: import_fs2.default,
      dir,
      filepath
    });
@@ -1084,7 +2258,7 @@ var MorphGit = class {
  async remove(options) {
    const { dir, filepath } = options;
    await import_isomorphic_git.default.remove({
-      fs: import_fs.default,
+      fs: import_fs2.default,
      dir,
      filepath
    });
@@ -1101,6 +2275,7 @@ var MorphGit = class {
   *     name: 'AI Agent',
   *     email: 'ai@example.com'
   *   },
+  *   metadata: { issueId: 'PROJ-123', source: 'agent' },
   *   chatHistory: [
   *     { role: 'user', content: 'Please add a new feature' },
   *     { role: 'assistant', content: 'I will add that feature' }
@@ -1110,28 +2285,30 @@ var MorphGit = class {
   * ```
   */
  async commit(options) {
-    const { dir, message, author, chatHistory, recordingId } = options;
+    const { dir, message, author, metadata, chatHistory, recordingId } = options;
    const commitAuthor = author || {
      name: "Morph SDK",
      email: "sdk@morphllm.com"
    };
    const sha = await import_isomorphic_git.default.commit({
-      fs: import_fs.default,
+      fs: import_fs2.default,
      dir,
      message,
      author: commitAuthor
    });
-    if (chatHistory || recordingId) {
-      const metadata = {
+    if (metadata || chatHistory || recordingId) {
+      const notes = {
+        metadata,
        chatHistory,
-        recordingId
+        recordingId,
+        _version: 1
      };
      await import_isomorphic_git.default.addNote({
-        fs: import_fs.default,
+        fs: import_fs2.default,
        dir,
        ref: "refs/notes/morph-metadata",
        oid: sha,
-        note: JSON.stringify(metadata, null, 2),
+        note: JSON.stringify(notes, null, 2),
        author: commitAuthor
      });
    }
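The widened commit() options pair with the note change above. A short sketch reusing the morphGit instance from the push example; the message and metadata values are illustrative.

```ts
await morphGit.commit({
  dir: "./my-project",
  message: "Add rate limiting to the API client",
  author: { name: "AI Agent", email: "ai@example.com" },
  // Free-form metadata; stored alongside chatHistory and _version: 1 in the commit's git note.
  metadata: { issueId: "PROJ-123", source: "agent" },
  chatHistory: [
    { role: "user", content: "Please add a new feature" },
    { role: "assistant", content: "I will add that feature" }
  ]
});
```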
@@ -1155,7 +2332,7 @@ var MorphGit = class {
      throw new Error("filepath is required for status check");
    }
    const status = await import_isomorphic_git.default.status({
-      fs: import_fs.default,
+      fs: import_fs2.default,
      dir,
      filepath
    });
@@ -1175,7 +2352,7 @@ var MorphGit = class {
  async log(options) {
    const { dir, depth, ref } = options;
    const commits = await import_isomorphic_git.default.log({
-      fs: import_fs.default,
+      fs: import_fs2.default,
      dir,
      depth,
      ref
@@ -1196,7 +2373,7 @@ var MorphGit = class {
  async checkout(options) {
    const { dir, ref } = options;
    await import_isomorphic_git.default.checkout({
-      fs: import_fs.default,
+      fs: import_fs2.default,
      dir,
      ref
    });
@@ -1216,7 +2393,7 @@ var MorphGit = class {
  async branch(options) {
    const { dir, name, checkout = false } = options;
    await import_isomorphic_git.default.branch({
-      fs: import_fs.default,
+      fs: import_fs2.default,
      dir,
      ref: name,
      checkout
@@ -1235,7 +2412,7 @@ var MorphGit = class {
  async listBranches(options) {
    const { dir } = options;
    const branches = await import_isomorphic_git.default.listBranches({
-      fs: import_fs.default,
+      fs: import_fs2.default,
      dir
    });
    return branches;
@@ -1253,7 +2430,7 @@ var MorphGit = class {
  async currentBranch(options) {
    const { dir } = options;
    const branch = await import_isomorphic_git.default.currentBranch({
-      fs: import_fs.default,
+      fs: import_fs2.default,
      dir
    });
    return branch || void 0;
@@ -1271,7 +2448,7 @@ var MorphGit = class {
  async statusMatrix(options) {
    const { dir } = options;
    const matrix = await import_isomorphic_git.default.statusMatrix({
-      fs: import_fs.default,
+      fs: import_fs2.default,
      dir
    });
    return matrix.map(([filepath, HEADStatus, workdirStatus, stageStatus]) => {
@@ -1313,38 +2490,39 @@ var MorphGit = class {
  async resolveRef(options) {
    const { dir, ref } = options;
    const oid = await import_isomorphic_git.default.resolveRef({
-      fs: import_fs.default,
+      fs: import_fs2.default,
      dir,
      ref
    });
    return oid;
  }
  /**
-   * Get metadata (chat history, recording ID) attached to a commit
+   * Get notes (metadata, chat history, recording ID) attached to a commit
   *
   * @example
   * ```ts
-   * const metadata = await morphGit.getCommitMetadata({
+   * const notes = await morphGit.getCommitMetadata({
   *   dir: './my-project',
   *   commitSha: 'abc123...'
   * });
   *
-   * if (metadata) {
-   *   console.log('Chat history:', metadata.chatHistory);
-   *   console.log('Recording ID:', metadata.recordingId);
+   * if (notes) {
+   *   console.log('Metadata:', notes.metadata);
+   *   console.log('Chat history:', notes.chatHistory);
+   *   console.log('Recording ID:', notes.recordingId);
   * }
   * ```
   */
  async getCommitMetadata(options) {
    try {
      const note = await import_isomorphic_git.default.readNote({
-        fs: import_fs.default,
+        fs: import_fs2.default,
        dir: options.dir,
        ref: "refs/notes/morph-metadata",
        oid: options.commitSha
      });
-      const metadata = JSON.parse(new TextDecoder().decode(note));
-      return metadata;
+      const notes = JSON.parse(new TextDecoder().decode(note));
+      return notes;
    } catch (err) {
      return null;
    }
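Reading the note back mirrors the updated JSDoc: getCommitMetadata() returns the parsed note object or null when the commit has no note. A short sketch continuing the same example, using the resolveRef() helper shown in the hunk above for the HEAD lookup.

```ts
const head = await morphGit.resolveRef({ dir: "./my-project", ref: "HEAD" });
const notes = await morphGit.getCommitMetadata({ dir: "./my-project", commitSha: head });

if (notes) {
  console.log("Metadata:", notes.metadata);        // e.g. { issueId: "PROJ-123", source: "agent" }
  console.log("Chat history:", notes.chatHistory);
  console.log("Note format version:", notes._version);
}
```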
@@ -1555,6 +2733,718 @@ var RawRouter = class extends BaseRouter {
1555
2733
  }
1556
2734
  };
1557
2735
 
2736
+ // tools/warp_grep/prompts.ts
2737
+ var WARP_GREP_DESCRIPTION = "A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.";
2738
+
2739
+ // tools/warp_grep/openai.ts
2740
+ var TOOL_PARAMETERS = {
2741
+ type: "object",
2742
+ properties: {
2743
+ query: { type: "string", description: "Free-form repository question" }
2744
+ },
2745
+ required: ["query"]
2746
+ };
2747
+ async function execute(input, config) {
2748
+ const parsed = typeof input === "string" ? JSON.parse(input) : input;
2749
+ const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
2750
+ const result = await runWarpGrep({
2751
+ query: parsed.query,
2752
+ repoRoot: config.repoRoot,
2753
+ provider,
2754
+ excludes: config.excludes,
2755
+ includes: config.includes,
2756
+ debug: config.debug ?? false,
2757
+ apiKey: config.apiKey
2758
+ });
2759
+ const finish = result.finish;
2760
+ if (result.terminationReason !== "completed" || !finish?.metadata) {
2761
+ return { success: false, error: "Search did not complete" };
2762
+ }
2763
+ const contexts = (finish.resolved ?? []).map((r) => ({
2764
+ file: r.path,
2765
+ content: r.content
2766
+ }));
2767
+ return { success: true, contexts, summary: finish.payload };
2768
+ }
2769
+ function createMorphWarpGrepTool(config) {
2770
+ const tool4 = {
2771
+ type: "function",
2772
+ function: {
2773
+ name: "morph-warp-grep",
2774
+ description: config.description ?? WARP_GREP_DESCRIPTION,
2775
+ parameters: TOOL_PARAMETERS
2776
+ }
2777
+ };
2778
+ return Object.assign(tool4, {
2779
+ execute: async (input) => {
2780
+ return execute(input, config);
2781
+ },
2782
+ formatResult: (result) => {
2783
+ return formatResult(result);
2784
+ },
2785
+ getSystemPrompt: () => {
2786
+ return getSystemPrompt();
2787
+ }
2788
+ });
2789
+ }
2790
+
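A hedged sketch of driving the OpenAI-format warp-grep tool defined above directly. The config fields (repoRoot, apiKey, excludes) come from this code; the repository path, exclude globs, and query are placeholders, and in application code the tool is normally reached through the factories defined later in this bundle.

```ts
const warpGrep = createMorphWarpGrepTool({
  repoRoot: "/path/to/checkout",                  // illustrative local checkout
  apiKey: process.env.MORPH_API_KEY!,
  excludes: ["node_modules/**", "dist/**"]
});

// The same object is both the OpenAI tool definition and the executor.
const result = await warpGrep.execute({ query: "Where is request retry logic implemented?" });
if (result.success) {
  console.log(warpGrep.formatResult(result));      // or inspect result.contexts directly
}
```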
2791
+ // tools/codebase_search/prompts.ts
2792
+ var CODEBASE_SEARCH_DESCRIPTION = `Semantic search that finds code by meaning, not exact text.
2793
+
2794
+ Use this to explore unfamiliar codebases or ask "how/where/what" questions:
2795
+ - "How does X work?" - Find implementation details
2796
+ - "Where is Y handled?" - Locate specific functionality
2797
+ - "What happens when Z?" - Understand flow
2798
+
2799
+ The tool uses two-stage retrieval (embedding similarity + reranking) to find the most semantically relevant code chunks.
2800
+
2801
+ Returns code chunks with file paths, line ranges, and full content ranked by relevance.`;
2802
+ var CODEBASE_SEARCH_SYSTEM_PROMPT = `You have access to the codebase_search tool that performs semantic code search.
2803
+
2804
+ When searching:
2805
+ - Use natural language queries describing what you're looking for
2806
+ - Be specific about functionality, not variable names
2807
+ - Use target_directories to narrow search if you know the area
2808
+ - Results are ranked by relevance (rerank score is most important)
2809
+
2810
+ The tool returns:
2811
+ - File paths with symbol names (e.g. "src/auth.ts::AuthService@L1-L17")
2812
+ - Line ranges for precise navigation
2813
+ - Full code content for each match
2814
+ - Dual relevance scores: embedding similarity + rerank score
2815
+
2816
+ Use results to understand code or answer questions. The content is provided in full - avoid re-reading unless you need more context.`;
2817
+
2818
+ // tools/codebase_search/openai.ts
2819
+ function createCodebaseSearchTool(config) {
2820
+ const toolDefinition = {
2821
+ type: "function",
2822
+ function: {
2823
+ name: "codebase_search",
2824
+ description: CODEBASE_SEARCH_DESCRIPTION,
2825
+ parameters: {
2826
+ type: "object",
2827
+ properties: {
2828
+ query: {
2829
+ type: "string",
2830
+ description: 'A complete question about what you want to understand. Ask as if talking to a colleague: "How does X work?", "What happens when Y?", "Where is Z handled?"'
2831
+ },
2832
+ target_directories: {
2833
+ type: "array",
2834
+ items: { type: "string" },
2835
+ description: "Prefix directory paths to limit search scope (single directory only, no glob patterns). Use [] to search entire repo."
2836
+ },
2837
+ explanation: {
2838
+ type: "string",
2839
+ description: "One sentence explanation as to why this tool is being used, and how it contributes to the goal."
2840
+ },
2841
+ limit: {
2842
+ type: "number",
2843
+ description: "Maximum results to return (default: 10)"
2844
+ }
2845
+ },
2846
+ required: ["query", "target_directories", "explanation"]
2847
+ }
2848
+ }
2849
+ };
2850
+ return Object.assign(toolDefinition, {
2851
+ execute: async (input) => {
2852
+ const parsedInput = typeof input === "string" ? JSON.parse(input) : input;
2853
+ return executeCodebaseSearch(parsedInput, config);
2854
+ },
2855
+ formatResult: (result) => {
2856
+ return formatResult2(result);
2857
+ },
2858
+ getSystemPrompt: () => {
2859
+ return CODEBASE_SEARCH_SYSTEM_PROMPT;
2860
+ }
2861
+ });
2862
+ }
2863
+ function formatResult2(result) {
2864
+ if (!result.success) {
2865
+ return `Search failed: ${result.error}`;
2866
+ }
2867
+ if (result.results.length === 0) {
2868
+ return "No matching code found. Try rephrasing your query or removing directory filters.";
2869
+ }
2870
+ const lines = [];
2871
+ lines.push(`Found ${result.results.length} relevant code sections (${result.stats.searchTimeMs}ms):
2872
+ `);
2873
+ result.results.forEach((r, i) => {
2874
+ const relevance = (r.rerankScore * 100).toFixed(1);
2875
+ lines.push(`${i + 1}. ${r.filepath} (${relevance}% relevant)`);
2876
+ lines.push(` Symbol: ${r.symbolPath}`);
2877
+ lines.push(` Language: ${r.language}`);
2878
+ lines.push(` Lines: ${r.startLine}-${r.endLine}`);
2879
+ lines.push(` Code:`);
2880
+ const codeLines = r.content.split("\n");
2881
+ codeLines.slice(0, Math.min(codeLines.length, 20)).forEach((line) => {
2882
+ lines.push(` ${line}`);
2883
+ });
2884
+ if (codeLines.length > 20) {
2885
+ lines.push(` ... (${codeLines.length - 20} more lines)`);
2886
+ }
2887
+ lines.push("");
2888
+ });
2889
+ return lines.join("\n");
2890
+ }
2891
+
2892
+ // tools/fastapply/prompts.ts
2893
+ var EDIT_FILE_TOOL_DESCRIPTION = `Use this tool to make an edit to an existing file.
2894
+
2895
+ This will be read by a less intelligent model, which will quickly apply the edit. You should make it clear what the edit is, while also minimizing the unchanged code you write.
2896
+
2897
+ When writing the edit, you should specify each edit in sequence, with the special comment // ... existing code ... to represent unchanged code in between edited lines.
2898
+
2899
+ For example:
2900
+
2901
+ // ... existing code ...
2902
+ FIRST_EDIT
2903
+ // ... existing code ...
2904
+ SECOND_EDIT
2905
+ // ... existing code ...
2906
+ THIRD_EDIT
2907
+ // ... existing code ...
2908
+
2909
+ You should still bias towards repeating as few lines of the original file as possible to convey the change.
2910
+ But, each edit should contain minimally sufficient context of unchanged lines around the code you're editing to resolve ambiguity.
2911
+
2912
+ DO NOT omit spans of pre-existing code (or comments) without using the // ... existing code ... comment to indicate its absence. If you omit the existing code comment, the model may inadvertently delete these lines.
2913
+
2914
+ If you plan on deleting a section, you must provide context before and after to delete it.
2915
+
2916
+ Make sure it is clear what the edit should be, and where it should be applied.
2917
+ Make edits to a file in a single edit_file call instead of multiple edit_file calls to the same file. The apply model can handle many distinct edits at once.`;
2918
+ var EDIT_FILE_SYSTEM_PROMPT = `When the user is asking for edits to their code, use the edit_file tool to highlight the changes necessary and add comments to indicate where unchanged code has been skipped. For example:
2919
+
2920
+ // ... existing code ...
2921
+ {{ edit_1 }}
2922
+ // ... existing code ...
2923
+ {{ edit_2 }}
2924
+ // ... existing code ...
2925
+
2926
+ Often this will mean that the start/end of the file will be skipped, but that's okay! Rewrite the entire file ONLY if specifically requested. Always provide a brief explanation of the updates, unless the user specifically requests only the code.
2927
+
2928
+ These edit codeblocks are also read by a less intelligent language model, colloquially called the apply model, to update the file. To help specify the edit to the apply model, you will be very careful when generating the codeblock to not introduce ambiguity. You will specify all unchanged regions (code and comments) of the file with "// ... existing code ..." comment markers. This will ensure the apply model will not delete existing unchanged code or comments when editing the file.`;
2929
+
2930
+ // tools/fastapply/openai.ts
2931
+ var editFileTool = {
2932
+ type: "function",
2933
+ function: {
2934
+ name: "edit_file",
2935
+ description: EDIT_FILE_TOOL_DESCRIPTION,
2936
+ parameters: {
2937
+ type: "object",
2938
+ properties: {
2939
+ target_filepath: {
2940
+ type: "string",
2941
+ description: "The path of the target file to modify"
2942
+ },
2943
+ instructions: {
2944
+ type: "string",
2945
+ description: "A single sentence describing what you are changing (first person)"
2946
+ },
2947
+ code_edit: {
2948
+ type: "string",
2949
+ description: "The lazy edit with // ... existing code ... markers"
2950
+ }
2951
+ },
2952
+ required: ["target_filepath", "instructions", "code_edit"]
2953
+ }
2954
+ }
2955
+ };
2956
+ async function execute2(input, config) {
2957
+ return executeEditFile(input, config);
2958
+ }
2959
+ function getSystemPrompt2() {
2960
+ return EDIT_FILE_SYSTEM_PROMPT;
2961
+ }
2962
+ function formatResult3(result) {
2963
+ if (!result.success) {
2964
+ return `Error editing file: ${result.error}`;
2965
+ }
2966
+ const { changes } = result;
2967
+ const summary = [
2968
+ changes.linesAdded && `+${changes.linesAdded} lines`,
2969
+ changes.linesRemoved && `-${changes.linesRemoved} lines`,
2970
+ changes.linesModified && `~${changes.linesModified} lines modified`
2971
+ ].filter(Boolean).join(", ");
2972
+ if (result.udiff) {
2973
+ return `Successfully applied changes to ${result.filepath}:
2974
+
2975
+ ${result.udiff}
2976
+
2977
+ Summary: ${summary}`;
2978
+ }
2979
+ return `Successfully applied changes to ${result.filepath}. ${summary}`;
2980
+ }
2981
+ function createEditFileTool(config = {}) {
2982
+ const toolDef = {
2983
+ ...editFileTool,
2984
+ ...config.description && {
2985
+ function: {
2986
+ ...editFileTool.function,
2987
+ description: config.description
2988
+ }
2989
+ }
2990
+ };
2991
+ return Object.assign({}, toolDef, {
2992
+ execute: async (input) => {
2993
+ const parsedInput = typeof input === "string" ? JSON.parse(input) : input;
2994
+ return execute2(parsedInput, config);
2995
+ },
2996
+ formatResult: (result) => {
2997
+ return formatResult3(result);
2998
+ },
2999
+ getSystemPrompt: () => {
3000
+ return getSystemPrompt2();
3001
+ }
3002
+ });
3003
+ }
3004
+
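A minimal sketch of the fast-apply edit tool defined above. The parameter names match its schema; the target file and edit content are illustrative, and morphApiKey matches what the factories below pass through.

```ts
const editFile = createEditFileTool({ morphApiKey: process.env.MORPH_API_KEY! });

const editResult = await editFile.execute({
  target_filepath: "src/server.ts",               // illustrative path
  instructions: "I am adding a /healthz route to the express app",
  code_edit: [
    "// ... existing code ...",
    "app.get('/healthz', (_req, res) => res.send('ok'));",
    "// ... existing code ..."
  ].join("\n")
});

console.log(editFile.formatResult(editResult));   // udiff summary or an error message
```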
3005
+ // factories/openai.ts
3006
+ var OpenAIToolFactory = class {
3007
+ constructor(config) {
3008
+ this.config = config;
3009
+ }
3010
+ /**
3011
+ * Create an OpenAI-compatible warp grep tool
3012
+ *
3013
+ * @param toolConfig - Tool configuration (apiKey inherited from MorphClient)
3014
+ * @returns OpenAI ChatCompletionTool with execute and formatResult methods
3015
+ */
3016
+ createWarpGrepTool(toolConfig) {
3017
+ return createMorphWarpGrepTool({
3018
+ ...toolConfig,
3019
+ apiKey: this.config.apiKey
3020
+ });
3021
+ }
3022
+ /**
3023
+ * Create an OpenAI-compatible codebase search tool
3024
+ *
3025
+ * @param toolConfig - Tool configuration with repoId (apiKey inherited from MorphClient)
3026
+ * @returns OpenAI ChatCompletionTool with execute and formatResult methods
3027
+ */
3028
+ createCodebaseSearchTool(toolConfig) {
3029
+ return createCodebaseSearchTool({
3030
+ ...toolConfig,
3031
+ apiKey: this.config.apiKey
3032
+ });
3033
+ }
3034
+ /**
3035
+ * Create an OpenAI-compatible edit file tool
3036
+ *
3037
+ * @param toolConfig - Tool configuration (morphApiKey inherited from MorphClient)
3038
+ * @returns OpenAI ChatCompletionTool with execute and formatResult methods
3039
+ */
3040
+ createEditFileTool(toolConfig = {}) {
3041
+ return createEditFileTool({
3042
+ ...toolConfig,
3043
+ morphApiKey: this.config.apiKey
3044
+ });
3045
+ }
3046
+ };
3047
+
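To show where OpenAIToolFactory fits, a sketch of wiring a factory-created tool into a chat-completions call. The factory method and the tool's execute/formatResult helpers come from the code above; the openai client setup, model name, repoId value, and message handling are assumptions about the caller's environment, not part of this package.

```ts
import OpenAI from "openai";
import { MorphClient } from "@morphllm/morphsdk";

const morph = new MorphClient({ apiKey: process.env.MORPH_API_KEY! });
const openai = new OpenAI();

// Factory-created tool: a plain ChatCompletionTool definition with execute/formatResult attached.
const search = morph.openai.createCodebaseSearchTool({ repoId: "my-repo" });

const completion = await openai.chat.completions.create({
  model: "gpt-4o",
  messages: [{ role: "user", content: "How does auth token refresh work?" }],
  tools: [search]
});

const call = completion.choices[0].message.tool_calls?.[0];
if (call && call.type === "function") {
  // execute() accepts the raw JSON arguments string and parses it internally.
  const result = await search.execute(call.function.arguments);
  console.log(search.formatResult(result));
}
```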
3048
+ // tools/warp_grep/anthropic.ts
3049
+ var INPUT_SCHEMA = {
3050
+ type: "object",
3051
+ properties: {
3052
+ query: { type: "string", description: "Free-form repository question" }
3053
+ },
3054
+ required: ["query"]
3055
+ };
3056
+ async function execute3(input, config) {
3057
+ const parsed = typeof input === "string" ? JSON.parse(input) : input;
3058
+ const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
3059
+ const result = await runWarpGrep({
3060
+ query: parsed.query,
3061
+ repoRoot: config.repoRoot,
3062
+ provider,
3063
+ excludes: config.excludes,
3064
+ includes: config.includes,
3065
+ debug: config.debug ?? false,
3066
+ apiKey: config.apiKey
3067
+ });
3068
+ const finish = result.finish;
3069
+ if (result.terminationReason !== "completed" || !finish?.metadata) {
3070
+ return { success: false, error: "Search did not complete" };
3071
+ }
3072
+ const contexts = (finish.resolved ?? []).map((r) => ({
3073
+ file: r.path,
3074
+ content: r.content
3075
+ }));
3076
+ return { success: true, contexts, summary: finish.payload };
3077
+ }
3078
+ function createMorphWarpGrepTool2(config) {
3079
+ const tool4 = {
3080
+ name: "morph-warp-grep",
3081
+ description: config.description ?? WARP_GREP_DESCRIPTION,
3082
+ input_schema: INPUT_SCHEMA
3083
+ };
3084
+ return Object.assign(tool4, {
3085
+ execute: async (input) => {
3086
+ return execute3(input, config);
3087
+ },
3088
+ formatResult: (result) => {
3089
+ return formatResult(result);
3090
+ },
3091
+ getSystemPrompt: () => {
3092
+ return getSystemPrompt();
3093
+ }
3094
+ });
3095
+ }
3096
+
3097
+ // tools/codebase_search/anthropic.ts
3098
+ function createCodebaseSearchTool2(config) {
3099
+ const toolDefinition = {
3100
+ name: "codebase_search",
3101
+ description: CODEBASE_SEARCH_DESCRIPTION,
3102
+ input_schema: {
3103
+ type: "object",
3104
+ properties: {
3105
+ explanation: {
3106
+ type: "string",
3107
+ description: "One sentence explanation as to why this tool is being used, and how it contributes to the goal."
3108
+ },
3109
+ query: {
3110
+ type: "string",
3111
+ description: 'A complete question about what you want to understand. Ask as if talking to a colleague: "How does X work?", "What happens when Y?", "Where is Z handled?"'
3112
+ },
3113
+ target_directories: {
3114
+ type: "array",
3115
+ items: { type: "string" },
3116
+ description: "Prefix directory paths to limit search scope (single directory only, no glob patterns). Use [] to search entire repo."
3117
+ },
3118
+ limit: {
3119
+ type: "number",
3120
+ description: "Maximum results to return (default: 10)"
3121
+ }
3122
+ },
3123
+ required: ["query", "target_directories", "explanation"]
3124
+ },
3125
+ cache_control: { type: "ephemeral" }
3126
+ };
3127
+ return Object.assign(toolDefinition, {
3128
+ execute: async (input) => {
3129
+ return executeCodebaseSearch(input, config);
3130
+ },
3131
+ formatResult: (result) => {
3132
+ return formatResult4(result);
3133
+ },
3134
+ getSystemPrompt: () => {
3135
+ return CODEBASE_SEARCH_SYSTEM_PROMPT;
3136
+ }
3137
+ });
3138
+ }
3139
+ function formatResult4(result) {
3140
+ if (!result.success) {
3141
+ return `Search failed: ${result.error}`;
3142
+ }
3143
+ if (result.results.length === 0) {
3144
+ return "No matching code found. Try rephrasing your query or broadening the search scope.";
3145
+ }
3146
+ const lines = [];
3147
+ lines.push(`Found ${result.results.length} relevant code sections (searched ${result.stats.candidatesRetrieved} candidates in ${result.stats.searchTimeMs}ms):
3148
+ `);
3149
+ result.results.forEach((r, i) => {
3150
+ const relevance = (r.rerankScore * 100).toFixed(1);
3151
+ lines.push(`${i + 1}. ${r.filepath} (${relevance}% relevant)`);
3152
+ lines.push(` Symbol: ${r.symbolPath}`);
3153
+ lines.push(` Language: ${r.language}`);
3154
+ lines.push(` Lines: ${r.startLine}-${r.endLine}`);
3155
+ lines.push(` Code:`);
3156
+ const codeLines = r.content.split("\n");
3157
+ codeLines.slice(0, Math.min(codeLines.length, 20)).forEach((line) => {
3158
+ lines.push(` ${line}`);
3159
+ });
3160
+ if (codeLines.length > 20) {
3161
+ lines.push(` ... (${codeLines.length - 20} more lines)`);
3162
+ }
3163
+ lines.push("");
3164
+ });
3165
+ return lines.join("\n");
3166
+ }
3167
+
3168
+ // tools/fastapply/anthropic.ts
3169
+ var editFileTool2 = {
3170
+ name: "edit_file",
3171
+ description: EDIT_FILE_TOOL_DESCRIPTION,
3172
+ input_schema: {
3173
+ type: "object",
3174
+ properties: {
3175
+ target_filepath: {
3176
+ type: "string",
3177
+ description: "The path of the target file to modify"
3178
+ },
3179
+ instructions: {
3180
+ type: "string",
3181
+ description: "A single sentence describing what you are changing (first person)"
3182
+ },
3183
+ code_edit: {
3184
+ type: "string",
3185
+ description: "The lazy edit with // ... existing code ... markers"
3186
+ }
3187
+ },
3188
+ required: ["target_filepath", "instructions", "code_edit"]
3189
+ }
3190
+ };
3191
+ function formatResult5(result) {
3192
+ if (!result.success) {
3193
+ return `Error editing file: ${result.error}`;
3194
+ }
3195
+ const { changes } = result;
3196
+ const summary = [
3197
+ changes.linesAdded && `+${changes.linesAdded} lines`,
3198
+ changes.linesRemoved && `-${changes.linesRemoved} lines`,
3199
+ changes.linesModified && `~${changes.linesModified} lines modified`
3200
+ ].filter(Boolean).join(", ");
3201
+ if (result.udiff) {
3202
+ return `Successfully applied changes to ${result.filepath}:
3203
+
3204
+ ${result.udiff}
3205
+
3206
+ Summary: ${summary}`;
3207
+ }
3208
+ return `Successfully applied changes to ${result.filepath}. ${summary}`;
3209
+ }
3210
+ function createEditFileTool2(config = {}) {
3211
+ const toolDef = {
3212
+ ...editFileTool2,
3213
+ ...config.description && { description: config.description }
3214
+ };
3215
+ return Object.assign({}, toolDef, {
3216
+ execute: async (input) => {
3217
+ return executeEditFile(input, config);
3218
+ },
3219
+ formatResult: (result) => {
3220
+ return formatResult5(result);
3221
+ },
3222
+ getSystemPrompt: () => {
3223
+ return EDIT_FILE_SYSTEM_PROMPT;
3224
+ }
3225
+ });
3226
+ }
3227
+
3228
+ // factories/anthropic.ts
3229
+ var AnthropicToolFactory = class {
3230
+ constructor(config) {
3231
+ this.config = config;
3232
+ }
3233
+ /**
3234
+ * Create an Anthropic-compatible warp grep tool
3235
+ *
3236
+ * @param toolConfig - Tool configuration (apiKey inherited from MorphClient)
3237
+ * @returns Anthropic Tool with execute and formatResult methods
3238
+ */
3239
+ createWarpGrepTool(toolConfig) {
3240
+ return createMorphWarpGrepTool2({
3241
+ ...toolConfig,
3242
+ apiKey: this.config.apiKey
3243
+ });
3244
+ }
3245
+ /**
3246
+ * Create an Anthropic-compatible codebase search tool
3247
+ *
3248
+ * @param toolConfig - Tool configuration with repoId (apiKey inherited from MorphClient)
3249
+ * @returns Anthropic Tool with execute and formatResult methods
3250
+ */
3251
+ createCodebaseSearchTool(toolConfig) {
3252
+ return createCodebaseSearchTool2({
3253
+ ...toolConfig,
3254
+ apiKey: this.config.apiKey
3255
+ });
3256
+ }
3257
+ /**
3258
+ * Create an Anthropic-compatible edit file tool
3259
+ *
3260
+ * @param toolConfig - Tool configuration (morphApiKey inherited from MorphClient)
3261
+ * @returns Anthropic Tool with execute and formatResult methods
3262
+ */
3263
+ createEditFileTool(toolConfig = {}) {
3264
+ return createEditFileTool2({
3265
+ ...toolConfig,
3266
+ morphApiKey: this.config.apiKey
3267
+ });
3268
+ }
3269
+ };
3270
+
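The Anthropic factory works the same way. In this sketch the @anthropic-ai/sdk client, model id, and prompt are assumptions, while createWarpGrepTool, the morph-warp-grep tool name, and getSystemPrompt() come from the code above.

```ts
import Anthropic from "@anthropic-ai/sdk";
import { MorphClient } from "@morphllm/morphsdk";

const morph = new MorphClient({ apiKey: process.env.MORPH_API_KEY! });
const anthropic = new Anthropic();

const warpGrep = morph.anthropic.createWarpGrepTool({ repoRoot: process.cwd() });

const message = await anthropic.messages.create({
  model: "claude-sonnet-4-5",                     // placeholder model id
  max_tokens: 1024,
  system: warpGrep.getSystemPrompt(),
  tools: [warpGrep],
  messages: [{ role: "user", content: "Where do we configure request retries?" }]
});

// Route tool_use blocks back through the tool's executor.
for (const block of message.content) {
  if (block.type === "tool_use" && block.name === "morph-warp-grep") {
    const result = await warpGrep.execute(block.input);
    console.log(warpGrep.formatResult(result));
  }
}
```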
3271
+ // tools/warp_grep/vercel.ts
3272
+ var import_ai = require("ai");
3273
+ var import_zod = require("zod");
3274
+ var warpGrepSchema = import_zod.z.object({
3275
+ query: import_zod.z.string().describe("Free-form repository question")
3276
+ });
3277
+ function createMorphWarpGrepTool3(config) {
3278
+ return (0, import_ai.tool)({
3279
+ description: config.description ?? WARP_GREP_DESCRIPTION,
3280
+ inputSchema: warpGrepSchema,
3281
+ execute: async (params) => {
3282
+ const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
3283
+ const result = await runWarpGrep({
3284
+ query: params.query,
3285
+ repoRoot: config.repoRoot,
3286
+ provider,
3287
+ excludes: config.excludes,
3288
+ includes: config.includes,
3289
+ debug: config.debug ?? false,
3290
+ apiKey: config.apiKey
3291
+ });
3292
+ const finish = result.finish;
3293
+ if (result.terminationReason !== "completed" || !finish?.metadata) {
3294
+ return { success: false, error: "Search did not complete" };
3295
+ }
3296
+ const contexts = (finish.resolved ?? []).map((r) => ({
3297
+ file: r.path,
3298
+ content: r.content
3299
+ }));
3300
+ return { success: true, contexts, summary: finish.payload };
3301
+ }
3302
+ });
3303
+ }
3304
+
3305
+ // tools/codebase_search/vercel.ts
3306
+ var import_ai2 = require("ai");
3307
+ var import_zod2 = require("zod");
3308
+ function createCodebaseSearchTool3(config) {
3309
+ const schema = import_zod2.z.object({
3310
+ query: import_zod2.z.string().describe('A complete question about what you want to understand. Ask as if talking to a colleague: "How does X work?", "What happens when Y?", "Where is Z handled?"'),
3311
+ target_directories: import_zod2.z.array(import_zod2.z.string()).describe("Prefix directory paths to limit search scope (single directory only, no glob patterns). Use [] to search entire repo."),
3312
+ explanation: import_zod2.z.string().describe("One sentence explanation as to why this tool is being used, and how it contributes to the goal."),
3313
+ limit: import_zod2.z.number().optional().describe("Max results to return (default: 10)")
3314
+ });
3315
+ return (0, import_ai2.tool)({
3316
+ description: CODEBASE_SEARCH_DESCRIPTION,
3317
+ inputSchema: schema,
3318
+ execute: async (params) => {
3319
+ const { query, target_directories, explanation, limit } = params;
3320
+ const result = await executeCodebaseSearch(
3321
+ { query, target_directories, explanation, limit },
3322
+ config
3323
+ );
3324
+ if (!result.success) {
3325
+ return {
3326
+ error: result.error,
3327
+ results: []
3328
+ };
3329
+ }
3330
+ return {
3331
+ found: result.results.length,
3332
+ searchTime: `${result.stats.searchTimeMs}ms`,
3333
+ results: result.results.map((r) => ({
3334
+ file: r.filepath,
3335
+ symbol: r.symbolPath,
3336
+ lines: `${r.startLine}-${r.endLine}`,
3337
+ language: r.language,
3338
+ relevance: `${(r.rerankScore * 100).toFixed(1)}%`,
3339
+ code: r.content
3340
+ }))
3341
+ };
3342
+ }
3343
+ });
3344
+ }
3345
+
3346
+ // tools/fastapply/vercel.ts
3347
+ var import_ai3 = require("ai");
3348
+ var import_zod3 = require("zod");
3349
+ var editFileSchema = import_zod3.z.object({
3350
+ target_filepath: import_zod3.z.string().describe("The path of the target file to modify"),
3351
+ instructions: import_zod3.z.string().describe("A single sentence describing what you are changing (first person)"),
3352
+ code_edit: import_zod3.z.string().describe("The lazy edit with // ... existing code ... markers")
3353
+ });
3354
+ var editFileTool3 = (0, import_ai3.tool)({
3355
+ description: EDIT_FILE_TOOL_DESCRIPTION,
3356
+ inputSchema: editFileSchema,
3357
+ execute: async (params) => {
3358
+ const result = await executeEditFile({
3359
+ target_filepath: params.target_filepath,
3360
+ instructions: params.instructions,
3361
+ code_edit: params.code_edit
3362
+ });
3363
+ if (!result.success) {
3364
+ throw new Error(`Failed to edit file: ${result.error}`);
3365
+ }
3366
+ return {
3367
+ success: true,
3368
+ filepath: result.filepath,
3369
+ changes: result.changes,
3370
+ udiff: result.udiff
3371
+ };
3372
+ }
3373
+ });
3374
+ function createEditFileTool3(config = {}) {
3375
+ const schema = import_zod3.z.object({
3376
+ target_filepath: import_zod3.z.string().describe("The path of the target file to modify"),
3377
+ instructions: import_zod3.z.string().describe("A single sentence describing what you are changing (first person)"),
3378
+ code_edit: import_zod3.z.string().describe("The lazy edit with // ... existing code ... markers")
3379
+ });
3380
+ return (0, import_ai3.tool)({
3381
+ description: config.description || EDIT_FILE_TOOL_DESCRIPTION,
3382
+ inputSchema: schema,
3383
+ execute: async (params) => {
3384
+ const result = await executeEditFile(
3385
+ {
3386
+ target_filepath: params.target_filepath,
3387
+ instructions: params.instructions,
3388
+ code_edit: params.code_edit
3389
+ },
3390
+ config
3391
+ );
3392
+ if (!result.success) {
3393
+ throw new Error(`Failed to edit file: ${result.error}`);
3394
+ }
3395
+ return {
3396
+ success: true,
3397
+ filepath: result.filepath,
3398
+ changes: result.changes,
3399
+ udiff: result.udiff
3400
+ };
3401
+ }
3402
+ });
3403
+ }
3404
+
3405
+ // factories/vercel.ts
3406
+ var VercelToolFactory = class {
3407
+ constructor(config) {
3408
+ this.config = config;
3409
+ }
3410
+ /**
3411
+ * Create a Vercel AI SDK-compatible warp grep tool
3412
+ *
3413
+ * @param toolConfig - Tool configuration (apiKey inherited from MorphClient)
3414
+ * @returns Vercel AI SDK tool
3415
+ */
3416
+ createWarpGrepTool(toolConfig) {
3417
+ return createMorphWarpGrepTool3({
3418
+ ...toolConfig,
3419
+ apiKey: this.config.apiKey
3420
+ });
3421
+ }
3422
+ /**
3423
+ * Create a Vercel AI SDK-compatible codebase search tool
3424
+ *
3425
+ * @param toolConfig - Tool configuration with repoId (apiKey inherited from MorphClient)
3426
+ * @returns Vercel AI SDK tool
3427
+ */
3428
+ createCodebaseSearchTool(toolConfig) {
3429
+ return createCodebaseSearchTool3({
3430
+ ...toolConfig,
3431
+ apiKey: this.config.apiKey
3432
+ });
3433
+ }
3434
+ /**
3435
+ * Create a Vercel AI SDK-compatible edit file tool
3436
+ *
3437
+ * @param toolConfig - Tool configuration (morphApiKey inherited from MorphClient)
3438
+ * @returns Vercel AI SDK tool
3439
+ */
3440
+ createEditFileTool(toolConfig = {}) {
3441
+ return createEditFileTool3({
3442
+ ...toolConfig,
3443
+ morphApiKey: this.config.apiKey
3444
+ });
3445
+ }
3446
+ };
3447
+
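For the Vercel AI SDK factory, the tools are ai-package tool() objects whose execute() the SDK calls itself. In this sketch the model provider import, prompt, and repoId are assumptions; the factory methods are from the code above.

```ts
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";
import { MorphClient } from "@morphllm/morphsdk";

const morph = new MorphClient({ apiKey: process.env.MORPH_API_KEY! });

const result = await generateText({
  model: openai("gpt-4o"),
  prompt: "Find where request retries are configured and summarize the policy.",
  tools: {
    codebase_search: morph.vercel.createCodebaseSearchTool({ repoId: "my-repo" }),
    edit_file: morph.vercel.createEditFileTool()
  }
});

// The AI SDK invokes each tool's execute() itself; tool outputs are available on the result.
console.log(result.toolResults);
```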
1558
3448
  // client.ts
1559
3449
  var MorphClient = class {
1560
3450
  /** Client configuration */
@@ -1563,12 +3453,20 @@ var MorphClient = class {
  fastApply;
  /** CodebaseSearch tool for semantic code search */
  codebaseSearch;
+  /** WarpGrep tool for fast code search using ripgrep */
+  warpGrep;
  /** Browser tool for AI-powered browser automation */
  browser;
  /** Git tool for version control operations */
  git;
  /** Model routers for intelligent model selection */
  routers;
+  /** OpenAI-compatible tool factories */
+  openai;
+  /** Anthropic-compatible tool factories */
+  anthropic;
+  /** Vercel AI SDK tool factories */
+  vercel;
  /**
   * Create a new Morph SDK client
   *
@@ -1597,6 +3495,12 @@ var MorphClient = class {
      timeout: config.timeout,
      retryConfig: config.retryConfig
    });
+    this.warpGrep = new WarpGrepClient({
+      apiKey: config.apiKey,
+      debug: config.debug,
+      timeout: config.timeout,
+      retryConfig: config.retryConfig
+    });
    this.browser = new BrowserClient({
      apiKey: config.apiKey,
      debug: config.debug,
@@ -1633,18 +3537,92 @@ var MorphClient = class {
        retryConfig: config.retryConfig
      })
    };
+    this.openai = new OpenAIToolFactory(config);
+    this.anthropic = new AnthropicToolFactory(config);
+    this.vercel = new VercelToolFactory(config);
+  }
+};
+
+// tools/warp_grep/providers/command.ts
+var CommandExecProvider = class {
+  constructor(opts) {
+    this.opts = opts;
+  }
+  map(path4) {
+    return this.opts.pathMap ? this.opts.pathMap(path4) : path4;
+  }
+  async grep(params) {
+    const remotePath = this.map(params.path);
+    const args = [
+      "--no-config",
+      "--no-heading",
+      "--with-filename",
+      "--line-number",
+      "--color=never",
+      "--trim",
+      "--max-columns=400",
+      ...(this.opts.excludes ?? DEFAULT_EXCLUDES).flatMap((e) => ["-g", `!${e}`]),
+      params.pattern,
+      remotePath || "."
+    ];
+    const res = await this.opts.run("rg", args, { cwd: this.opts.cwd, env: this.opts.env });
+    if (res.exitCode === -1) throw new Error(res.stderr || "ripgrep execution failed");
+    if (res.exitCode !== 0 && res.exitCode !== 1) throw new Error(res.stderr || `ripgrep failed (${res.exitCode})`);
+    const lines = (res.stdout || "").trim().split(/\r?\n/).filter((l) => l.length > 0);
+    return { lines };
+  }
+  async glob(params) {
+    const remotePath = this.map(params.path);
+    const args = [
+      "--no-config",
+      "--files",
+      "-g",
+      params.pattern,
+      ...(this.opts.excludes ?? DEFAULT_EXCLUDES).flatMap((e) => ["-g", `!${e}`]),
+      remotePath || "."
+    ];
+    const res = await this.opts.run("rg", args, { cwd: this.opts.cwd, env: this.opts.env });
+    if (res.exitCode === -1) throw new Error(res.stderr || "ripgrep execution failed");
+    const files = (res.stdout || "").trim().split(/\r?\n/).filter((l) => l.length > 0);
+    return { files };
+  }
+  async read(params) {
+    const remotePath = this.map(params.path);
+    const rc = this.opts.readCommand ? this.opts.readCommand(remotePath, params.start, params.end) : { cmd: "sed", args: ["-n", `${params.start ?? 1},${params.end ?? 1e6}p`, remotePath] };
+    const res = await this.opts.run(rc.cmd, rc.args, { cwd: this.opts.cwd, env: this.opts.env });
+    if (res.exitCode !== 0) throw new Error(res.stderr || `read failed (${res.exitCode})`);
+    const text = res.stdout || "";
+    const lines = text.split(/\r?\n/).map((line, idx) => `${(params.start ?? 1) + idx}|${line}`);
+    return { lines: lines.filter((l) => l !== `${(params.start ?? 1) + (lines.length - 1)}|`) };
+  }
+  async analyse(params) {
+    const target = this.map(params.path);
+    const pattern = params.pattern ?? "*";
+    const files = await this.glob({ pattern, path: target }).catch(() => ({ files: [] }));
+    return files.files.slice(0, params.maxResults ?? 100).map((f) => ({
+      name: f.split("/").pop() || f,
+      path: f,
+      type: f.endsWith("/") ? "dir" : "file",
+      depth: 0
+    }));
  }
};
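CommandExecProvider delegates command execution to the caller through opts.run, so the same provider can shell out locally, over SSH, or inside a container. Below is one plausible local runner built on node:child_process, matching the { exitCode, stdout, stderr } contract the methods above check; the import path and repository path are assumptions.

```ts
import { execFile } from "node:child_process";
import { CommandExecProvider } from "@morphllm/morphsdk";

// Local runner returning the { exitCode, stdout, stderr } shape the provider expects.
function run(cmd: string, args: string[], opts: { cwd?: string; env?: NodeJS.ProcessEnv } = {}) {
  return new Promise<{ exitCode: number; stdout: string; stderr: string }>((resolve) => {
    execFile(cmd, args, { cwd: opts.cwd, env: opts.env, maxBuffer: 16 * 1024 * 1024 }, (err, stdout, stderr) => {
      // ripgrep exits 1 for "no matches"; only a spawn failure maps to -1.
      const exitCode = err == null ? 0 : typeof err.code === "number" ? err.code : -1;
      resolve({ exitCode, stdout: stdout ?? "", stderr: stderr ?? "" });
    });
  });
}

const provider = new CommandExecProvider({ run, cwd: "/path/to/repo" });
const { lines } = await provider.grep({ pattern: "configureCommit", path: "." });
console.log(lines.slice(0, 5));
```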
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   AnthropicRouter,
+  AnthropicToolFactory,
   BrowserClient,
   CodebaseSearchClient,
+  CommandExecProvider,
   FastApplyClient,
   GeminiRouter,
+  LocalRipgrepProvider,
   MorphClient,
   MorphGit,
   OpenAIRouter,
-  RawRouter
+  OpenAIToolFactory,
+  RawRouter,
+  VercelToolFactory,
+  WarpGrepClient
 });
 //# sourceMappingURL=index.cjs.map