@morphllm/morphsdk 0.2.57 → 0.2.59

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (170)
  1. package/dist/anthropic-CaFUHxBW.d.ts +89 -0
  2. package/dist/{chunk-6X5UOY7B.js → chunk-2CASO3ZO.js} +46 -79
  3. package/dist/chunk-2CASO3ZO.js.map +1 -0
  4. package/dist/chunk-374N3GIA.js +118 -0
  5. package/dist/chunk-374N3GIA.js.map +1 -0
  6. package/dist/chunk-3IQIT6MC.js +65 -0
  7. package/dist/chunk-3IQIT6MC.js.map +1 -0
  8. package/dist/chunk-4VGOBA2J.js +57 -0
  9. package/dist/chunk-4VGOBA2J.js.map +1 -0
  10. package/dist/chunk-527P5X2E.js +98 -0
  11. package/dist/chunk-527P5X2E.js.map +1 -0
  12. package/dist/chunk-6N6ZYZYD.js +74 -0
  13. package/dist/chunk-6N6ZYZYD.js.map +1 -0
  14. package/dist/chunk-6Y5JB4JC.js +195 -0
  15. package/dist/chunk-6Y5JB4JC.js.map +1 -0
  16. package/dist/{chunk-TICMYDII.js → chunk-APP75CBN.js} +33 -16
  17. package/dist/chunk-APP75CBN.js.map +1 -0
  18. package/dist/{chunk-QFIHUCTF.js → chunk-FN4EP3WY.js} +19 -19
  19. package/dist/chunk-FN4EP3WY.js.map +1 -0
  20. package/dist/chunk-ILJ3J5IA.js +72 -0
  21. package/dist/chunk-ILJ3J5IA.js.map +1 -0
  22. package/dist/chunk-ISWL67SF.js +1 -0
  23. package/dist/chunk-KW7OEGZK.js +9 -0
  24. package/dist/chunk-KW7OEGZK.js.map +1 -0
  25. package/dist/chunk-Q5AHGIQO.js +205 -0
  26. package/dist/chunk-Q5AHGIQO.js.map +1 -0
  27. package/dist/{chunk-OXHGFHEU.js → chunk-VJU3BRET.js} +3 -3
  28. package/dist/chunk-VJU3BRET.js.map +1 -0
  29. package/dist/{chunk-TJIUA27P.js → chunk-XT5ZO6ES.js} +9 -5
  30. package/dist/chunk-XT5ZO6ES.js.map +1 -0
  31. package/dist/{chunk-LVPVVLTI.js → chunk-YV75OQTE.js} +105 -17
  32. package/dist/chunk-YV75OQTE.js.map +1 -0
  33. package/dist/{chunk-ZJIIICRA.js → chunk-ZO4PPFCZ.js} +60 -29
  34. package/dist/chunk-ZO4PPFCZ.js.map +1 -0
  35. package/dist/{client-CFoR--IU.d.ts → client-CextMMm9.d.ts} +10 -15
  36. package/dist/client.cjs +689 -343
  37. package/dist/client.cjs.map +1 -1
  38. package/dist/client.d.ts +3 -2
  39. package/dist/client.js +15 -15
  40. package/dist/finish-kXAcUJyB.d.ts +33 -0
  41. package/dist/gemini-CE80Pbdy.d.ts +117 -0
  42. package/dist/git/client.cjs +2 -2
  43. package/dist/git/client.cjs.map +1 -1
  44. package/dist/git/client.d.ts +1 -1
  45. package/dist/git/client.js +1 -1
  46. package/dist/git/index.cjs +2 -2
  47. package/dist/git/index.cjs.map +1 -1
  48. package/dist/git/index.js +1 -1
  49. package/dist/git/types.cjs.map +1 -1
  50. package/dist/git/types.d.ts +1 -1
  51. package/dist/index.cjs +702 -343
  52. package/dist/index.cjs.map +1 -1
  53. package/dist/index.d.ts +4 -3
  54. package/dist/index.js +17 -16
  55. package/dist/openai-Fvpqln7F.d.ts +89 -0
  56. package/dist/tools/warp_grep/agent/config.cjs +8 -4
  57. package/dist/tools/warp_grep/agent/config.cjs.map +1 -1
  58. package/dist/tools/warp_grep/agent/config.d.ts +7 -2
  59. package/dist/tools/warp_grep/agent/config.js +1 -1
  60. package/dist/tools/warp_grep/agent/formatter.cjs +32 -15
  61. package/dist/tools/warp_grep/agent/formatter.cjs.map +1 -1
  62. package/dist/tools/warp_grep/agent/formatter.d.ts +1 -1
  63. package/dist/tools/warp_grep/agent/formatter.js +1 -1
  64. package/dist/tools/warp_grep/agent/parser.cjs +104 -17
  65. package/dist/tools/warp_grep/agent/parser.cjs.map +1 -1
  66. package/dist/tools/warp_grep/agent/parser.d.ts +3 -5
  67. package/dist/tools/warp_grep/agent/parser.js +1 -3
  68. package/dist/tools/warp_grep/agent/prompt.cjs +132 -56
  69. package/dist/tools/warp_grep/agent/prompt.cjs.map +1 -1
  70. package/dist/tools/warp_grep/agent/prompt.d.ts +1 -1
  71. package/dist/tools/warp_grep/agent/prompt.js +1 -1
  72. package/dist/tools/warp_grep/agent/runner.cjs +459 -192
  73. package/dist/tools/warp_grep/agent/runner.cjs.map +1 -1
  74. package/dist/tools/warp_grep/agent/runner.d.ts +1 -0
  75. package/dist/tools/warp_grep/agent/runner.js +6 -8
  76. package/dist/tools/warp_grep/agent/types.cjs.map +1 -1
  77. package/dist/tools/warp_grep/agent/types.d.ts +9 -2
  78. package/dist/tools/warp_grep/anthropic.cjs +650 -260
  79. package/dist/tools/warp_grep/anthropic.cjs.map +1 -1
  80. package/dist/tools/warp_grep/anthropic.d.ts +4 -74
  81. package/dist/tools/warp_grep/anthropic.js +13 -15
  82. package/dist/tools/warp_grep/client.cjs +1593 -0
  83. package/dist/tools/warp_grep/client.cjs.map +1 -0
  84. package/dist/tools/warp_grep/client.d.ts +87 -0
  85. package/dist/tools/warp_grep/client.js +26 -0
  86. package/dist/tools/warp_grep/gemini.cjs +1587 -0
  87. package/dist/tools/warp_grep/gemini.cjs.map +1 -0
  88. package/dist/tools/warp_grep/gemini.d.ts +7 -0
  89. package/dist/tools/warp_grep/gemini.js +34 -0
  90. package/dist/tools/warp_grep/harness.cjs +556 -220
  91. package/dist/tools/warp_grep/harness.cjs.map +1 -1
  92. package/dist/tools/warp_grep/harness.d.ts +50 -119
  93. package/dist/tools/warp_grep/harness.js +33 -41
  94. package/dist/tools/warp_grep/harness.js.map +1 -1
  95. package/dist/tools/warp_grep/index.cjs +812 -346
  96. package/dist/tools/warp_grep/index.cjs.map +1 -1
  97. package/dist/tools/warp_grep/index.d.ts +11 -6
  98. package/dist/tools/warp_grep/index.js +43 -22
  99. package/dist/tools/warp_grep/openai.cjs +650 -258
  100. package/dist/tools/warp_grep/openai.cjs.map +1 -1
  101. package/dist/tools/warp_grep/openai.d.ts +4 -74
  102. package/dist/tools/warp_grep/openai.js +13 -13
  103. package/dist/tools/warp_grep/providers/local.cjs +66 -27
  104. package/dist/tools/warp_grep/providers/local.cjs.map +1 -1
  105. package/dist/tools/warp_grep/providers/local.d.ts +4 -9
  106. package/dist/tools/warp_grep/providers/local.js +2 -2
  107. package/dist/tools/warp_grep/providers/remote.cjs +211 -0
  108. package/dist/tools/warp_grep/providers/remote.cjs.map +1 -0
  109. package/dist/tools/warp_grep/providers/remote.d.ts +67 -0
  110. package/dist/tools/warp_grep/providers/remote.js +9 -0
  111. package/dist/tools/warp_grep/providers/types.cjs.map +1 -1
  112. package/dist/tools/warp_grep/providers/types.d.ts +7 -15
  113. package/dist/tools/warp_grep/vercel.cjs +662 -277
  114. package/dist/tools/warp_grep/vercel.cjs.map +1 -1
  115. package/dist/tools/warp_grep/vercel.d.ts +4 -51
  116. package/dist/tools/warp_grep/vercel.js +16 -14
  117. package/dist/types-a_hxdPI6.d.ts +144 -0
  118. package/dist/vercel-3yjvfmVB.d.ts +66 -0
  119. package/package.json +12 -2
  120. package/dist/chunk-6X5UOY7B.js.map +0 -1
  121. package/dist/chunk-73RQWOQC.js +0 -16
  122. package/dist/chunk-73RQWOQC.js.map +0 -1
  123. package/dist/chunk-7OQOOB3R.js +0 -1
  124. package/dist/chunk-CFF636UC.js +0 -70
  125. package/dist/chunk-CFF636UC.js.map +0 -1
  126. package/dist/chunk-EK7OQPWD.js +0 -44
  127. package/dist/chunk-EK7OQPWD.js.map +0 -1
  128. package/dist/chunk-GJ5TYNRD.js +0 -107
  129. package/dist/chunk-GJ5TYNRD.js.map +0 -1
  130. package/dist/chunk-HQO45BAJ.js +0 -14
  131. package/dist/chunk-HQO45BAJ.js.map +0 -1
  132. package/dist/chunk-IMYQOKFO.js +0 -83
  133. package/dist/chunk-IMYQOKFO.js.map +0 -1
  134. package/dist/chunk-KBQWGT5L.js +0 -77
  135. package/dist/chunk-KBQWGT5L.js.map +0 -1
  136. package/dist/chunk-LVPVVLTI.js.map +0 -1
  137. package/dist/chunk-OXHGFHEU.js.map +0 -1
  138. package/dist/chunk-QFIHUCTF.js.map +0 -1
  139. package/dist/chunk-TICMYDII.js.map +0 -1
  140. package/dist/chunk-TJIUA27P.js.map +0 -1
  141. package/dist/chunk-WETRQJGU.js +0 -129
  142. package/dist/chunk-WETRQJGU.js.map +0 -1
  143. package/dist/chunk-ZJIIICRA.js.map +0 -1
  144. package/dist/core-CpkYEi_T.d.ts +0 -158
  145. package/dist/tools/warp_grep/tools/analyse.cjs +0 -40
  146. package/dist/tools/warp_grep/tools/analyse.cjs.map +0 -1
  147. package/dist/tools/warp_grep/tools/analyse.d.ts +0 -10
  148. package/dist/tools/warp_grep/tools/analyse.js +0 -8
  149. package/dist/tools/warp_grep/tools/finish.cjs +0 -69
  150. package/dist/tools/warp_grep/tools/finish.cjs.map +0 -1
  151. package/dist/tools/warp_grep/tools/finish.d.ts +0 -10
  152. package/dist/tools/warp_grep/tools/finish.js +0 -10
  153. package/dist/tools/warp_grep/tools/grep.cjs +0 -38
  154. package/dist/tools/warp_grep/tools/grep.cjs.map +0 -1
  155. package/dist/tools/warp_grep/tools/grep.d.ts +0 -8
  156. package/dist/tools/warp_grep/tools/grep.js +0 -15
  157. package/dist/tools/warp_grep/tools/grep.js.map +0 -1
  158. package/dist/tools/warp_grep/tools/read.cjs +0 -38
  159. package/dist/tools/warp_grep/tools/read.cjs.map +0 -1
  160. package/dist/tools/warp_grep/tools/read.d.ts +0 -9
  161. package/dist/tools/warp_grep/tools/read.js +0 -8
  162. package/dist/tools/warp_grep/utils/format.cjs +0 -42
  163. package/dist/tools/warp_grep/utils/format.cjs.map +0 -1
  164. package/dist/tools/warp_grep/utils/format.d.ts +0 -4
  165. package/dist/tools/warp_grep/utils/format.js +0 -18
  166. package/dist/tools/warp_grep/utils/format.js.map +0 -1
  167. /package/dist/{chunk-7OQOOB3R.js.map → chunk-ISWL67SF.js.map} +0 -0
  168. /package/dist/tools/warp_grep/{tools/analyse.js.map → client.js.map} +0 -0
  169. /package/dist/tools/warp_grep/{tools/finish.js.map → gemini.js.map} +0 -0
  170. /package/dist/tools/warp_grep/{tools/read.js.map → providers/remote.js.map} +0 -0
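Items 36 and 51 show `client.cjs` and `index.cjs` largely rewritten (+689/−343 and +702/−343). The old `client.cjs.map` removed in the final hunk of this diff still embeds the tool-factory JSDoc, which documents the intended high-level usage. A sketch reconstructed from that JSDoc; the root `@morphllm/morphsdk` import for `MorphClient` is an assumption not confirmed by this diff:

```typescript
import OpenAI from 'openai';
// Assumption: MorphClient is exported from the package root.
import { MorphClient } from '@morphllm/morphsdk';

const morph = new MorphClient({ apiKey: process.env.MORPH_API_KEY! });

// Factories inherit the API key from MorphClient (per the embedded factories/openai.ts JSDoc).
const grepTool = morph.openai.createWarpGrepTool({ repoRoot: '.' });
const searchTool = morph.openai.createCodebaseSearchTool({ repoId: 'my-project' });
const editTool = morph.openai.createEditFileTool({ baseDir: './src' });

const openai = new OpenAI();
const response = await openai.chat.completions.create({
  model: 'gpt-4o',
  tools: [grepTool, searchTool, editTool],
  messages: [{ role: 'user', content: 'Find and fix the bug' }],
});
```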
@@ -1,83 +0,0 @@
- import {
-   WARP_GREP_DESCRIPTION,
-   WARP_GREP_TOOL_NAME,
-   formatResult
- } from "./chunk-GJ5TYNRD.js";
- import {
-   runWarpGrep
- } from "./chunk-6X5UOY7B.js";
- import {
-   getSystemPrompt
- } from "./chunk-WETRQJGU.js";
- import {
-   LocalRipgrepProvider
- } from "./chunk-ZJIIICRA.js";
-
- // tools/warp_grep/openai.ts
- var TOOL_PARAMETERS = {
-   type: "object",
-   properties: {
-     query: { type: "string", description: "Free-form repository question" }
-   },
-   required: ["query"]
- };
- var warpGrepTool = {
-   type: "function",
-   function: {
-     name: WARP_GREP_TOOL_NAME,
-     description: WARP_GREP_DESCRIPTION,
-     parameters: TOOL_PARAMETERS
-   }
- };
- async function execute(input, config) {
-   const parsed = typeof input === "string" ? JSON.parse(input) : input;
-   const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
-   const result = await runWarpGrep({
-     query: parsed.query,
-     repoRoot: config.repoRoot,
-     provider,
-     excludes: config.excludes,
-     includes: config.includes,
-     debug: config.debug ?? false,
-     apiKey: config.apiKey
-   });
-   const finish = result.finish;
-   if (result.terminationReason !== "completed" || !finish?.metadata) {
-     return { success: false, error: "Search did not complete" };
-   }
-   const contexts = (finish.resolved ?? []).map((r) => ({
-     file: r.path,
-     content: r.content
-   }));
-   return { success: true, contexts, summary: finish.payload };
- }
- function createMorphWarpGrepTool(config) {
-   const tool = {
-     type: "function",
-     function: {
-       name: config.name ?? WARP_GREP_TOOL_NAME,
-       description: config.description ?? WARP_GREP_DESCRIPTION,
-       parameters: TOOL_PARAMETERS
-     }
-   };
-   return Object.assign(tool, {
-     execute: async (input) => {
-       return execute(input, config);
-     },
-     formatResult: (result) => {
-       return formatResult(result);
-     },
-     getSystemPrompt: () => {
-       return getSystemPrompt();
-     }
-   });
- }
- var openai_default = warpGrepTool;
-
- export {
-   warpGrepTool,
-   execute,
-   createMorphWarpGrepTool,
-   openai_default
- };
- //# sourceMappingURL=chunk-IMYQOKFO.js.map
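The removed chunk above is the compiled OpenAI adapter (`tools/warp_grep/openai.ts`); its source map (next hunk) still carries the original JSDoc. For reference, a usage sketch reconstructed from that JSDoc, with the tool-call handling spelled out:

```typescript
import OpenAI from 'openai';
import { warpGrepTool, execute } from '@morphllm/morphsdk/tools/warp-grep/openai';

const client = new OpenAI();
const response = await client.chat.completions.create({
  model: 'gpt-4o',
  tools: [warpGrepTool],
  messages: [{ role: 'user', content: 'Find authentication middleware' }],
});

// Execute the tool call the model requested; execute() accepts the raw JSON argument string.
const call = response.choices[0].message.tool_calls?.[0];
if (call && call.type === 'function') {
  const result = await execute(call.function.arguments, { repoRoot: '.' });
  console.log(result);
}
```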
@@ -1 +0,0 @@
- {"version":3,"sources":["../tools/warp_grep/openai.ts"],"sourcesContent":["/**\n * OpenAI SDK adapter for morph-warp-grep tool\n */\n\nimport type { ChatCompletionTool } from 'openai/resources/chat/completions';\nimport { runWarpGrep } from './agent/runner.js';\nimport { LocalRipgrepProvider } from './providers/local.js';\nimport { WARP_GREP_DESCRIPTION, WARP_GREP_TOOL_NAME, getSystemPrompt } from './prompts.js';\nimport { formatResult } from './core.js';\nimport type { WarpGrepToolConfig, WarpGrepResult, WarpGrepContext } from './types.js';\n\n/**\n * Input schema for the warp grep tool\n */\nconst TOOL_PARAMETERS = {\n type: 'object',\n properties: {\n query: { type: 'string', description: 'Free-form repository question' },\n },\n required: ['query'],\n} as const;\n\n/**\n * OpenAI-native warp grep tool definition\n * \n * @example\n * ```typescript\n * import OpenAI from 'openai';\n * import { warpGrepTool, execute } from '@morphllm/morphsdk/tools/warp-grep/openai';\n * \n * const client = new OpenAI();\n * const response = await client.chat.completions.create({\n * model: 'gpt-4o',\n * tools: [warpGrepTool],\n * messages: [{ role: 'user', content: 'Find authentication middleware' }]\n * });\n * \n * // Execute the tool call\n * const result = await execute({ query: '...' }, { repoRoot: '.' });\n * ```\n */\nexport const warpGrepTool: ChatCompletionTool = {\n type: 'function',\n function: {\n name: WARP_GREP_TOOL_NAME,\n description: WARP_GREP_DESCRIPTION,\n parameters: TOOL_PARAMETERS,\n },\n};\n\n/**\n * Execute warp grep search\n * \n * @param input - Tool input with query\n * @param config - Configuration with repoRoot and optional provider\n * @returns Search results\n */\nexport async function execute(\n input: { query: string } | string,\n config: WarpGrepToolConfig\n): Promise<WarpGrepResult> {\n const parsed = typeof input === 'string' ? JSON.parse(input) : input;\n const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);\n \n const result = await runWarpGrep({\n query: parsed.query,\n repoRoot: config.repoRoot,\n provider,\n excludes: config.excludes,\n includes: config.includes,\n debug: config.debug ?? false,\n apiKey: config.apiKey,\n });\n\n const finish = result.finish;\n if (result.terminationReason !== 'completed' || !finish?.metadata) {\n return { success: false, error: 'Search did not complete' };\n }\n\n const contexts: WarpGrepContext[] = (finish.resolved ?? 
[]).map(r => ({\n file: r.path,\n content: r.content,\n }));\n\n return { success: true, contexts, summary: finish.payload };\n}\n\n// Re-export formatResult and getSystemPrompt for convenience\nexport { formatResult, getSystemPrompt };\n\n/**\n * Create a custom warp grep tool with configuration and methods\n * \n * @param config - Configuration options\n * @returns Tool definition with execute and formatResult methods\n * \n * @example\n * ```typescript\n * import OpenAI from 'openai';\n * import { createMorphWarpGrepTool } from '@morphllm/morphsdk/tools/warp-grep/openai';\n * \n * const tool = createMorphWarpGrepTool({\n * repoRoot: '.',\n * description: 'Custom tool description'\n * });\n * \n * const client = new OpenAI();\n * const response = await client.chat.completions.create({\n * model: 'gpt-4o',\n * tools: [tool],\n * messages: [{ role: 'user', content: 'Find authentication middleware' }]\n * });\n * \n * // Execute and format\n * const result = await tool.execute(toolCallArgs);\n * const formatted = tool.formatResult(result);\n * ```\n */\nexport function createMorphWarpGrepTool(config: WarpGrepToolConfig) {\n const tool: ChatCompletionTool = {\n type: 'function',\n function: {\n name: config.name ?? WARP_GREP_TOOL_NAME,\n description: config.description ?? WARP_GREP_DESCRIPTION,\n parameters: TOOL_PARAMETERS,\n },\n };\n\n return Object.assign(tool, {\n execute: async (input: unknown): Promise<WarpGrepResult> => {\n return execute(input as { query: string } | string, config);\n },\n formatResult: (result: WarpGrepResult): string => {\n return formatResult(result);\n },\n getSystemPrompt: (): string => {\n return getSystemPrompt();\n },\n });\n}\n\nexport default warpGrepTool;\n"],"mappings":";;;;;;;;;;;;;;;;AAcA,IAAM,kBAAkB;AAAA,EACtB,MAAM;AAAA,EACN,YAAY;AAAA,IACV,OAAO,EAAE,MAAM,UAAU,aAAa,gCAAgC;AAAA,EACxE;AAAA,EACA,UAAU,CAAC,OAAO;AACpB;AAqBO,IAAM,eAAmC;AAAA,EAC9C,MAAM;AAAA,EACN,UAAU;AAAA,IACR,MAAM;AAAA,IACN,aAAa;AAAA,IACb,YAAY;AAAA,EACd;AACF;AASA,eAAsB,QACpB,OACA,QACyB;AACzB,QAAM,SAAS,OAAO,UAAU,WAAW,KAAK,MAAM,KAAK,IAAI;AAC/D,QAAM,WAAW,OAAO,YAAY,IAAI,qBAAqB,OAAO,UAAU,OAAO,QAAQ;AAE7F,QAAM,SAAS,MAAM,YAAY;AAAA,IAC/B,OAAO,OAAO;AAAA,IACd,UAAU,OAAO;AAAA,IACjB;AAAA,IACA,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,OAAO,OAAO,SAAS;AAAA,IACvB,QAAQ,OAAO;AAAA,EACjB,CAAC;AAED,QAAM,SAAS,OAAO;AACtB,MAAI,OAAO,sBAAsB,eAAe,CAAC,QAAQ,UAAU;AACjE,WAAO,EAAE,SAAS,OAAO,OAAO,0BAA0B;AAAA,EAC5D;AAEA,QAAM,YAA+B,OAAO,YAAY,CAAC,GAAG,IAAI,QAAM;AAAA,IACpE,MAAM,EAAE;AAAA,IACR,SAAS,EAAE;AAAA,EACb,EAAE;AAEF,SAAO,EAAE,SAAS,MAAM,UAAU,SAAS,OAAO,QAAQ;AAC5D;AAiCO,SAAS,wBAAwB,QAA4B;AAClE,QAAM,OAA2B;AAAA,IAC/B,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM,OAAO,QAAQ;AAAA,MACrB,aAAa,OAAO,eAAe;AAAA,MACnC,YAAY;AAAA,IACd;AAAA,EACF;AAEA,SAAO,OAAO,OAAO,MAAM;AAAA,IACzB,SAAS,OAAO,UAA4C;AAC1D,aAAO,QAAQ,OAAqC,MAAM;AAAA,IAC5D;AAAA,IACA,cAAc,CAAC,WAAmC;AAChD,aAAO,aAAa,MAAM;AAAA,IAC5B;AAAA,IACA,iBAAiB,MAAc;AAC7B,aAAO,gBAAgB;AAAA,IACzB;AAAA,EACF,CAAC;AACH;AAEA,IAAO,iBAAQ;","names":[]}
@@ -1,77 +0,0 @@
- import {
-   WARP_GREP_DESCRIPTION,
-   WARP_GREP_TOOL_NAME,
-   formatResult
- } from "./chunk-GJ5TYNRD.js";
- import {
-   runWarpGrep
- } from "./chunk-6X5UOY7B.js";
- import {
-   getSystemPrompt
- } from "./chunk-WETRQJGU.js";
- import {
-   LocalRipgrepProvider
- } from "./chunk-ZJIIICRA.js";
-
- // tools/warp_grep/anthropic.ts
- var INPUT_SCHEMA = {
-   type: "object",
-   properties: {
-     query: { type: "string", description: "Free-form repository question" }
-   },
-   required: ["query"]
- };
- var warpGrepTool = {
-   name: WARP_GREP_TOOL_NAME,
-   description: WARP_GREP_DESCRIPTION,
-   input_schema: INPUT_SCHEMA
- };
- async function execute(input, config) {
-   const parsed = typeof input === "string" ? JSON.parse(input) : input;
-   const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
-   const result = await runWarpGrep({
-     query: parsed.query,
-     repoRoot: config.repoRoot,
-     provider,
-     excludes: config.excludes,
-     includes: config.includes,
-     debug: config.debug ?? false,
-     apiKey: config.apiKey
-   });
-   const finish = result.finish;
-   if (result.terminationReason !== "completed" || !finish?.metadata) {
-     return { success: false, error: "Search did not complete" };
-   }
-   const contexts = (finish.resolved ?? []).map((r) => ({
-     file: r.path,
-     content: r.content
-   }));
-   return { success: true, contexts, summary: finish.payload };
- }
- function createMorphWarpGrepTool(config) {
-   const tool = {
-     name: config.name ?? WARP_GREP_TOOL_NAME,
-     description: config.description ?? WARP_GREP_DESCRIPTION,
-     input_schema: INPUT_SCHEMA
-   };
-   return Object.assign(tool, {
-     execute: async (input) => {
-       return execute(input, config);
-     },
-     formatResult: (result) => {
-       return formatResult(result);
-     },
-     getSystemPrompt: () => {
-       return getSystemPrompt();
-     }
-   });
- }
- var anthropic_default = warpGrepTool;
-
- export {
-   warpGrepTool,
-   execute,
-   createMorphWarpGrepTool,
-   anthropic_default
- };
- //# sourceMappingURL=chunk-KBQWGT5L.js.map
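This hunk is the matching Anthropic adapter (`tools/warp_grep/anthropic.ts`), identical except for the `Tool`/`input_schema` shape. A sketch of the `createMorphWarpGrepTool` flow from the JSDoc in its source map (next hunk); `max_tokens` and the `tool_use` extraction are additions needed to make the snippet self-contained:

```typescript
import Anthropic from '@anthropic-ai/sdk';
import { createMorphWarpGrepTool } from '@morphllm/morphsdk/tools/warp-grep/anthropic';

// Tool definition with execute/formatResult/getSystemPrompt helpers attached.
const tool = createMorphWarpGrepTool({ repoRoot: '.' });

const client = new Anthropic();
const response = await client.messages.create({
  model: 'claude-sonnet-4-5-20250929',
  max_tokens: 1024, // required by the Anthropic API; not shown in the embedded JSDoc
  tools: [tool],
  messages: [{ role: 'user', content: 'Find authentication middleware' }],
});

// Run the tool for any tool_use block the model returned, then format it for the next turn.
const toolUse = response.content.find((block) => block.type === 'tool_use');
if (toolUse) {
  const result = await tool.execute(toolUse.input);
  console.log(tool.formatResult(result));
}
```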
@@ -1 +0,0 @@
- {"version":3,"sources":["../tools/warp_grep/anthropic.ts"],"sourcesContent":["/**\n * Anthropic SDK adapter for morph-warp-grep tool\n */\n\nimport type { Tool } from '@anthropic-ai/sdk/resources/messages';\nimport { runWarpGrep } from './agent/runner.js';\nimport { LocalRipgrepProvider } from './providers/local.js';\nimport { WARP_GREP_DESCRIPTION, WARP_GREP_TOOL_NAME, getSystemPrompt } from './prompts.js';\nimport { formatResult } from './core.js';\nimport type { WarpGrepToolConfig, WarpGrepResult, WarpGrepContext } from './types.js';\n\n/**\n * Input schema for the warp grep tool\n */\nconst INPUT_SCHEMA = {\n type: 'object',\n properties: {\n query: { type: 'string', description: 'Free-form repository question' },\n },\n required: ['query'],\n} as const;\n\n/**\n * Anthropic-native warp grep tool definition\n * \n * @example\n * ```typescript\n * import Anthropic from '@anthropic-ai/sdk';\n * import { warpGrepTool, execute } from '@morphllm/morphsdk/tools/warp-grep/anthropic';\n * \n * const client = new Anthropic();\n * const response = await client.messages.create({\n * model: 'claude-sonnet-4-5-20250929',\n * tools: [warpGrepTool],\n * messages: [{ role: 'user', content: 'Find authentication middleware' }]\n * });\n * \n * // Execute the tool call\n * const result = await execute({ query: '...' }, { repoRoot: '.' });\n * ```\n */\nexport const warpGrepTool: Tool = {\n name: WARP_GREP_TOOL_NAME,\n description: WARP_GREP_DESCRIPTION,\n input_schema: INPUT_SCHEMA,\n};\n\n/**\n * Execute warp grep search\n * \n * @param input - Tool input with query\n * @param config - Configuration with repoRoot and optional provider\n * @returns Search results\n */\nexport async function execute(\n input: { query: string } | string,\n config: WarpGrepToolConfig\n): Promise<WarpGrepResult> {\n const parsed = typeof input === 'string' ? JSON.parse(input) : input;\n const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);\n \n const result = await runWarpGrep({\n query: parsed.query,\n repoRoot: config.repoRoot,\n provider,\n excludes: config.excludes,\n includes: config.includes,\n debug: config.debug ?? false,\n apiKey: config.apiKey,\n });\n\n const finish = result.finish;\n if (result.terminationReason !== 'completed' || !finish?.metadata) {\n return { success: false, error: 'Search did not complete' };\n }\n\n const contexts: WarpGrepContext[] = (finish.resolved ?? 
[]).map(r => ({\n file: r.path,\n content: r.content,\n }));\n\n return { success: true, contexts, summary: finish.payload };\n}\n\n// Re-export formatResult and getSystemPrompt for convenience\nexport { formatResult, getSystemPrompt };\n\n/**\n * Create a custom warp grep tool with configuration and methods\n * \n * @param config - Configuration options\n * @returns Tool definition with execute and formatResult methods\n * \n * @example\n * ```typescript\n * import Anthropic from '@anthropic-ai/sdk';\n * import { createMorphWarpGrepTool } from '@morphllm/morphsdk/tools/warp-grep/anthropic';\n * \n * const tool = createMorphWarpGrepTool({\n * repoRoot: '.',\n * description: 'Custom tool description'\n * });\n * \n * const client = new Anthropic();\n * const response = await client.messages.create({\n * model: 'claude-sonnet-4-5-20250929',\n * tools: [tool],\n * messages: [{ role: 'user', content: 'Find authentication middleware' }]\n * });\n * \n * // Execute and format\n * const result = await tool.execute(toolUseBlock.input);\n * const formatted = tool.formatResult(result);\n * ```\n */\nexport function createMorphWarpGrepTool(config: WarpGrepToolConfig) {\n const tool: Tool = {\n name: config.name ?? WARP_GREP_TOOL_NAME,\n description: config.description ?? WARP_GREP_DESCRIPTION,\n input_schema: INPUT_SCHEMA,\n };\n\n return Object.assign(tool, {\n execute: async (input: unknown): Promise<WarpGrepResult> => {\n return execute(input as { query: string } | string, config);\n },\n formatResult: (result: WarpGrepResult): string => {\n return formatResult(result);\n },\n getSystemPrompt: (): string => {\n return getSystemPrompt();\n },\n });\n}\n\nexport default warpGrepTool;\n"],"mappings":";;;;;;;;;;;;;;;;AAcA,IAAM,eAAe;AAAA,EACnB,MAAM;AAAA,EACN,YAAY;AAAA,IACV,OAAO,EAAE,MAAM,UAAU,aAAa,gCAAgC;AAAA,EACxE;AAAA,EACA,UAAU,CAAC,OAAO;AACpB;AAqBO,IAAM,eAAqB;AAAA,EAChC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,cAAc;AAChB;AASA,eAAsB,QACpB,OACA,QACyB;AACzB,QAAM,SAAS,OAAO,UAAU,WAAW,KAAK,MAAM,KAAK,IAAI;AAC/D,QAAM,WAAW,OAAO,YAAY,IAAI,qBAAqB,OAAO,UAAU,OAAO,QAAQ;AAE7F,QAAM,SAAS,MAAM,YAAY;AAAA,IAC/B,OAAO,OAAO;AAAA,IACd,UAAU,OAAO;AAAA,IACjB;AAAA,IACA,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,OAAO,OAAO,SAAS;AAAA,IACvB,QAAQ,OAAO;AAAA,EACjB,CAAC;AAED,QAAM,SAAS,OAAO;AACtB,MAAI,OAAO,sBAAsB,eAAe,CAAC,QAAQ,UAAU;AACjE,WAAO,EAAE,SAAS,OAAO,OAAO,0BAA0B;AAAA,EAC5D;AAEA,QAAM,YAA+B,OAAO,YAAY,CAAC,GAAG,IAAI,QAAM;AAAA,IACpE,MAAM,EAAE;AAAA,IACR,SAAS,EAAE;AAAA,EACb,EAAE;AAEF,SAAO,EAAE,SAAS,MAAM,UAAU,SAAS,OAAO,QAAQ;AAC5D;AAiCO,SAAS,wBAAwB,QAA4B;AAClE,QAAM,OAAa;AAAA,IACjB,MAAM,OAAO,QAAQ;AAAA,IACrB,aAAa,OAAO,eAAe;AAAA,IACnC,cAAc;AAAA,EAChB;AAEA,SAAO,OAAO,OAAO,MAAM;AAAA,IACzB,SAAS,OAAO,UAA4C;AAC1D,aAAO,QAAQ,OAAqC,MAAM;AAAA,IAC5D;AAAA,IACA,cAAc,CAAC,WAAmC;AAChD,aAAO,aAAa,MAAM;AAAA,IAC5B;AAAA,IACA,iBAAiB,MAAc;AAC7B,aAAO,gBAAgB;AAAA,IACzB;AAAA,EACF,CAAC;AACH;AAEA,IAAO,oBAAQ;","names":[]}
@@ -1 +0,0 @@
- {"version":3,"sources":["../tools/warp_grep/agent/parser.ts"],"sourcesContent":["// Parses assistant lines into structured tool calls\nimport type { ToolCall } from './types.js';\n\n// Keep for backwards compatibility - no longer thrown, but exported for tests\nexport class LLMResponseParseError extends Error {\n constructor(message: string) {\n super(message);\n this.name = 'LLMResponseParseError';\n }\n}\n\n// Valid tool command names\nconst VALID_COMMANDS = ['analyse', 'grep', 'read', 'finish'];\n\n/**\n * Preprocesses text to handle XML tags:\n * 1. Removes <think>...</think> blocks entirely\n * 2. Extracts content from <tool>...</tool> or <tool_call>...</tool_call> tags\n * 3. Passes through raw tool calls (lines starting with valid commands)\n * 4. Discards unclosed <tool...> tags\n */\nfunction preprocessText(text: string): string[] {\n // Step 1: Remove <think>...</think> blocks (including multiline)\n let processed = text.replace(/<think>[\\s\\S]*?<\\/think>/gi, '');\n \n // Step 2: Check for unclosed <tool or <tool_call tags and discard them\n // Find all opening tags and their positions\n const openingTagRegex = /<tool_call>|<tool>/gi;\n const closingTagRegex = /<\\/tool_call>|<\\/tool>/gi;\n \n // Count opening and closing tags\n const openingMatches = processed.match(openingTagRegex) || [];\n const closingMatches = processed.match(closingTagRegex) || [];\n \n // If there are more opening than closing tags, we have unclosed tags\n // In that case, only process complete tag pairs\n if (openingMatches.length > closingMatches.length) {\n // Remove any content after the last complete closing tag\n const lastClosingMatch = /<\\/tool_call>|<\\/tool>/gi;\n let lastClosingIndex = -1;\n let match;\n while ((match = lastClosingMatch.exec(processed)) !== null) {\n lastClosingIndex = match.index + match[0].length;\n }\n if (lastClosingIndex > 0) {\n processed = processed.slice(0, lastClosingIndex);\n }\n }\n \n // Step 3: Extract content from <tool_call>...</tool_call> and <tool>...</tool> tags\n const toolCallLines: string[] = [];\n const toolTagRegex = /<tool_call>([\\s\\S]*?)<\\/tool_call>|<tool>([\\s\\S]*?)<\\/tool>/gi;\n let tagMatch;\n \n while ((tagMatch = toolTagRegex.exec(processed)) !== null) {\n const content = (tagMatch[1] || tagMatch[2] || '').trim();\n if (content) {\n // Split content by newlines in case there are multiple tool calls in one tag\n const lines = content.split(/\\r?\\n/).map(l => l.trim()).filter(l => l);\n toolCallLines.push(...lines);\n }\n }\n \n // Step 4: Also extract raw tool calls (lines starting with valid commands)\n // This provides backwards compatibility\n const allLines = processed.split(/\\r?\\n/).map(l => l.trim());\n for (const line of allLines) {\n if (!line) continue;\n \n // Skip lines that are inside XML tags (already processed above)\n if (line.startsWith('<')) continue;\n \n // Check if line starts with a valid command\n const firstWord = line.split(/\\s/)[0];\n if (VALID_COMMANDS.includes(firstWord)) {\n // Avoid duplicates\n if (!toolCallLines.includes(line)) {\n toolCallLines.push(line);\n }\n }\n }\n \n return toolCallLines;\n}\n\nexport class LLMResponseParser {\n private readonly finishSpecSplitRe = /,(?=[^,\\s]+:)/;\n\n parse(text: string): ToolCall[] {\n if (typeof text !== 'string') {\n // no way we hit this, but sure, we can throw here\n throw new TypeError('Command text must be a string.');\n }\n \n // Preprocess to handle XML tags\n const lines = preprocessText(text);\n \n const commands: ToolCall[] = [];\n let 
finishAccumulator: Map<string, number[][]> | null = null;\n\n lines.forEach((line) => {\n if (!line || line.startsWith('#')) return;\n const parts = this.splitLine(line);\n if (parts.length === 0) return;\n const cmd = parts[0];\n switch (cmd) {\n case 'analyse':\n this.handleAnalyse(parts, line, commands);\n break;\n case 'grep':\n this.handleGrep(parts, line, commands);\n break;\n case 'read':\n this.handleRead(parts, line, commands);\n break;\n case 'finish':\n finishAccumulator = this.handleFinish(parts, line, commands, finishAccumulator);\n break;\n default:\n // Silently ignore unknown commands after preprocessing\n // (they might be remnants of XML or other content)\n break;\n }\n });\n\n if (finishAccumulator) {\n const map = finishAccumulator as Map<string, number[][]>;\n const entries = [...map.entries()];\n const filesPayload = entries.map(([path, ranges]) => ({\n path,\n lines: [...ranges].sort((a, b) => a[0] - b[0]) as Array<[number, number]>,\n }));\n commands.push({ name: 'finish', arguments: { files: filesPayload } });\n }\n return commands;\n }\n\n private splitLine(line: string): string[] {\n // Split by whitespace but keep quoted blocks as one\n const parts: string[] = [];\n let current = '';\n let inSingle = false;\n for (let i = 0; i < line.length; i++) {\n const ch = line[i];\n if (ch === \"'\" && line[i - 1] !== '\\\\') {\n inSingle = !inSingle;\n current += ch;\n } else if (!inSingle && /\\s/.test(ch)) {\n if (current) {\n parts.push(current);\n current = '';\n }\n } else {\n current += ch;\n }\n }\n if (current) parts.push(current);\n return parts;\n }\n\n /** Helper to create a _skip tool call with an error message */\n private skip(message: string): ToolCall {\n return { name: '_skip', arguments: { message } };\n }\n\n private handleAnalyse(parts: string[], rawLine: string, commands: ToolCall[]) {\n // analyse <path> [pattern]\n if (parts.length < 2) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" is missing a path. ` +\n `Correct format: analyse <path> [pattern]. Example: analyse src/`\n ));\n return;\n }\n const path = parts[1];\n const pattern = parts[2]?.replace(/^\"|\"$/g, '') ?? null;\n commands.push({ name: 'analyse', arguments: { path, pattern } });\n }\n\n // no glob tool in MCP\n\n private handleGrep(parts: string[], rawLine: string, commands: ToolCall[]) {\n // grep '<pattern>' <path>\n if (parts.length < 3) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" is missing arguments. ` +\n `Correct format: grep '<pattern>' <path>. Example: grep 'TODO' src/`\n ));\n return;\n }\n let pat = parts[1];\n // Be lenient: accept unquoted patterns by treating the first arg as the pattern\n if (pat.startsWith(\"'\") && pat.endsWith(\"'\")) {\n pat = pat.slice(1, -1);\n }\n // If pattern is empty after processing, skip\n if (!pat) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" has an empty pattern. ` +\n `Provide a non-empty search pattern. Example: grep 'function' src/`\n ));\n return;\n }\n commands.push({ name: 'grep', arguments: { pattern: pat, path: parts[2] } });\n }\n\n private handleRead(parts: string[], rawLine: string, commands: ToolCall[]) {\n // read <path>[:start-end]\n if (parts.length < 2) {\n commands.push(this.skip(\n `[SKIPPED] Your command \"${rawLine}\" is missing a path. ` +\n `Correct format: read <path> or read <path>:<start>-<end>. 
Example: read src/index.ts:1-50`\n ));\n return;\n }\n const spec = parts[1];\n const rangeIdx = spec.indexOf(':');\n if (rangeIdx === -1) {\n commands.push({ name: 'read', arguments: { path: spec } });\n return;\n }\n const filePath = spec.slice(0, rangeIdx);\n const range = spec.slice(rangeIdx + 1);\n const [s, e] = range.split('-').map(v => parseInt(v, 10));\n // If range is invalid, fallback to reading the whole file\n if (!Number.isFinite(s) || !Number.isFinite(e)) {\n commands.push({ name: 'read', arguments: { path: filePath } });\n return;\n }\n commands.push({ name: 'read', arguments: { path: filePath, start: s, end: e } });\n }\n\n private handleFinish(parts: string[], rawLine: string, commands: ToolCall[], acc: Map<string, number[][]> | null) {\n // finish file1:1-10,20-30 file2:5-7\n const map = acc ?? new Map<string, number[][]>();\n const args = parts.slice(1);\n for (const token of args) {\n const [filePath, rangesText] = token.split(':', 2);\n if (!filePath || !rangesText) {\n // Skip this malformed token, continue processing others\n commands.push(this.skip(\n `[SKIPPED] Invalid finish token \"${token}\". ` +\n `Correct format: finish <path>:<start>-<end>. Example: finish src/index.ts:1-50`\n ));\n continue;\n }\n const rangeSpecs = rangesText.split(',').filter(Boolean);\n for (const spec of rangeSpecs) {\n const [s, e] = spec.split('-').map(v => parseInt(v, 10));\n if (!Number.isFinite(s) || !Number.isFinite(e) || e < s) {\n // Skip this invalid range, continue with others\n commands.push(this.skip(\n `[SKIPPED] Invalid range \"${spec}\" in \"${token}\". ` +\n `Ranges must be <start>-<end> where start <= end. Example: 1-50`\n ));\n continue;\n }\n const arr = map.get(filePath) ?? [];\n arr.push([s, e]);\n map.set(filePath, arr);\n }\n }\n return map;\n 
}\n}\n"],"mappings":";AAIO,IAAM,wBAAN,cAAoC,MAAM;AAAA,EAC/C,YAAY,SAAiB;AAC3B,UAAM,OAAO;AACb,SAAK,OAAO;AAAA,EACd;AACF;AAGA,IAAM,iBAAiB,CAAC,WAAW,QAAQ,QAAQ,QAAQ;AAS3D,SAAS,eAAe,MAAwB;AAE9C,MAAI,YAAY,KAAK,QAAQ,8BAA8B,EAAE;AAI7D,QAAM,kBAAkB;AACxB,QAAM,kBAAkB;AAGxB,QAAM,iBAAiB,UAAU,MAAM,eAAe,KAAK,CAAC;AAC5D,QAAM,iBAAiB,UAAU,MAAM,eAAe,KAAK,CAAC;AAI5D,MAAI,eAAe,SAAS,eAAe,QAAQ;AAEjD,UAAM,mBAAmB;AACzB,QAAI,mBAAmB;AACvB,QAAI;AACJ,YAAQ,QAAQ,iBAAiB,KAAK,SAAS,OAAO,MAAM;AAC1D,yBAAmB,MAAM,QAAQ,MAAM,CAAC,EAAE;AAAA,IAC5C;AACA,QAAI,mBAAmB,GAAG;AACxB,kBAAY,UAAU,MAAM,GAAG,gBAAgB;AAAA,IACjD;AAAA,EACF;AAGA,QAAM,gBAA0B,CAAC;AACjC,QAAM,eAAe;AACrB,MAAI;AAEJ,UAAQ,WAAW,aAAa,KAAK,SAAS,OAAO,MAAM;AACzD,UAAM,WAAW,SAAS,CAAC,KAAK,SAAS,CAAC,KAAK,IAAI,KAAK;AACxD,QAAI,SAAS;AAEX,YAAM,QAAQ,QAAQ,MAAM,OAAO,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC,EAAE,OAAO,OAAK,CAAC;AACrE,oBAAc,KAAK,GAAG,KAAK;AAAA,IAC7B;AAAA,EACF;AAIA,QAAM,WAAW,UAAU,MAAM,OAAO,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AAC3D,aAAW,QAAQ,UAAU;AAC3B,QAAI,CAAC,KAAM;AAGX,QAAI,KAAK,WAAW,GAAG,EAAG;AAG1B,UAAM,YAAY,KAAK,MAAM,IAAI,EAAE,CAAC;AACpC,QAAI,eAAe,SAAS,SAAS,GAAG;AAEtC,UAAI,CAAC,cAAc,SAAS,IAAI,GAAG;AACjC,sBAAc,KAAK,IAAI;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEO,IAAM,oBAAN,MAAwB;AAAA,EACZ,oBAAoB;AAAA,EAErC,MAAM,MAA0B;AAC9B,QAAI,OAAO,SAAS,UAAU;AAE5B,YAAM,IAAI,UAAU,gCAAgC;AAAA,IACtD;AAGA,UAAM,QAAQ,eAAe,IAAI;AAEjC,UAAM,WAAuB,CAAC;AAC9B,QAAI,oBAAoD;AAExD,UAAM,QAAQ,CAAC,SAAS;AACtB,UAAI,CAAC,QAAQ,KAAK,WAAW,GAAG,EAAG;AACnC,YAAM,QAAQ,KAAK,UAAU,IAAI;AACjC,UAAI,MAAM,WAAW,EAAG;AACxB,YAAM,MAAM,MAAM,CAAC;AACnB,cAAQ,KAAK;AAAA,QACX,KAAK;AACH,eAAK,cAAc,OAAO,MAAM,QAAQ;AACxC;AAAA,QACF,KAAK;AACH,eAAK,WAAW,OAAO,MAAM,QAAQ;AACrC;AAAA,QACF,KAAK;AACH,eAAK,WAAW,OAAO,MAAM,QAAQ;AACrC;AAAA,QACF,KAAK;AACH,8BAAoB,KAAK,aAAa,OAAO,MAAM,UAAU,iBAAiB;AAC9E;AAAA,QACF;AAGE;AAAA,MACJ;AAAA,IACF,CAAC;AAED,QAAI,mBAAmB;AACrB,YAAM,MAAM;AACZ,YAAM,UAAU,CAAC,GAAG,IAAI,QAAQ,CAAC;AACjC,YAAM,eAAe,QAAQ,IAAI,CAAC,CAAC,MAAM,MAAM,OAAO;AAAA,QACpD;AAAA,QACA,OAAO,CAAC,GAAG,MAAM,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;AAAA,MAC/C,EAAE;AACF,eAAS,KAAK,EAAE,MAAM,UAAU,WAAW,EAAE,OAAO,aAAa,EAAE,CAAC;AAAA,IACtE;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,UAAU,MAAwB;AAExC,UAAM,QAAkB,CAAC;AACzB,QAAI,UAAU;AACd,QAAI,WAAW;AACf,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,YAAM,KAAK,KAAK,CAAC;AACjB,UAAI,OAAO,OAAO,KAAK,IAAI,CAAC,MAAM,MAAM;AACtC,mBAAW,CAAC;AACZ,mBAAW;AAAA,MACb,WAAW,CAAC,YAAY,KAAK,KAAK,EAAE,GAAG;AACrC,YAAI,SAAS;AACX,gBAAM,KAAK,OAAO;AAClB,oBAAU;AAAA,QACZ;AAAA,MACF,OAAO;AACL,mBAAW;AAAA,MACb;AAAA,IACF;AACA,QAAI,QAAS,OAAM,KAAK,OAAO;AAC/B,WAAO;AAAA,EACT;AAAA;AAAA,EAGQ,KAAK,SAA2B;AACtC,WAAO,EAAE,MAAM,SAAS,WAAW,EAAE,QAAQ,EAAE;AAAA,EACjD;AAAA,EAEQ,cAAc,OAAiB,SAAiB,UAAsB;AAE5E,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,UAAU,MAAM,CAAC,GAAG,QAAQ,UAAU,EAAE,KAAK;AACnD,aAAS,KAAK,EAAE,MAAM,WAAW,WAAW,EAAE,MAAM,QAAQ,EAAE,CAAC;AAAA,EACjE;AAAA;AAAA,EAIQ,WAAW,OAAiB,SAAiB,UAAsB;AAEzE,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,QAAI,MAAM,MAAM,CAAC;AAEjB,QAAI,IAAI,WAAW,GAAG,KAAK,IAAI,SAAS,GAAG,GAAG;AAC5C,YAAM,IAAI,MAAM,GAAG,EAAE;AAAA,IACvB;AAEA,QAAI,CAAC,KAAK;AACR,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,aAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,SAAS,KAAK,MAAM,MAAM,CAAC,EAAE,EAAE,CAAC;AAAA,EAC7E;AAAA,EAEQ,WAAW,OAAiB,SAAiB,UAAsB;AAEzE,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK;AAAA,QACjB,2BAA2B,OAAO;AAAA,MAEpC,CAAC;AACD;AAAA,IACF;AACA,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,WAAW,KAAK,QAAQ,GAAG;AACjC,QAAI,aAAa,I
AAI;AACnB,eAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,KAAK,EAAE,CAAC;AACzD;AAAA,IACF;AACA,UAAM,WAAW,KAAK,MAAM,GAAG,QAAQ;AACvC,UAAM,QAAQ,KAAK,MAAM,WAAW,CAAC;AACrC,UAAM,CAAC,GAAG,CAAC,IAAI,MAAM,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,GAAG,EAAE,CAAC;AAExD,QAAI,CAAC,OAAO,SAAS,CAAC,KAAK,CAAC,OAAO,SAAS,CAAC,GAAG;AAC9C,eAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,SAAS,EAAE,CAAC;AAC7D;AAAA,IACF;AACA,aAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,UAAU,OAAO,GAAG,KAAK,EAAE,EAAE,CAAC;AAAA,EACjF;AAAA,EAEQ,aAAa,OAAiB,SAAiB,UAAsB,KAAqC;AAEhH,UAAM,MAAM,OAAO,oBAAI,IAAwB;AAC/C,UAAM,OAAO,MAAM,MAAM,CAAC;AAC1B,eAAW,SAAS,MAAM;AACxB,YAAM,CAAC,UAAU,UAAU,IAAI,MAAM,MAAM,KAAK,CAAC;AACjD,UAAI,CAAC,YAAY,CAAC,YAAY;AAE5B,iBAAS,KAAK,KAAK;AAAA,UACjB,mCAAmC,KAAK;AAAA,QAE1C,CAAC;AACD;AAAA,MACF;AACA,YAAM,aAAa,WAAW,MAAM,GAAG,EAAE,OAAO,OAAO;AACvD,iBAAW,QAAQ,YAAY;AAC7B,cAAM,CAAC,GAAG,CAAC,IAAI,KAAK,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,GAAG,EAAE,CAAC;AACvD,YAAI,CAAC,OAAO,SAAS,CAAC,KAAK,CAAC,OAAO,SAAS,CAAC,KAAK,IAAI,GAAG;AAEvD,mBAAS,KAAK,KAAK;AAAA,YACjB,4BAA4B,IAAI,SAAS,KAAK;AAAA,UAEhD,CAAC;AACD;AAAA,QACF;AACA,cAAM,MAAM,IAAI,IAAI,QAAQ,KAAK,CAAC;AAClC,YAAI,KAAK,CAAC,GAAG,CAAC,CAAC;AACf,YAAI,IAAI,UAAU,GAAG;AAAA,MACvB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;","names":[]}
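The map above carries the full `agent/parser.ts` source: the rewritten parser strips `<think>` blocks, accepts commands wrapped in `<tool>`/`<tool_call>` tags or as bare lines, and downgrades malformed commands to `_skip` entries instead of throwing. An illustrative sketch of the command grammar and the `ToolCall` shapes it produces, derived from the handlers in that source (no public import path is assumed):

```typescript
// Derived from the embedded parser source; shapes follow the ToolCall type in agent/types.ts.
const assistantReply = `
<think>planning which files to inspect…</think>
<tool_call>
grep 'runWarpGrep' src/
read src/agent/runner.ts:1-50
finish src/agent/runner.ts:1-50,120-140 src/agent/parser.ts:5-7
</tool_call>`;

// LLMResponseParser#parse(assistantReply) would yield, in order:
const expectedToolCalls = [
  { name: 'grep', arguments: { pattern: 'runWarpGrep', path: 'src/' } },
  { name: 'read', arguments: { path: 'src/agent/runner.ts', start: 1, end: 50 } },
  // finish tokens are accumulated and emitted once, with ranges sorted by start line.
  {
    name: 'finish',
    arguments: {
      files: [
        { path: 'src/agent/runner.ts', lines: [[1, 50], [120, 140]] },
        { path: 'src/agent/parser.ts', lines: [[5, 7]] },
      ],
    },
  },
];
```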
@@ -1 +0,0 @@
- {"version":3,"sources":["../git/client.ts"],"sourcesContent":["/**\n * Morph Git Client - Simple, high-level Git operations\n * Built on isomorphic-git with explicit configuration\n */\n\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\nimport fs from 'fs';\nimport type {\n CloneOptions,\n PushOptions,\n PullOptions,\n AddOptions,\n CommitOptions,\n StatusOptions,\n LogOptions,\n CheckoutOptions,\n BranchOptions,\n DiffOptions,\n CommitObject,\n StatusResult,\n MorphGitConfig,\n MorphNotesSchema,\n WaitForEmbeddingsOptions,\n EmbeddingProgress,\n} from './types.js';\n\nconst DEFAULT_PROXY_URL = 'https://repos.morphllm.com';\n\n/**\n * MorphGit - Git operations for AI agents with Morph backend\n * \n * @example\n * ```typescript\n * import { MorphGit } from 'morphsdk/git';\n * \n * const morphGit = new MorphGit({\n * apiKey: process.env.MORPH_API_KEY!,\n * proxyUrl: 'https://repos.morphllm.com' // Optional\n * });\n * \n * await morphGit.init({ repoId: 'my-project', dir: './my-project' });\n * await morphGit.push({ dir: './my-project' });\n * ```\n */\nexport class MorphGit {\n private readonly apiKey: string;\n private readonly proxyUrl: string;\n\n constructor(config: MorphGitConfig) {\n // Validate API key\n if (!config.apiKey) {\n throw new Error('API key is required. Get one at https://morphllm.com/dashboard');\n }\n \n if (!config.apiKey.startsWith('sk-') && !config.apiKey.startsWith('morph-')) {\n throw new Error('Invalid API key format. Expected: sk-... or morph-...');\n }\n \n this.apiKey = config.apiKey;\n this.proxyUrl = config.proxyUrl || DEFAULT_PROXY_URL;\n }\n \n /**\n * Get auth callback for isomorphic-git operations\n * @private\n */\n private getAuthCallback() {\n return () => ({\n username: 'morph',\n password: this.apiKey,\n });\n }\n\n /**\n * Initialize a new repository\n * Creates the repo in the database and in the git provider\n * \n * @example\n * ```ts\n * await morphGit.init({\n * repoId: 'my-project',\n * dir: './my-project',\n * defaultBranch: 'main'\n * });\n * ```\n */\n async init(options: {\n repoId: string;\n dir: string;\n defaultBranch?: string;\n }): Promise<void> {\n const { repoId, dir, defaultBranch = 'main' } = options;\n\n // Call backend API to create repository\n const response = await fetch(`${this.proxyUrl}/v1/repos`, {\n method: 'POST',\n headers: {\n 'Authorization': `Bearer ${this.apiKey}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n repoId,\n name: repoId,\n defaultBranch,\n }),\n });\n\n if (!response.ok) {\n const error = await response.text();\n throw new Error(`Failed to create repository: ${error}`);\n }\n\n // Initialize local git repository (industry standard: no clone needed)\n await git.init({\n fs,\n dir,\n defaultBranch,\n });\n\n // Add remote pointing to Morph git-proxy\n await git.addRemote({\n fs,\n dir,\n remote: 'origin',\n url: `${this.proxyUrl}/v1/repos/${repoId}`,\n });\n\n console.log(`✓ Repository '${repoId}' initialized`);\n }\n\n /**\n * Clone a repository from Morph repos\n * \n * @example\n * ```ts\n * await morphGit.clone({\n * repoId: 'my-project',\n * dir: './my-project'\n * });\n * ```\n */\n async clone(options: CloneOptions): Promise<void> {\n const { repoId, dir, branch = 'main', depth, singleBranch = true } = options;\n\n await git.clone({\n fs,\n http,\n dir,\n url: `${this.proxyUrl}/v1/repos/${repoId}`,\n ref: branch,\n singleBranch,\n depth,\n onAuth: this.getAuthCallback(),\n });\n }\n\n /**\n * Push changes to remote repository\n * \n * 
@example\n * ```ts\n * await morphGit.push({ \n * dir: './my-project',\n * branch: 'main', // Required: explicit branch name\n * index: true // Optional: generate embeddings (default: true)\n * });\n * ```\n */\n async push(options: PushOptions): Promise<void> {\n const { dir, remote = 'origin', branch, waitForEmbeddings, index = true } = options;\n\n if (!branch) {\n throw new Error(\n 'branch is required for push operations. ' +\n 'Specify the branch explicitly: { dir: \"./my-project\", branch: \"main\" }'\n );\n }\n\n // Get commit hash and repoId before pushing\n const commitHash = await git.resolveRef({ fs, dir, ref: 'HEAD' });\n \n // Get repoId from git remote URL\n let repoId: string | undefined;\n const remotes = await git.listRemotes({ fs, dir });\n const originRemote = remotes.find(r => r.remote === remote);\n if (originRemote) {\n // Extract repoId from URL: https://repos.morphllm.com/v1/repos/{repoId}\n const match = originRemote.url.match(/\\/repos\\/([^\\/]+)$/);\n if (match) {\n repoId = match[1];\n }\n }\n\n await git.push({\n fs,\n http,\n dir,\n remote,\n ref: branch,\n onAuth: this.getAuthCallback(),\n });\n \n // Configure commit after successful push (set index flag)\n if (repoId && commitHash) {\n await this.configureCommit({ repoId, commitHash, branch, index });\n }\n \n // Wait for embeddings if requested (and indexing is enabled)\n if (waitForEmbeddings && repoId && commitHash && index) {\n await this.waitForEmbeddings({ repoId, commitHash });\n }\n }\n\n /**\n * Configure commit settings on the backend after push.\n * Sets the index flag to control embedding generation.\n * @private\n */\n private async configureCommit(options: {\n repoId: string;\n commitHash: string;\n branch: string;\n index: boolean;\n }): Promise<void> {\n const { repoId, commitHash, branch, index } = options;\n \n const response = await fetch(\n `${this.proxyUrl}/v1/repos/${repoId}/commits/${commitHash}/config`,\n {\n method: 'POST',\n headers: {\n 'Authorization': `Bearer ${this.apiKey}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({ index, branch }),\n }\n );\n\n if (!response.ok) {\n // Non-fatal: log warning but don't throw\n console.warn(`Failed to configure commit: ${response.status}`);\n }\n }\n\n /**\n * Pull changes from remote repository\n * \n * @example\n * ```ts\n * await morphGit.pull({ \n * dir: './my-project',\n * branch: 'main' // Required: explicit branch name\n * });\n * ```\n */\n async pull(options: PullOptions): Promise<void> {\n const { dir, remote = 'origin', branch } = options;\n\n if (!branch) {\n throw new Error(\n 'branch is required for pull operations. 
' +\n 'Specify the branch explicitly: { dir: \"./my-project\", branch: \"main\" }'\n );\n }\n\n await git.pull({\n fs,\n http,\n dir,\n remote,\n ref: branch,\n onAuth: this.getAuthCallback(),\n author: {\n name: 'Morph Agent',\n email: 'agent@morph.com',\n },\n });\n }\n\n /**\n * Wait for embeddings to complete after push.\n * Polls status endpoint until embeddings are done.\n * \n * @example\n * ```ts\n * await morphGit.push({ dir: './my-project', branch: 'main' });\n * await morphGit.waitForEmbeddings({\n * repoId: 'my-project',\n * onProgress: (p) => console.log(`${p.filesProcessed}/${p.totalFiles}`)\n * });\n * ```\n */\n async waitForEmbeddings(options: WaitForEmbeddingsOptions): Promise<void> {\n const { repoId, commitHash, timeout = 120000, onProgress } = options;\n const startTime = Date.now();\n const pollInterval = 1000; // Poll every 1s\n \n while (Date.now() - startTime < timeout) {\n const statusUrl = `${this.proxyUrl}/v1/repos/${repoId}/embedding-status` +\n (commitHash ? `?commit_hash=${commitHash}` : '');\n \n const response = await fetch(statusUrl, {\n headers: { 'Authorization': `Bearer ${this.apiKey}` }\n });\n \n if (response.status === 404) {\n // No job found yet - might still be creating\n await new Promise(resolve => setTimeout(resolve, pollInterval));\n continue;\n }\n \n if (!response.ok) {\n throw new Error(`Failed to get embedding status: ${response.status}`);\n }\n \n const status = await response.json();\n \n if (onProgress && status.progress) {\n onProgress(status.progress);\n }\n \n if (status.status === 'completed') {\n return; // Done!\n }\n \n if (status.status === 'failed') {\n throw new Error(`Embeddings failed: ${status.error || 'Unknown error'}`);\n }\n \n // Still processing (queued or processing), wait and poll again\n await new Promise(resolve => setTimeout(resolve, pollInterval));\n }\n \n throw new Error(`Embeddings timed out after ${timeout}ms`);\n }\n\n /**\n * Stage a file for commit\n * \n * @example\n * ```ts\n * await morphGit.add({\n * dir: './my-project',\n * filepath: 'src/app.ts'\n * });\n * ```\n */\n async add(options: AddOptions): Promise<void> {\n const { dir, filepath } = options;\n\n await git.add({\n fs,\n dir,\n filepath,\n });\n }\n\n /**\n * Remove a file from staging\n * \n * @example\n * ```ts\n * await morphGit.remove({\n * dir: './my-project',\n * filepath: 'src/old-file.ts'\n * });\n * ```\n */\n async remove(options: AddOptions): Promise<void> {\n const { dir, filepath } = options;\n\n await git.remove({\n fs,\n dir,\n filepath,\n });\n }\n\n /**\n * Commit staged changes\n * \n * @example\n * ```ts\n * await morphGit.commit({\n * dir: './my-project',\n * message: 'Add new feature',\n * author: {\n * name: 'AI Agent',\n * email: 'ai@example.com'\n * },\n * metadata: { issueId: 'PROJ-123', source: 'agent' },\n * chatHistory: [\n * { role: 'user', content: 'Please add a new feature' },\n * { role: 'assistant', content: 'I will add that feature' }\n * ],\n * recordingId: 'rec_123'\n * });\n * ```\n */\n async commit(options: CommitOptions): Promise<string> {\n const { dir, message, author, metadata, chatHistory, recordingId } = options;\n\n // Provide default author if not specified\n const commitAuthor = author || {\n name: 'Morph SDK',\n email: 'sdk@morphllm.com'\n };\n\n const sha = await git.commit({\n fs,\n dir,\n message,\n author: commitAuthor,\n });\n\n // Store notes if any note fields are provided\n if (metadata || chatHistory || recordingId) {\n const notes: MorphNotesSchema = {\n metadata,\n chatHistory,\n 
recordingId,\n _version: 1\n };\n \n await git.addNote({\n fs,\n dir,\n ref: 'refs/notes/morph-metadata',\n oid: sha,\n note: JSON.stringify(notes, null, 2),\n author: commitAuthor\n });\n }\n\n return sha;\n }\n\n /**\n * Get status of a file\n * \n * @example\n * ```ts\n * const status = await morphGit.status({\n * dir: './my-project',\n * filepath: 'src/app.ts'\n * });\n * console.log(status); // 'modified', '*added', etc.\n * ```\n */\n async status(options: StatusOptions): Promise<string> {\n const { dir, filepath } = options;\n\n if (!filepath) {\n throw new Error('filepath is required for status check');\n }\n\n const status = await git.status({\n fs,\n dir,\n filepath,\n });\n\n return status;\n }\n\n /**\n * Get commit history\n * \n * @example\n * ```ts\n * const commits = await morphGit.log({\n * dir: './my-project',\n * depth: 10\n * });\n * ```\n */\n async log(options: LogOptions): Promise<CommitObject[]> {\n const { dir, depth, ref } = options;\n\n const commits = await git.log({\n fs,\n dir,\n depth,\n ref,\n });\n\n return commits as CommitObject[];\n }\n\n /**\n * Checkout a branch or commit\n * \n * @example\n * ```ts\n * await morphGit.checkout({\n * dir: './my-project',\n * ref: 'feature-branch'\n * });\n * ```\n */\n async checkout(options: CheckoutOptions): Promise<void> {\n const { dir, ref } = options;\n\n await git.checkout({\n fs,\n dir,\n ref,\n });\n }\n\n /**\n * Create a new branch\n * \n * @example\n * ```ts\n * await morphGit.branch({\n * dir: './my-project',\n * name: 'feature-branch',\n * checkout: true\n * });\n * ```\n */\n async branch(options: BranchOptions): Promise<void> {\n const { dir, name, checkout = false } = options;\n\n await git.branch({\n fs,\n dir,\n ref: name,\n checkout,\n });\n }\n\n /**\n * List all branches\n * \n * @example\n * ```ts\n * const branches = await morphGit.listBranches({\n * dir: './my-project'\n * });\n * ```\n */\n async listBranches(options: { dir: string }): Promise<string[]> {\n const { dir } = options;\n\n const branches = await git.listBranches({\n fs,\n dir,\n });\n\n return branches;\n }\n\n /**\n * Get the current branch name\n * \n * @example\n * ```ts\n * const branch = await morphGit.currentBranch({\n * dir: './my-project'\n * });\n * ```\n */\n async currentBranch(options: { dir: string }): Promise<string | undefined> {\n const { dir } = options;\n\n const branch = await git.currentBranch({\n fs,\n dir,\n });\n\n return branch || undefined;\n }\n\n /**\n * Get list of changed files (similar to git diff --name-only)\n * \n * @example\n * ```ts\n * const changes = await morphGit.statusMatrix({\n * dir: './my-project'\n * });\n * ```\n */\n async statusMatrix(options: { dir: string }): Promise<StatusResult[]> {\n const { dir } = options;\n\n const matrix = await git.statusMatrix({\n fs,\n dir,\n });\n\n return matrix.map(([filepath, HEADStatus, workdirStatus, stageStatus]) => {\n let status: StatusResult['status'] = 'unmodified';\n\n // Determine status based on statusMatrix values\n if (HEADStatus === 1 && workdirStatus === 2 && stageStatus === 2) {\n status = 'modified';\n } else if (HEADStatus === 1 && workdirStatus === 2 && stageStatus === 1) {\n status = '*modified';\n } else if (HEADStatus === 0 && workdirStatus === 2 && stageStatus === 2) {\n status = 'added';\n } else if (HEADStatus === 0 && workdirStatus === 2 && stageStatus === 0) {\n status = '*added';\n } else if (HEADStatus === 1 && workdirStatus === 0 && stageStatus === 0) {\n status = 'deleted';\n } else if (HEADStatus === 1 && workdirStatus === 
0 && stageStatus === 1) {\n status = '*deleted';\n } else if (HEADStatus === 1 && workdirStatus === 1 && stageStatus === 1) {\n status = 'unmodified';\n } else if (HEADStatus === 0 && workdirStatus === 0 && stageStatus === 0) {\n status = 'absent';\n }\n\n return {\n filepath,\n status,\n };\n });\n }\n\n /**\n * Get the current commit hash\n * \n * @example\n * ```ts\n * const hash = await morphGit.resolveRef({\n * dir: './my-project',\n * ref: 'HEAD'\n * });\n * ```\n */\n async resolveRef(options: { dir: string; ref: string }): Promise<string> {\n const { dir, ref } = options;\n\n const oid = await git.resolveRef({\n fs,\n dir,\n ref,\n });\n\n return oid;\n }\n\n /**\n * Get notes (metadata, chat history, recording ID) attached to a commit\n * \n * @example\n * ```ts\n * const notes = await morphGit.getCommitMetadata({\n * dir: './my-project',\n * commitSha: 'abc123...'\n * });\n * \n * if (notes) {\n * console.log('Metadata:', notes.metadata);\n * console.log('Chat history:', notes.chatHistory);\n * console.log('Recording ID:', notes.recordingId);\n * }\n * ```\n */\n async getCommitMetadata(options: {\n dir: string;\n commitSha: string;\n }): Promise<MorphNotesSchema | null> {\n try {\n const note = await git.readNote({\n fs,\n dir: options.dir,\n ref: 'refs/notes/morph-metadata',\n oid: options.commitSha\n });\n \n const notes: MorphNotesSchema = JSON.parse(new TextDecoder().decode(note));\n return notes;\n } catch (err) {\n // No notes found for this commit\n return null;\n }\n }\n}\n\n"],"mappings":";AAKA,OAAO,SAAS;AAChB,OAAO,UAAU;AACjB,OAAO,QAAQ;AAoBf,IAAM,oBAAoB;AAkBnB,IAAM,WAAN,MAAe;AAAA,EACH;AAAA,EACA;AAAA,EAEjB,YAAY,QAAwB;AAElC,QAAI,CAAC,OAAO,QAAQ;AAClB,YAAM,IAAI,MAAM,gEAAgE;AAAA,IAClF;AAEA,QAAI,CAAC,OAAO,OAAO,WAAW,KAAK,KAAK,CAAC,OAAO,OAAO,WAAW,QAAQ,GAAG;AAC3E,YAAM,IAAI,MAAM,uDAAuD;AAAA,IACzE;AAEA,SAAK,SAAS,OAAO;AACrB,SAAK,WAAW,OAAO,YAAY;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,kBAAkB;AACxB,WAAO,OAAO;AAAA,MACZ,UAAU;AAAA,MACV,UAAU,KAAK;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,KAAK,SAIO;AAChB,UAAM,EAAE,QAAQ,KAAK,gBAAgB,OAAO,IAAI;AAGhD,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,QAAQ,aAAa;AAAA,MACxD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,iBAAiB,UAAU,KAAK,MAAM;AAAA,QACtC,gBAAgB;AAAA,MAClB;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA,MAAM;AAAA,QACN;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,QAAQ,MAAM,SAAS,KAAK;AAClC,YAAM,IAAI,MAAM,gCAAgC,KAAK,EAAE;AAAA,IACzD;AAGA,UAAM,IAAI,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAGD,UAAM,IAAI,UAAU;AAAA,MAClB;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,MACR,KAAK,GAAG,KAAK,QAAQ,aAAa,MAAM;AAAA,IAC1C,CAAC;AAED,YAAQ,IAAI,sBAAiB,MAAM,eAAe;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,MAAM,SAAsC;AAChD,UAAM,EAAE,QAAQ,KAAK,SAAS,QAAQ,OAAO,eAAe,KAAK,IAAI;AAErE,UAAM,IAAI,MAAM;AAAA,MACd;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK,GAAG,KAAK,QAAQ,aAAa,MAAM;AAAA,MACxC,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA,QAAQ,KAAK,gBAAgB;AAAA,IAC/B,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,KAAK,SAAqC;AAC9C,UAAM,EAAE,KAAK,SAAS,UAAU,QAAQ,mBAAmB,QAAQ,KAAK,IAAI;AAE5E,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAGA,UAAM,aAAa,MAAM,IAAI,WAAW,EAAE,IAAI,KAAK,KAAK,OAAO,CAAC;AAGhE,QAAI;AACJ,UAAM,UAAU,MAAM,IAAI,YAAY,EAAE,IAAI,IAAI,CAAC;AACjD,UAAM,eAAe,QAAQ,KAAK,OAAK,EAAE,WAAW,MAAM;AAC1D,QAAI,cAAc;AAEhB,YAAM,QAAQ,aAAa,IAAI,MAAM,oBAAoB;AACzD,UAAI,OAAO;AACT,iBAAS,MAAM,CAAC;AAAA,MAClB;AAAA,IACF;AAEA,UAAM,IAAI,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MAC
A;AAAA,MACA,KAAK;AAAA,MACL,QAAQ,KAAK,gBAAgB;AAAA,IAC/B,CAAC;AAGD,QAAI,UAAU,YAAY;AACxB,YAAM,KAAK,gBAAgB,EAAE,QAAQ,YAAY,QAAQ,MAAM,CAAC;AAAA,IAClE;AAGA,QAAI,qBAAqB,UAAU,cAAc,OAAO;AACtD,YAAM,KAAK,kBAAkB,EAAE,QAAQ,WAAW,CAAC;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,gBAAgB,SAKZ;AAChB,UAAM,EAAE,QAAQ,YAAY,QAAQ,MAAM,IAAI;AAE9C,UAAM,WAAW,MAAM;AAAA,MACrB,GAAG,KAAK,QAAQ,aAAa,MAAM,YAAY,UAAU;AAAA,MACzD;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,iBAAiB,UAAU,KAAK,MAAM;AAAA,UACtC,gBAAgB;AAAA,QAClB;AAAA,QACA,MAAM,KAAK,UAAU,EAAE,OAAO,OAAO,CAAC;AAAA,MACxC;AAAA,IACF;AAEA,QAAI,CAAC,SAAS,IAAI;AAEhB,cAAQ,KAAK,+BAA+B,SAAS,MAAM,EAAE;AAAA,IAC/D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,KAAK,SAAqC;AAC9C,UAAM,EAAE,KAAK,SAAS,UAAU,OAAO,IAAI;AAE3C,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAEA,UAAM,IAAI,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAAA,MACL,QAAQ,KAAK,gBAAgB;AAAA,MAC7B,QAAQ;AAAA,QACN,MAAM;AAAA,QACN,OAAO;AAAA,MACT;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,kBAAkB,SAAkD;AACxE,UAAM,EAAE,QAAQ,YAAY,UAAU,MAAQ,WAAW,IAAI;AAC7D,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,eAAe;AAErB,WAAO,KAAK,IAAI,IAAI,YAAY,SAAS;AACvC,YAAM,YAAY,GAAG,KAAK,QAAQ,aAAa,MAAM,uBAClD,aAAa,gBAAgB,UAAU,KAAK;AAE/C,YAAM,WAAW,MAAM,MAAM,WAAW;AAAA,QACtC,SAAS,EAAE,iBAAiB,UAAU,KAAK,MAAM,GAAG;AAAA,MACtD,CAAC;AAED,UAAI,SAAS,WAAW,KAAK;AAE3B,cAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,YAAY,CAAC;AAC9D;AAAA,MACF;AAEA,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI,MAAM,mCAAmC,SAAS,MAAM,EAAE;AAAA,MACtE;AAEA,YAAM,SAAS,MAAM,SAAS,KAAK;AAEnC,UAAI,cAAc,OAAO,UAAU;AACjC,mBAAW,OAAO,QAAQ;AAAA,MAC5B;AAEA,UAAI,OAAO,WAAW,aAAa;AACjC;AAAA,MACF;AAEA,UAAI,OAAO,WAAW,UAAU;AAC9B,cAAM,IAAI,MAAM,sBAAsB,OAAO,SAAS,eAAe,EAAE;AAAA,MACzE;AAGA,YAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,YAAY,CAAC;AAAA,IAChE;AAEA,UAAM,IAAI,MAAM,8BAA8B,OAAO,IAAI;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,IAAI,SAAoC;AAC5C,UAAM,EAAE,KAAK,SAAS,IAAI;AAE1B,UAAM,IAAI,IAAI;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,OAAO,SAAoC;AAC/C,UAAM,EAAE,KAAK,SAAS,IAAI;AAE1B,UAAM,IAAI,OAAO;AAAA,MACf;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuBA,MAAM,OAAO,SAAyC;AACpD,UAAM,EAAE,KAAK,SAAS,QAAQ,UAAU,aAAa,YAAY,IAAI;AAGrE,UAAM,eAAe,UAAU;AAAA,MAC7B,MAAM;AAAA,MACN,OAAO;AAAA,IACT;AAEA,UAAM,MAAM,MAAM,IAAI,OAAO;AAAA,MAC3B;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,QAAI,YAAY,eAAe,aAAa;AAC1C,YAAM,QAA0B;AAAA,QAC9B;AAAA,QACA;AAAA,QACA;AAAA,QACA,UAAU;AAAA,MACZ;AAEA,YAAM,IAAI,QAAQ;AAAA,QAChB;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL,KAAK;AAAA,QACL,MAAM,KAAK,UAAU,OAAO,MAAM,CAAC;AAAA,QACnC,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,OAAO,SAAyC;AACpD,UAAM,EAAE,KAAK,SAAS,IAAI;AAE1B,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAEA,UAAM,SAAS,MAAM,IAAI,OAAO;AAAA,MAC9B;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,IAAI,SAA8C;AACtD,UAAM,EAAE,KAAK,OAAO,IAAI,IAAI;AAE5B,UAAM,UAAU,MAAM,IAAI,IAAI;AAAA,MAC5B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,SAAS,SAAyC;AACtD,UAAM,EAAE,KAAK,IAAI,IAAI;AAErB,UAAM,IAAI,SAAS;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH
;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,OAAO,SAAuC;AAClD,UAAM,EAAE,KAAK,MAAM,WAAW,MAAM,IAAI;AAExC,UAAM,IAAI,OAAO;AAAA,MACf;AAAA,MACA;AAAA,MACA,KAAK;AAAA,MACL;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,aAAa,SAA6C;AAC9D,UAAM,EAAE,IAAI,IAAI;AAEhB,UAAM,WAAW,MAAM,IAAI,aAAa;AAAA,MACtC;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,cAAc,SAAuD;AACzE,UAAM,EAAE,IAAI,IAAI;AAEhB,UAAM,SAAS,MAAM,IAAI,cAAc;AAAA,MACrC;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO,UAAU;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,aAAa,SAAmD;AACpE,UAAM,EAAE,IAAI,IAAI;AAEhB,UAAM,SAAS,MAAM,IAAI,aAAa;AAAA,MACpC;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO,OAAO,IAAI,CAAC,CAAC,UAAU,YAAY,eAAe,WAAW,MAAM;AACxE,UAAI,SAAiC;AAGrC,UAAI,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AAChE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX;AAEA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,WAAW,SAAwD;AACvE,UAAM,EAAE,KAAK,IAAI,IAAI;AAErB,UAAM,MAAM,MAAM,IAAI,WAAW;AAAA,MAC/B;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,MAAM,kBAAkB,SAGa;AACnC,QAAI;AACF,YAAM,OAAO,MAAM,IAAI,SAAS;AAAA,QAC9B;AAAA,QACA,KAAK,QAAQ;AAAA,QACb,KAAK;AAAA,QACL,KAAK,QAAQ;AAAA,MACf,CAAC;AAED,YAAM,QAA0B,KAAK,MAAM,IAAI,YAAY,EAAE,OAAO,IAAI,CAAC;AACzE,aAAO;AAAA,IACT,SAAS,KAAK;AAEZ,aAAO;AAAA,IACT;AAAA,EACF;AACF;","names":[]}
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../factories/openai.ts","../factories/anthropic.ts","../factories/vercel.ts","../client.ts"],"sourcesContent":["/**\n * OpenAI Tool Factory for MorphClient\n * \n * Creates OpenAI-compatible tools that inherit the API key from MorphClient.\n */\n\nimport type { MorphClientConfig } from '../client.js';\nimport { createMorphWarpGrepTool } from '../tools/warp_grep/openai.js';\nimport { createCodebaseSearchTool } from '../tools/codebase_search/openai.js';\nimport { createEditFileTool } from '../tools/fastapply/openai.js';\nimport type { WarpGrepToolConfig } from '../tools/warp_grep/types.js';\nimport type { CodebaseSearchConfig } from '../tools/codebase_search/types.js';\nimport type { EditFileConfig } from '../tools/fastapply/types.js';\n\n/**\n * Factory for creating OpenAI-compatible tools with inherited API key\n * \n * @example\n * ```typescript\n * const morph = new MorphClient({ apiKey: process.env.MORPH_API_KEY });\n * \n * const grepTool = morph.openai.createWarpGrepTool({ repoRoot: '.' });\n * const searchTool = morph.openai.createCodebaseSearchTool({ repoId: 'my-project' });\n * const editTool = morph.openai.createEditFileTool({ baseDir: './src' });\n * \n * // Use with OpenAI client\n * const response = await openai.chat.completions.create({\n * model: 'gpt-4o',\n * tools: [grepTool, searchTool, editTool],\n * messages: [{ role: 'user', content: 'Find and fix the bug' }]\n * });\n * ```\n */\nexport class OpenAIToolFactory {\n constructor(private config: MorphClientConfig) {}\n\n /**\n * Create an OpenAI-compatible warp grep tool\n * \n * @param toolConfig - Tool configuration (apiKey inherited from MorphClient)\n * @returns OpenAI ChatCompletionTool with execute and formatResult methods\n */\n createWarpGrepTool(toolConfig: Omit<WarpGrepToolConfig, 'apiKey'>) {\n return createMorphWarpGrepTool({ \n ...toolConfig, \n apiKey: this.config.apiKey,\n });\n }\n\n /**\n * Create an OpenAI-compatible codebase search tool\n * \n * @param toolConfig - Tool configuration with repoId (apiKey inherited from MorphClient)\n * @returns OpenAI ChatCompletionTool with execute and formatResult methods\n */\n createCodebaseSearchTool(toolConfig: Omit<CodebaseSearchConfig, 'apiKey'>) {\n return createCodebaseSearchTool({ \n ...toolConfig, \n apiKey: this.config.apiKey,\n });\n }\n\n /**\n * Create an OpenAI-compatible edit file tool\n * \n * @param toolConfig - Tool configuration (morphApiKey inherited from MorphClient)\n * @returns OpenAI ChatCompletionTool with execute and formatResult methods\n */\n createEditFileTool(toolConfig: Omit<EditFileConfig, 'morphApiKey'> = {}) {\n return createEditFileTool({ \n ...toolConfig, \n morphApiKey: this.config.apiKey,\n });\n }\n}\n\n","/**\n * Anthropic Tool Factory for MorphClient\n * \n * Creates Anthropic-compatible tools that inherit the API key from MorphClient.\n */\n\nimport type { MorphClientConfig } from '../client.js';\nimport { createMorphWarpGrepTool } from '../tools/warp_grep/anthropic.js';\nimport { createCodebaseSearchTool } from '../tools/codebase_search/anthropic.js';\nimport { createEditFileTool } from '../tools/fastapply/anthropic.js';\nimport type { WarpGrepToolConfig } from '../tools/warp_grep/types.js';\nimport type { CodebaseSearchConfig } from '../tools/codebase_search/types.js';\nimport type { EditFileConfig } from '../tools/fastapply/types.js';\n\n/**\n * Factory for creating Anthropic-compatible tools with inherited API key\n * \n * @example\n * ```typescript\n * const morph = new MorphClient({ apiKey: 
process.env.MORPH_API_KEY });\n * \n * const grepTool = morph.anthropic.createWarpGrepTool({ repoRoot: '.' });\n * const searchTool = morph.anthropic.createCodebaseSearchTool({ repoId: 'my-project' });\n * const editTool = morph.anthropic.createEditFileTool({ baseDir: './src' });\n * \n * // Use with Anthropic client\n * const response = await anthropic.messages.create({\n * model: 'claude-sonnet-4-5-20250929',\n * tools: [grepTool, searchTool, editTool],\n * messages: [{ role: 'user', content: 'Find and fix the bug' }]\n * });\n * ```\n */\nexport class AnthropicToolFactory {\n constructor(private config: MorphClientConfig) {}\n\n /**\n * Create an Anthropic-compatible warp grep tool\n * \n * @param toolConfig - Tool configuration (apiKey inherited from MorphClient)\n * @returns Anthropic Tool with execute and formatResult methods\n */\n createWarpGrepTool(toolConfig: Omit<WarpGrepToolConfig, 'apiKey'>) {\n return createMorphWarpGrepTool({ \n ...toolConfig, \n apiKey: this.config.apiKey,\n });\n }\n\n /**\n * Create an Anthropic-compatible codebase search tool\n * \n * @param toolConfig - Tool configuration with repoId (apiKey inherited from MorphClient)\n * @returns Anthropic Tool with execute and formatResult methods\n */\n createCodebaseSearchTool(toolConfig: Omit<CodebaseSearchConfig, 'apiKey'>) {\n return createCodebaseSearchTool({ \n ...toolConfig, \n apiKey: this.config.apiKey,\n });\n }\n\n /**\n * Create an Anthropic-compatible edit file tool\n * \n * @param toolConfig - Tool configuration (morphApiKey inherited from MorphClient)\n * @returns Anthropic Tool with execute and formatResult methods\n */\n createEditFileTool(toolConfig: Omit<EditFileConfig, 'morphApiKey'> = {}) {\n return createEditFileTool({ \n ...toolConfig, \n morphApiKey: this.config.apiKey,\n });\n }\n}\n\n","/**\n * Vercel AI SDK Tool Factory for MorphClient\n * \n * Creates Vercel AI SDK-compatible tools that inherit the API key from MorphClient.\n */\n\nimport type { MorphClientConfig } from '../client.js';\nimport { createMorphWarpGrepTool } from '../tools/warp_grep/vercel.js';\nimport { createCodebaseSearchTool } from '../tools/codebase_search/vercel.js';\nimport { createEditFileTool } from '../tools/fastapply/vercel.js';\nimport type { WarpGrepToolConfig } from '../tools/warp_grep/types.js';\nimport type { CodebaseSearchConfig } from '../tools/codebase_search/types.js';\nimport type { EditFileConfig } from '../tools/fastapply/types.js';\n\n/**\n * Factory for creating Vercel AI SDK-compatible tools with inherited API key\n * \n * @example\n * ```typescript\n * const morph = new MorphClient({ apiKey: process.env.MORPH_API_KEY });\n * \n * const grepTool = morph.vercel.createWarpGrepTool({ repoRoot: '.' 
});\n * const searchTool = morph.vercel.createCodebaseSearchTool({ repoId: 'my-project' });\n * const editTool = morph.vercel.createEditFileTool({ baseDir: './src' });\n * \n * // Use with Vercel AI SDK\n * const result = await generateText({\n * model: anthropic('claude-sonnet-4-5-20250929'),\n * tools: { grep: grepTool, search: searchTool, edit: editTool },\n * prompt: 'Find and fix the bug'\n * });\n * ```\n */\nexport class VercelToolFactory {\n constructor(private config: MorphClientConfig) {}\n\n /**\n * Create a Vercel AI SDK-compatible warp grep tool\n * \n * @param toolConfig - Tool configuration (apiKey inherited from MorphClient)\n * @returns Vercel AI SDK tool\n */\n createWarpGrepTool(toolConfig: Omit<WarpGrepToolConfig, 'apiKey'>) {\n return createMorphWarpGrepTool({ \n ...toolConfig, \n apiKey: this.config.apiKey,\n });\n }\n\n /**\n * Create a Vercel AI SDK-compatible codebase search tool\n * \n * @param toolConfig - Tool configuration with repoId (apiKey inherited from MorphClient)\n * @returns Vercel AI SDK tool\n */\n createCodebaseSearchTool(toolConfig: Omit<CodebaseSearchConfig, 'apiKey'>) {\n return createCodebaseSearchTool({ \n ...toolConfig, \n apiKey: this.config.apiKey,\n });\n }\n\n /**\n * Create a Vercel AI SDK-compatible edit file tool\n * \n * @param toolConfig - Tool configuration (morphApiKey inherited from MorphClient)\n * @returns Vercel AI SDK tool\n */\n createEditFileTool(toolConfig: Omit<EditFileConfig, 'morphApiKey'> = {}) {\n return createEditFileTool({ \n ...toolConfig, \n morphApiKey: this.config.apiKey,\n });\n }\n}\n\n","/**\n * Unified Morph SDK Client\n * \n * Provides access to all Morph tools through a single interface\n * \n * @example\n * ```typescript\n * import { MorphClient } from '@morphllm/morphsdk';\n * \n * const morph = new MorphClient({ \n * apiKey: process.env.MORPH_API_KEY,\n * debug: true,\n * timeout: 60000\n * });\n * \n * // Direct execution\n * await morph.fastApply.execute({ target_filepath: 'src/index.ts', ... });\n * await morph.warpGrep.execute({ query: 'Find auth', repoRoot: '.' });\n * await morph.codebaseSearch.search({ query: 'auth logic', repoId: 'x' });\n * \n * // Tool creation for AI agents (API key inherited)\n * const grepTool = morph.openai.createWarpGrepTool({ repoRoot: '.' 
});\n * const searchTool = morph.anthropic.createCodebaseSearchTool({ repoId: 'x' });\n * const editTool = morph.vercel.createEditFileTool({ baseDir: './src' });\n * \n * // Use tools with OpenAI\n * const response = await openai.chat.completions.create({\n * model: 'gpt-4o',\n * tools: [grepTool],\n * messages: [{ role: 'user', content: 'Find the bug' }]\n * });\n * ```\n */\n\nimport type { RetryConfig } from './tools/utils/resilience.js';\nimport { FastApplyClient } from './tools/fastapply/core.js';\nimport { CodebaseSearchClient } from './tools/codebase_search/core.js';\nimport { BrowserClient } from './tools/browser/core.js';\nimport { WarpGrepClient } from './tools/warp_grep/core.js';\nimport { MorphGit } from './git/index.js';\nimport { OpenAIRouter, AnthropicRouter, GeminiRouter, RawRouter } from './modelrouter/core.js';\nimport { OpenAIToolFactory, AnthropicToolFactory, VercelToolFactory } from './factories/index.js';\n\n/**\n * Configuration for the MorphClient\n */\nexport interface MorphClientConfig {\n /** Morph API key for authentication (defaults to MORPH_API_KEY env var) */\n apiKey?: string;\n /** Enable debug logging across all tools */\n debug?: boolean;\n /** Default timeout in milliseconds for API requests */\n timeout?: number;\n /** Retry configuration for failed requests */\n retryConfig?: RetryConfig;\n}\n\n/**\n * Unified Morph SDK Client\n * \n * Provides access to all Morph tools through a single interface:\n * - fastApply: AI-powered file editing with intelligent merging\n * - codebaseSearch: Semantic code search\n * - warpGrep: Fast code search with ripgrep\n * - browser: AI-powered browser automation\n * - git: Version control operations\n * - routers: Intelligent model selection (OpenAI, Anthropic, Gemini)\n * - openai/anthropic/vercel: Tool factories for agent frameworks\n */\nexport class MorphClient {\n /** Client configuration */\n public config: MorphClientConfig;\n\n /** FastApply tool for editing files with AI-powered merge */\n public fastApply: FastApplyClient;\n\n /** CodebaseSearch tool for semantic code search */\n public codebaseSearch: CodebaseSearchClient;\n\n /** WarpGrep tool for fast code search using ripgrep */\n public warpGrep: WarpGrepClient;\n\n /** Browser tool for AI-powered browser automation */\n public browser: BrowserClient;\n\n /** Git tool for version control operations */\n public git: MorphGit;\n\n /** Model routers for intelligent model selection */\n public routers: {\n openai: OpenAIRouter;\n anthropic: AnthropicRouter;\n gemini: GeminiRouter;\n raw: RawRouter;\n };\n\n /** OpenAI-compatible tool factories */\n public openai: OpenAIToolFactory;\n\n /** Anthropic-compatible tool factories */\n public anthropic: AnthropicToolFactory;\n\n /** Vercel AI SDK tool factories */\n public vercel: VercelToolFactory;\n\n /**\n * Create a new Morph SDK client\n * \n * @param config - Client configuration (apiKey, debug, timeout, retryConfig)\n * \n * @example\n * ```typescript\n * const morph = new MorphClient({ \n * apiKey: process.env.MORPH_API_KEY,\n * debug: true,\n * timeout: 60000\n * });\n * ```\n */\n constructor(config: MorphClientConfig = {}) {\n this.config = config;\n\n // Initialize all sub-clients with shared config\n this.fastApply = new FastApplyClient({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n });\n\n this.codebaseSearch = new CodebaseSearchClient({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: 
config.retryConfig,\n });\n\n this.warpGrep = new WarpGrepClient({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n });\n\n this.browser = new BrowserClient({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n });\n\n this.git = new MorphGit({\n apiKey: config.apiKey,\n retryConfig: config.retryConfig,\n });\n\n this.routers = {\n openai: new OpenAIRouter({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n }),\n anthropic: new AnthropicRouter({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n }),\n gemini: new GeminiRouter({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n }),\n raw: new RawRouter({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n }),\n };\n\n // Initialize tool factories\n this.openai = new OpenAIToolFactory(config);\n this.anthropic = new AnthropicToolFactory(config);\n this.vercel = new VercelToolFactory(config);\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiCO,IAAM,oBAAN,MAAwB;AAAA,EAC7B,YAAoB,QAA2B;AAA3B;AAAA,EAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQhD,mBAAmB,YAAgD;AACjE,WAAO,wBAAwB;AAAA,MAC7B,GAAG;AAAA,MACH,QAAQ,KAAK,OAAO;AAAA,IACtB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,yBAAyB,YAAkD;AACzE,WAAO,yBAAyB;AAAA,MAC9B,GAAG;AAAA,MACH,QAAQ,KAAK,OAAO;AAAA,IACtB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,mBAAmB,aAAkD,CAAC,GAAG;AACvE,WAAO,mBAAmB;AAAA,MACxB,GAAG;AAAA,MACH,aAAa,KAAK,OAAO;AAAA,IAC3B,CAAC;AAAA,EACH;AACF;;;ACzCO,IAAM,uBAAN,MAA2B;AAAA,EAChC,YAAoB,QAA2B;AAA3B;AAAA,EAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQhD,mBAAmB,YAAgD;AACjE,WAAOA,yBAAwB;AAAA,MAC7B,GAAG;AAAA,MACH,QAAQ,KAAK,OAAO;AAAA,IACtB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,yBAAyB,YAAkD;AACzE,WAAOC,0BAAyB;AAAA,MAC9B,GAAG;AAAA,MACH,QAAQ,KAAK,OAAO;AAAA,IACtB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,mBAAmB,aAAkD,CAAC,GAAG;AACvE,WAAOC,oBAAmB;AAAA,MACxB,GAAG;AAAA,MACH,aAAa,KAAK,OAAO;AAAA,IAC3B,CAAC;AAAA,EACH;AACF;;;ACzCO,IAAM,oBAAN,MAAwB;AAAA,EAC7B,YAAoB,QAA2B;AAA3B;AAAA,EAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQhD,mBAAmB,YAAgD;AACjE,WAAOC,yBAAwB;AAAA,MAC7B,GAAG;AAAA,MACH,QAAQ,KAAK,OAAO;AAAA,IACtB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,yBAAyB,YAAkD;AACzE,WAAOC,0BAAyB;AAAA,MAC9B,GAAG;AAAA,MACH,QAAQ,KAAK,OAAO;AAAA,IACtB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,mBAAmB,aAAkD,CAAC,GAAG;AACvE,WAAOC,oBAAmB;AAAA,MACxB,GAAG;AAAA,MACH,aAAa,KAAK,OAAO;AAAA,IAC3B,CAAC;AAAA,EACH;AACF;;;ACLO,IAAM,cAAN,MAAkB;AAAA;AAAA,EAEhB;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA,EAQA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBP,YAAY,SAA4B,CAAC,GAAG;AAC1C,SAAK,SAAS;AAGd,SAAK,YAAY,IAAI,gBAAgB;AAAA,MACnC,QAAQ,OAAO;AAAA,MACf,OAAO,OAAO;AAAA,MACd,SAAS,OAAO;AAAA,MAChB,aAAa,OAAO;AAAA,IACtB,CAAC;AAED,SAAK,iBAAiB,IAAI,qBAAqB;AAAA,MAC7C,QAAQ,OAAO;AAAA,MACf,OAAO,OAAO;AAAA,MACd,SAAS,OAAO;AAAA,MAChB,aAAa,OAAO;AAAA,IACtB,CAAC;AAED,SAAK,WAAW,IAAI,eAAe;AAAA,MACjC,QAAQ,OAAO;AAAA,MACf,OAAO,OAAO;AAAA,MACd,SAAS,OAAO;AAAA,MAChB,aAAa,OAAO;AAAA,IACtB,CAAC;AAED,SAAK,UAAU,IAAI,cAAc;AAAA,MAC/B,QAAQ,OAAO;AAAA,MACf,OAAO,OAAO;AAAA,MACd,SAAS,OAAO;AAAA,MAChB,aAAa,OAAO
;AAAA,IACtB,CAAC;AAED,SAAK,MAAM,IAAI,SAAS;AAAA,MACtB,QAAQ,OAAO;AAAA,MACf,aAAa,OAAO;AAAA,IACtB,CAAC;AAED,SAAK,UAAU;AAAA,MACb,QAAQ,IAAI,aAAa;AAAA,QACvB,QAAQ,OAAO;AAAA,QACf,OAAO,OAAO;AAAA,QACd,SAAS,OAAO;AAAA,QAChB,aAAa,OAAO;AAAA,MACtB,CAAC;AAAA,MACD,WAAW,IAAI,gBAAgB;AAAA,QAC7B,QAAQ,OAAO;AAAA,QACf,OAAO,OAAO;AAAA,QACd,SAAS,OAAO;AAAA,QAChB,aAAa,OAAO;AAAA,MACtB,CAAC;AAAA,MACD,QAAQ,IAAI,aAAa;AAAA,QACvB,QAAQ,OAAO;AAAA,QACf,OAAO,OAAO;AAAA,QACd,SAAS,OAAO;AAAA,QAChB,aAAa,OAAO;AAAA,MACtB,CAAC;AAAA,MACD,KAAK,IAAI,UAAU;AAAA,QACjB,QAAQ,OAAO;AAAA,QACf,OAAO,OAAO;AAAA,QACd,SAAS,OAAO;AAAA,QAChB,aAAa,OAAO;AAAA,MACtB,CAAC;AAAA,IACH;AAGA,SAAK,SAAS,IAAI,kBAAkB,MAAM;AAC1C,SAAK,YAAY,IAAI,qBAAqB,MAAM;AAChD,SAAK,SAAS,IAAI,kBAAkB,MAAM;AAAA,EAC5C;AACF;","names":["createMorphWarpGrepTool","createCodebaseSearchTool","createEditFileTool","createMorphWarpGrepTool","createCodebaseSearchTool","createEditFileTool"]}
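The source map deleted above still embeds the full `client.ts` and factory sources, whose JSDoc documents the `MorphClient` tool factories (`openai`, `anthropic`, `vercel`). A minimal usage sketch drawn from those embedded examples; it assumes `MORPH_API_KEY` is set and adds nothing beyond what the bundled source documents:

```typescript
import { MorphClient } from '@morphllm/morphsdk';

const morph = new MorphClient({
  apiKey: process.env.MORPH_API_KEY, // MorphClientConfig also falls back to the MORPH_API_KEY env var
  debug: true,
  timeout: 60_000,
});

// Each factory injects the client's API key, so individual tools no longer need it passed explicitly.
const grepTool = morph.openai.createWarpGrepTool({ repoRoot: '.' });
const searchTool = morph.anthropic.createCodebaseSearchTool({ repoId: 'my-project' });
const editTool = morph.vercel.createEditFileTool({ baseDir: './src' });
```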
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../tools/warp_grep/agent/formatter.ts"],"sourcesContent":["export class ToolOutputFormatter {\n\tformat(\n\t\ttoolName: string,\n\t\targs: Record<string, unknown> | null | undefined,\n\t\toutput: string,\n\t\toptions: { isError?: boolean } = {}\n\t): string {\n\t\tconst name = (toolName ?? \"\").trim();\n\t\tif (!name) {\n\t\t\treturn \"\";\n\t\t}\n\t\tconst payload = (output as any)?.toString?.()?.trim?.() ?? \"\";\n\t\tconst isError = Boolean(options.isError);\n\t\tconst safeArgs = args ?? {};\n\n\t\tif (!payload && !isError) {\n\t\t\treturn \"\";\n\t\t}\n\n\t\tswitch (name) {\n\t\t\tcase \"read\":\n\t\t\t\treturn this.formatRead(safeArgs, payload, isError);\n\t\t\tcase \"analyse\":\n\t\t\t\treturn this.formatAnalyse(safeArgs, payload, isError);\n\t\t\tcase \"grep\":\n\t\t\t\treturn this.formatGrep(safeArgs, payload, isError);\n\t\t\tdefault:\n\t\t\t\treturn payload ? `<tool_output>\\n${payload}\\n</tool_output>` : \"\";\n\t\t}\n\t}\n\n\tprivate formatRead(args: Record<string, unknown>, payload: string, isError: boolean): string {\n\t\tif (isError) {\n\t\t\treturn payload;\n\t\t}\n\t\tconst path = this.asString(args.path) || \"...\";\n\t\treturn `<file path=\"${path}\">\\n${payload}\\n</file>`;\n\t}\n\n\tprivate formatAnalyse(args: Record<string, unknown>, payload: string, isError: boolean): string {\n\t\tconst path = this.asString(args.path) || \".\";\n\t\tif (isError) {\n\t\t\treturn `<analyse_results path=\"${path}\" status=\"error\">\\n${payload}\\n</analyse_results>`;\n\t\t}\n\t\treturn `<analyse_results path=\"${path}\">\\n${payload}\\n</analyse_results>`;\n\t}\n\n\tprivate formatGrep(args: Record<string, unknown>, payload: string, isError: boolean): string {\n\t\tconst pattern = this.asString(args.pattern);\n\t\tconst path = this.asString(args.path);\n\t\tconst attributes: string[] = [];\n\t\tif (pattern !== undefined) {\n\t\t\tattributes.push(`pattern=\"${pattern}\"`);\n\t\t}\n\t\tif (path !== undefined) {\n\t\t\tattributes.push(`path=\"${path}\"`);\n\t\t}\n\t\tif (isError) {\n\t\t\tattributes.push('status=\"error\"');\n\t\t}\n\t\tconst attrText = attributes.length ? 
` ${attributes.join(\" \")}` : \"\";\n\t\treturn `<grep_output${attrText}>\\n${payload}\\n</grep_output>`;\n\t}\n\n\tprivate asString(value: unknown): string | undefined {\n\t\tif (value === null || value === undefined) {\n\t\t\treturn undefined;\n\t\t}\n\t\treturn String(value);\n\t}\n}\n\nconst sharedFormatter = new ToolOutputFormatter();\n\nexport function formatAgentToolOutput(\n\ttoolName: string,\n\targs: Record<string, unknown> | null | undefined,\n\toutput: string,\n\toptions: { isError?: boolean } = {}\n): string {\n\treturn sharedFormatter.format(toolName, args, output, options);\n}\n\n"],"mappings":";AAAO,IAAM,sBAAN,MAA0B;AAAA,EAChC,OACC,UACA,MACA,QACA,UAAiC,CAAC,GACzB;AACT,UAAM,QAAQ,YAAY,IAAI,KAAK;AACnC,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AACA,UAAM,UAAW,QAAgB,WAAW,GAAG,OAAO,KAAK;AAC3D,UAAM,UAAU,QAAQ,QAAQ,OAAO;AACvC,UAAM,WAAW,QAAQ,CAAC;AAE1B,QAAI,CAAC,WAAW,CAAC,SAAS;AACzB,aAAO;AAAA,IACR;AAEA,YAAQ,MAAM;AAAA,MACb,KAAK;AACJ,eAAO,KAAK,WAAW,UAAU,SAAS,OAAO;AAAA,MAClD,KAAK;AACJ,eAAO,KAAK,cAAc,UAAU,SAAS,OAAO;AAAA,MACrD,KAAK;AACJ,eAAO,KAAK,WAAW,UAAU,SAAS,OAAO;AAAA,MAClD;AACC,eAAO,UAAU;AAAA,EAAkB,OAAO;AAAA,kBAAqB;AAAA,IACjE;AAAA,EACD;AAAA,EAEQ,WAAW,MAA+B,SAAiB,SAA0B;AAC5F,QAAI,SAAS;AACZ,aAAO;AAAA,IACR;AACA,UAAM,OAAO,KAAK,SAAS,KAAK,IAAI,KAAK;AACzC,WAAO,eAAe,IAAI;AAAA,EAAO,OAAO;AAAA;AAAA,EACzC;AAAA,EAEQ,cAAc,MAA+B,SAAiB,SAA0B;AAC/F,UAAM,OAAO,KAAK,SAAS,KAAK,IAAI,KAAK;AACzC,QAAI,SAAS;AACZ,aAAO,0BAA0B,IAAI;AAAA,EAAsB,OAAO;AAAA;AAAA,IACnE;AACA,WAAO,0BAA0B,IAAI;AAAA,EAAO,OAAO;AAAA;AAAA,EACpD;AAAA,EAEQ,WAAW,MAA+B,SAAiB,SAA0B;AAC5F,UAAM,UAAU,KAAK,SAAS,KAAK,OAAO;AAC1C,UAAM,OAAO,KAAK,SAAS,KAAK,IAAI;AACpC,UAAM,aAAuB,CAAC;AAC9B,QAAI,YAAY,QAAW;AAC1B,iBAAW,KAAK,YAAY,OAAO,GAAG;AAAA,IACvC;AACA,QAAI,SAAS,QAAW;AACvB,iBAAW,KAAK,SAAS,IAAI,GAAG;AAAA,IACjC;AACA,QAAI,SAAS;AACZ,iBAAW,KAAK,gBAAgB;AAAA,IACjC;AACA,UAAM,WAAW,WAAW,SAAS,IAAI,WAAW,KAAK,GAAG,CAAC,KAAK;AAClE,WAAO,eAAe,QAAQ;AAAA,EAAM,OAAO;AAAA;AAAA,EAC5C;AAAA,EAEQ,SAAS,OAAoC;AACpD,QAAI,UAAU,QAAQ,UAAU,QAAW;AAC1C,aAAO;AAAA,IACR;AACA,WAAO,OAAO,KAAK;AAAA,EACpB;AACD;AAEA,IAAM,kBAAkB,IAAI,oBAAoB;AAEzC,SAAS,sBACf,UACA,MACA,QACA,UAAiC,CAAC,GACzB;AACT,SAAO,gBAAgB,OAAO,UAAU,MAAM,QAAQ,OAAO;AAC9D;","names":[]}
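The deleted formatter map above carries the full `formatter.ts` source, which wraps agent tool output in XML-like tags. A sketch of the expected wrapping; the deep import path is an assumption, since the diff does not show whether this internal module is publicly exported:

```typescript
// Import path is hypothetical: the module ships in dist/tools/warp_grep/agent/formatter,
// but this diff does not show whether that subpath is exposed by the package exports map.
import { formatAgentToolOutput } from '@morphllm/morphsdk/tools/warp_grep/agent/formatter';

// 'read' output is wrapped in a <file> tag keyed by the path argument:
formatAgentToolOutput('read', { path: 'src/main.py' }, '1|import os');
// -> '<file path="src/main.py">\n1|import os\n</file>'

// 'grep' output carries pattern/path attributes, plus status="error" when options.isError is set:
formatAgentToolOutput('grep', { pattern: 'TODO', path: 'src/' }, 'no matches', { isError: true });
// -> '<grep_output pattern="TODO" path="src/" status="error">\nno matches\n</grep_output>'
```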
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../tools/warp_grep/agent/config.ts"],"sourcesContent":["// Agent configuration defaults for morph-warp-grep\n// Hard-coded: SDK does not expose control over rounds or timeout.\nexport const AGENT_CONFIG = {\n // Give the model freedom; failsafe cap to prevent infinite loops\n MAX_ROUNDS: 10,\n TIMEOUT_MS: 30000,\n};\n\n/**\n * Comprehensive exclusion list for directories and files\n * These patterns are used with ripgrep's -g flag\n */\nconst BUILTIN_EXCLUDES = [\n // Version control\n '.git', '.svn', '.hg', '.bzr',\n \n // Dependencies\n 'node_modules', 'bower_components', '.pnpm', '.yarn',\n 'vendor', 'packages', 'Pods', '.bundle',\n \n // Python\n '__pycache__', '.pytest_cache', '.mypy_cache', '.ruff_cache',\n '.venv', 'venv', '.tox', '.nox', '.eggs', '*.egg-info',\n \n // Build outputs\n 'dist', 'build', 'out', 'output', 'target', '_build',\n '.next', '.nuxt', '.output', '.vercel', '.netlify',\n \n // Cache directories\n '.cache', '.parcel-cache', '.turbo', '.nx', '.gradle',\n \n // IDE/Editor\n '.idea', '.vscode', '.vs',\n \n // Coverage\n 'coverage', '.coverage', 'htmlcov', '.nyc_output',\n \n // Temporary\n 'tmp', 'temp', '.tmp', '.temp',\n \n // Lock files\n 'package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb',\n 'Cargo.lock', 'Gemfile.lock', 'poetry.lock',\n \n // Binary/minified\n '*.min.js', '*.min.css', '*.bundle.js',\n '*.wasm', '*.so', '*.dll', '*.pyc',\n '*.map', '*.js.map',\n \n // Hidden directories catch-all\n '.*',\n];\n\nexport const DEFAULT_EXCLUDES = (process.env.MORPH_WARP_GREP_EXCLUDE || '')\n .split(',')\n .map(s => s.trim())\n .filter(Boolean)\n .concat(BUILTIN_EXCLUDES);\n\nexport const DEFAULT_MODEL = 'morph-warp-grep';\n\n\n"],"mappings":";AAEO,IAAM,eAAe;AAAA;AAAA,EAE1B,YAAY;AAAA,EACZ,YAAY;AACd;AAMA,IAAM,mBAAmB;AAAA;AAAA,EAEvB;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAO;AAAA;AAAA,EAGvB;AAAA,EAAgB;AAAA,EAAoB;AAAA,EAAS;AAAA,EAC7C;AAAA,EAAU;AAAA,EAAY;AAAA,EAAQ;AAAA;AAAA,EAG9B;AAAA,EAAe;AAAA,EAAiB;AAAA,EAAe;AAAA,EAC/C;AAAA,EAAS;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAQ;AAAA,EAAS;AAAA;AAAA,EAG1C;AAAA,EAAQ;AAAA,EAAS;AAAA,EAAO;AAAA,EAAU;AAAA,EAAU;AAAA,EAC5C;AAAA,EAAS;AAAA,EAAS;AAAA,EAAW;AAAA,EAAW;AAAA;AAAA,EAGxC;AAAA,EAAU;AAAA,EAAiB;AAAA,EAAU;AAAA,EAAO;AAAA;AAAA,EAG5C;AAAA,EAAS;AAAA,EAAW;AAAA;AAAA,EAGpB;AAAA,EAAY;AAAA,EAAa;AAAA,EAAW;AAAA;AAAA,EAGpC;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAQ;AAAA;AAAA,EAGvB;AAAA,EAAqB;AAAA,EAAa;AAAA,EAAkB;AAAA,EACpD;AAAA,EAAc;AAAA,EAAgB;AAAA;AAAA,EAG9B;AAAA,EAAY;AAAA,EAAa;AAAA,EACzB;AAAA,EAAU;AAAA,EAAQ;AAAA,EAAS;AAAA,EAC3B;AAAA,EAAS;AAAA;AAAA,EAGT;AACF;AAEO,IAAM,oBAAoB,QAAQ,IAAI,2BAA2B,IACrE,MAAM,GAAG,EACT,IAAI,OAAK,EAAE,KAAK,CAAC,EACjB,OAAO,OAAO,EACd,OAAO,gBAAgB;AAEnB,IAAM,gBAAgB;","names":[]}
@@ -1,129 +0,0 @@
1
- // tools/warp_grep/agent/prompt.ts
2
- var SYSTEM_PROMPT = `You are a code search agent. Your task is to find all relevant code for a given query.
3
-
4
- <workflow>
5
- You have exactly 4 turns. The 4th turn MUST be a \`finish\` call. Each turn allows up to 8 parallel tool calls.
6
-
7
- - Turn 1: Map the territory OR dive deep (based on query specificity)
8
- - Turn 2-3: Refine based on findings
9
- - Turn 4: MUST call \`finish\` with all relevant code locations
10
- - You MAY call \`finish\` early if confident\u2014but never before at least 1 search turn.
11
-
12
- Remember, if the task feels easy to you, it is strongly desirable to call \`finish\` early using fewer turns, but quality over speed.
13
- </workflow>
14
-
15
- <tools>
16
- ### \`analyse <path> [pattern]\`
17
- Directory tree or file search. Shows structure of a path, optionally filtered by regex pattern.
18
- - \`path\`: Required. Directory or file path (use \`.\` for repo root)
19
- - \`pattern\`: Optional regex to filter results
20
-
21
- Examples:
22
- \`\`\`
23
- analyse .
24
- analyse src/api
25
- analyse . ".*\\.ts$"
26
- analyse src "test.*"
27
- \`\`\`
28
-
29
- ### \`read <path>[:start-end]\`
30
- Read file contents. Line range is 1-based, inclusive.
31
- - Returns numbered lines for easy reference
32
- - Omit range to read entire file
33
-
34
- Examples:
35
- \`\`\`
36
- read src/main.py
37
- read src/db/conn.py:10-50
38
- read package.json:1-20
39
- \`\`\`
40
-
41
- ### \`grep '<pattern>' <path>\`
42
- Ripgrep search. Finds pattern matches across files.
43
- - \`'<pattern>'\`: Required. Regex pattern wrapped in single quotes
44
- - \`<path>\`: Required. Directory or file to search (use \`.\` for repo root)
45
-
46
- Examples:
47
- \`\`\`
48
- grep 'class.*Service' src/
49
- grep 'def authenticate' .
50
- grep 'import.*from' src/components/
51
- grep 'TODO' .
52
- \`\`\`
53
-
54
- ### \`finish <file1:ranges> [file2:ranges ...]\`
55
- Submit final answer with all relevant code locations.
56
- - Include generous line ranges\u2014don't be stingy with context
57
- - Ranges are comma-separated: \`file.py:10-30,50-60\`
58
- - ALWAYS include import statements at the top of files (usually lines 1-20)
59
- - If code spans multiple files, include ALL of them
60
- - Small files can be returned in full
61
-
62
- Examples:
63
- \`\`\`
64
- finish src/auth.py:1-15,25-50,75-80 src/models/user.py:1-10,20-45
65
- finish src/index.ts:1-100
66
- \`\`\`
67
- </tools>
68
-
69
- <strategy>
70
- **Before your first tool call, classify the query:**
71
-
72
- | Query Type | Turn 1 Strategy | Early Finish? |
73
- |------------|-----------------|---------------|
74
- | **Specific** (function name, error string, unique identifier) | 8 parallel greps on likely paths | Often by turn 2 |
75
- | **Conceptual** (how does X work, where is Y handled) | analyse + 2-3 broad greps | Rarely early |
76
- | **Exploratory** (find all tests, list API endpoints) | analyse at multiple depths | Usually needs 3 turns |
77
-
78
- **Parallel call patterns:**
79
- - **Shotgun grep**: Same pattern, 8 different directories\u2014fast coverage
80
- - **Variant grep**: 8 pattern variations (synonyms, naming conventions)\u2014catches inconsistent codebases
81
- - **Funnel**: 1 analyse + 7 greps\u2014orient and search simultaneously
82
- - **Deep read**: 8 reads on files you already identified\u2014gather full context fast
83
- </strategy>
84
-
85
- <output_format>
86
- EVERY response MUST follow this exact format:
87
-
88
- 1. First, wrap your reasoning in \`<think>...</think>\` tags containing:
89
- - Query classification (specific/conceptual/exploratory)
90
- - Confidence estimate (can I finish in 1-2 turns?)
91
- - This turn's parallel strategy
92
- - What signals would let me finish early?
93
-
94
- 2. Then, output tool calls wrapped in \`<tool_call>...</tool_call>\` tags, one per line.
95
-
96
- Example:
97
- \`\`\`
98
- <think>
99
- This is a specific query about authentication. I'll grep for auth-related patterns.
100
- High confidence I can finish in 2 turns if I find the auth module.
101
- Strategy: Shotgun grep across likely directories.
102
- </think>
103
- <tool_call>grep 'authenticate' src/</tool_call>
104
- <tool_call>grep 'login' src/</tool_call>
105
- <tool_call>analyse src/auth</tool_call>
106
- \`\`\`
107
-
108
- No commentary outside \`<think>\`. No explanations after tool calls.
109
- </output_format>
110
-
111
- <finishing_requirements>
112
- When calling \`finish\`:
113
- - Include the import section (typically lines 1-20) of each file
114
- - Include all function/class definitions that are relevant
115
- - Include any type definitions, interfaces, or constants used
116
- - Better to over-include than leave the user missing context
117
- - If unsure about boundaries, include more rather than less
118
- </finishing_requirements>
119
-
120
- Begin your exploration now to find code relevant to the query.`;
121
- function getSystemPrompt() {
122
- return SYSTEM_PROMPT;
123
- }
124
-
125
- export {
126
- SYSTEM_PROMPT,
127
- getSystemPrompt
128
- };
129
- //# sourceMappingURL=chunk-WETRQJGU.js.map
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../tools/warp_grep/agent/prompt.ts"],"sourcesContent":["export const SYSTEM_PROMPT = `You are a code search agent. Your task is to find all relevant code for a given query.\n\n<workflow>\nYou have exactly 4 turns. The 4th turn MUST be a \\`finish\\` call. Each turn allows up to 8 parallel tool calls.\n\n- Turn 1: Map the territory OR dive deep (based on query specificity)\n- Turn 2-3: Refine based on findings\n- Turn 4: MUST call \\`finish\\` with all relevant code locations\n- You MAY call \\`finish\\` early if confident—but never before at least 1 search turn.\n\nRemember, if the task feels easy to you, it is strongly desirable to call \\`finish\\` early using fewer turns, but quality over speed.\n</workflow>\n\n<tools>\n### \\`analyse <path> [pattern]\\`\nDirectory tree or file search. Shows structure of a path, optionally filtered by regex pattern.\n- \\`path\\`: Required. Directory or file path (use \\`.\\` for repo root)\n- \\`pattern\\`: Optional regex to filter results\n\nExamples:\n\\`\\`\\`\nanalyse .\nanalyse src/api\nanalyse . \".*\\\\.ts$\"\nanalyse src \"test.*\"\n\\`\\`\\`\n\n### \\`read <path>[:start-end]\\`\nRead file contents. Line range is 1-based, inclusive.\n- Returns numbered lines for easy reference\n- Omit range to read entire file\n\nExamples:\n\\`\\`\\`\nread src/main.py\nread src/db/conn.py:10-50\nread package.json:1-20\n\\`\\`\\`\n\n### \\`grep '<pattern>' <path>\\`\nRipgrep search. Finds pattern matches across files.\n- \\`'<pattern>'\\`: Required. Regex pattern wrapped in single quotes\n- \\`<path>\\`: Required. Directory or file to search (use \\`.\\` for repo root)\n\nExamples:\n\\`\\`\\`\ngrep 'class.*Service' src/\ngrep 'def authenticate' .\ngrep 'import.*from' src/components/\ngrep 'TODO' .\n\\`\\`\\`\n\n### \\`finish <file1:ranges> [file2:ranges ...]\\`\nSubmit final answer with all relevant code locations.\n- Include generous line ranges—don't be stingy with context\n- Ranges are comma-separated: \\`file.py:10-30,50-60\\`\n- ALWAYS include import statements at the top of files (usually lines 1-20)\n- If code spans multiple files, include ALL of them\n- Small files can be returned in full\n\nExamples:\n\\`\\`\\`\nfinish src/auth.py:1-15,25-50,75-80 src/models/user.py:1-10,20-45\nfinish src/index.ts:1-100\n\\`\\`\\`\n</tools>\n\n<strategy>\n**Before your first tool call, classify the query:**\n\n| Query Type | Turn 1 Strategy | Early Finish? |\n|------------|-----------------|---------------|\n| **Specific** (function name, error string, unique identifier) | 8 parallel greps on likely paths | Often by turn 2 |\n| **Conceptual** (how does X work, where is Y handled) | analyse + 2-3 broad greps | Rarely early |\n| **Exploratory** (find all tests, list API endpoints) | analyse at multiple depths | Usually needs 3 turns |\n\n**Parallel call patterns:**\n- **Shotgun grep**: Same pattern, 8 different directories—fast coverage\n- **Variant grep**: 8 pattern variations (synonyms, naming conventions)—catches inconsistent codebases\n- **Funnel**: 1 analyse + 7 greps—orient and search simultaneously\n- **Deep read**: 8 reads on files you already identified—gather full context fast\n</strategy>\n\n<output_format>\nEVERY response MUST follow this exact format:\n\n1. First, wrap your reasoning in \\`<think>...</think>\\` tags containing:\n - Query classification (specific/conceptual/exploratory)\n - Confidence estimate (can I finish in 1-2 turns?)\n - This turn's parallel strategy\n - What signals would let me finish early?\n\n2. 
Then, output tool calls wrapped in \\`<tool_call>...</tool_call>\\` tags, one per line.\n\nExample:\n\\`\\`\\`\n<think>\nThis is a specific query about authentication. I'll grep for auth-related patterns.\nHigh confidence I can finish in 2 turns if I find the auth module.\nStrategy: Shotgun grep across likely directories.\n</think>\n<tool_call>grep 'authenticate' src/</tool_call>\n<tool_call>grep 'login' src/</tool_call>\n<tool_call>analyse src/auth</tool_call>\n\\`\\`\\`\n\nNo commentary outside \\`<think>\\`. No explanations after tool calls.\n</output_format>\n\n<finishing_requirements>\nWhen calling \\`finish\\`:\n- Include the import section (typically lines 1-20) of each file\n- Include all function/class definitions that are relevant\n- Include any type definitions, interfaces, or constants used\n- Better to over-include than leave the user missing context\n- If unsure about boundaries, include more rather than less\n</finishing_requirements>\n\nBegin your exploration now to find code relevant to the query.`;\n\nexport function getSystemPrompt(): string {\n\treturn SYSTEM_PROMPT;\n}\n"],"mappings":";AAAO,IAAM,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAwHtB,SAAS,kBAA0B;AACzC,SAAO;AACR;","names":[]}
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../tools/warp_grep/providers/local.ts"],"sourcesContent":["import fs from 'fs/promises';\nimport fssync from 'fs';\nimport path from 'path';\nimport { runRipgrep } from '../utils/ripgrep.js';\nimport { ensureWithinRepo, resolveUnderRepo, toRepoRelative, isSymlink, isTextualFile } from '../utils/paths.js';\nimport type { WarpGrepProvider, GrepResult, ReadResult, AnalyseEntry } from './types.js';\nimport { readAllLines } from '../utils/files.js';\nimport { DEFAULT_EXCLUDES } from '../agent/config.js';\n\nexport class LocalRipgrepProvider implements WarpGrepProvider {\n constructor(private readonly repoRoot: string, private readonly excludes: string[] = DEFAULT_EXCLUDES) {}\n\n async grep(params: { pattern: string; path: string }): Promise<GrepResult> {\n const abs = resolveUnderRepo(this.repoRoot, params.path);\n const stat = await fs.stat(abs).catch(() => null);\n if (!stat) return { lines: [] };\n const targetArg = abs === path.resolve(this.repoRoot) ? '.' : toRepoRelative(this.repoRoot, abs);\n const args = [\n '--no-config',\n '--no-heading',\n '--with-filename',\n '--line-number',\n '--color=never',\n '--trim',\n '--max-columns=400',\n ...this.excludes.flatMap((e) => ['-g', `!${e}`]),\n params.pattern,\n targetArg || '.',\n ];\n const res = await runRipgrep(args, { cwd: this.repoRoot });\n \n // Gracefully handle ripgrep not being available\n if (res.exitCode === -1) {\n return {\n lines: [],\n error: `[RIPGREP NOT AVAILABLE] ripgrep (rg) is required but failed to execute. Please install it:\\n` +\n ` • macOS: brew install ripgrep\\n` +\n ` • Ubuntu/Debian: apt install ripgrep\\n` +\n ` • Windows: choco install ripgrep\\n` +\n ` • Or visit: https://github.com/BurntSushi/ripgrep#installation\\n` +\n `Exit code: ${res.exitCode}${res.stderr ? `\\nDetails: ${res.stderr}` : ''}`,\n };\n }\n \n // Handle other ripgrep errors gracefully\n if (res.exitCode !== 0 && res.exitCode !== 1) {\n return {\n lines: [],\n error: `[RIPGREP ERROR] grep failed with exit code ${res.exitCode}${res.stderr ? `: ${res.stderr}` : ''}`,\n };\n }\n \n const lines = (res.stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((l) => l.length > 0);\n return { lines };\n }\n\n async glob(params: { pattern: string; path: string }): Promise<{ files: string[] }> {\n const abs = resolveUnderRepo(this.repoRoot, params.path);\n const targetArg = abs === path.resolve(this.repoRoot) ? '.' : toRepoRelative(this.repoRoot, abs);\n const args = [\n '--no-config',\n '--files',\n '-g',\n params.pattern,\n ...this.excludes.flatMap((e) => ['-g', `!${e}`]),\n targetArg || '.',\n ];\n const res = await runRipgrep(args, { cwd: this.repoRoot });\n \n // Gracefully handle ripgrep not being available\n if (res.exitCode === -1) {\n // Return empty files with a console warning - glob is less critical than grep\n console.warn(`[warp_grep] ripgrep not available for glob: ${res.stderr || 'execution failed'}`);\n return { files: [] };\n }\n \n const files = (res.stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((l) => l.length > 0);\n return { files };\n }\n\n async read(params: { path: string; start?: number; end?: number }): Promise<ReadResult> {\n const abs = resolveUnderRepo(this.repoRoot, params.path);\n const stat = await fs.stat(abs).catch(() => null);\n \n // Gracefully handle file not found / not a file\n if (!stat || !stat.isFile()) {\n return {\n lines: [],\n error: `[FILE NOT FOUND] You tried to read \"${params.path}\" but there is no file at this path. 
` +\n `Double-check the path exists and is spelled correctly.`,\n };\n }\n \n // Gracefully handle symlinks\n if (isSymlink(abs)) {\n return {\n lines: [],\n error: `[SYMLINK] You tried to read \"${params.path}\" but this is a symlink. ` +\n `Try reading the actual file it points to instead.`,\n };\n }\n \n // Gracefully handle non-text or too-large files\n if (!isTextualFile(abs)) {\n return {\n lines: [],\n error: `[UNREADABLE FILE] You tried to read \"${params.path}\" but this file is either too large ` +\n `or not a text file, so it cannot be read. Try a different file.`,\n };\n }\n \n const lines = await readAllLines(abs);\n const total = lines.length;\n let s = params.start ?? 1;\n let e = Math.min(params.end ?? total, total);\n if (s > total && total > 0) {\n // Model hallucinated range - fallback to full file\n s = 1;\n e = total;\n }\n const out: string[] = [];\n for (let i = s; i <= e; i += 1) {\n const content = lines[i - 1] ?? '';\n out.push(`${i}|${content}`);\n }\n return { lines: out };\n }\n\n async analyse(params: { path: string; pattern?: string | null; maxResults?: number; maxDepth?: number }): Promise<AnalyseEntry[]> {\n const abs = resolveUnderRepo(this.repoRoot, params.path);\n const stat = await fs.stat(abs).catch(() => null);\n if (!stat || !stat.isDirectory()) {\n return [];\n }\n const maxResults = params.maxResults ?? 100;\n const maxDepth = params.maxDepth ?? 2;\n const regex = params.pattern ? new RegExp(params.pattern) : null;\n\n const results: AnalyseEntry[] = [];\n async function walk(dir: string, depth: number) {\n if (depth > maxDepth || results.length >= maxResults) return;\n const entries = await fs.readdir(dir, { withFileTypes: true });\n for (const entry of entries) {\n const full = path.join(dir, entry.name);\n const rel = toRepoRelative(abs, full).replace(/^[.][/\\\\]?/, '');\n if (DEFAULT_EXCLUDES.some((ex) => rel.split(path.sep).includes(ex))) continue;\n if (regex && !regex.test(entry.name)) continue;\n if (results.length >= maxResults) break;\n results.push({\n name: entry.name,\n path: toRepoRelative(path.resolve(''), full), // relative display\n type: entry.isDirectory() ? 
'dir' : 'file',\n depth,\n });\n if (entry.isDirectory()) {\n await walk(full, depth + 1);\n }\n }\n }\n await walk(abs, 0);\n return results;\n }\n}\n\n\n"],"mappings":";;;;;;;;;;;;;;;;;AAAA,OAAO,QAAQ;AAEf,OAAO,UAAU;AAOV,IAAM,uBAAN,MAAuD;AAAA,EAC5D,YAA6B,UAAmC,WAAqB,kBAAkB;AAA1E;AAAmC;AAAA,EAAwC;AAAA,EAExG,MAAM,KAAK,QAAgE;AACzE,UAAM,MAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AACvD,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAChD,QAAI,CAAC,KAAM,QAAO,EAAE,OAAO,CAAC,EAAE;AAC9B,UAAM,YAAY,QAAQ,KAAK,QAAQ,KAAK,QAAQ,IAAI,MAAM,eAAe,KAAK,UAAU,GAAG;AAC/F,UAAM,OAAO;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,GAAG,KAAK,SAAS,QAAQ,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC;AAAA,MAC/C,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AACA,UAAM,MAAM,MAAM,WAAW,MAAM,EAAE,KAAK,KAAK,SAAS,CAAC;AAGzD,QAAI,IAAI,aAAa,IAAI;AACvB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,aAKc,IAAI,QAAQ,GAAG,IAAI,SAAS;AAAA,WAAc,IAAI,MAAM,KAAK,EAAE;AAAA,MAClF;AAAA,IACF;AAGA,QAAI,IAAI,aAAa,KAAK,IAAI,aAAa,GAAG;AAC5C,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,8CAA8C,IAAI,QAAQ,GAAG,IAAI,SAAS,KAAK,IAAI,MAAM,KAAK,EAAE;AAAA,MACzG;AAAA,IACF;AAEA,UAAM,SAAS,IAAI,UAAU,IAC1B,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAC7B,WAAO,EAAE,MAAM;AAAA,EACjB;AAAA,EAEA,MAAM,KAAK,QAAyE;AAClF,UAAM,MAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AACvD,UAAM,YAAY,QAAQ,KAAK,QAAQ,KAAK,QAAQ,IAAI,MAAM,eAAe,KAAK,UAAU,GAAG;AAC/F,UAAM,OAAO;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA,OAAO;AAAA,MACP,GAAG,KAAK,SAAS,QAAQ,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC;AAAA,MAC/C,aAAa;AAAA,IACf;AACA,UAAM,MAAM,MAAM,WAAW,MAAM,EAAE,KAAK,KAAK,SAAS,CAAC;AAGzD,QAAI,IAAI,aAAa,IAAI;AAEvB,cAAQ,KAAK,+CAA+C,IAAI,UAAU,kBAAkB,EAAE;AAC9F,aAAO,EAAE,OAAO,CAAC,EAAE;AAAA,IACrB;AAEA,UAAM,SAAS,IAAI,UAAU,IAC1B,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAC7B,WAAO,EAAE,MAAM;AAAA,EACjB;AAAA,EAEA,MAAM,KAAK,QAA6E;AACtF,UAAM,MAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AACvD,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAGhD,QAAI,CAAC,QAAQ,CAAC,KAAK,OAAO,GAAG;AAC3B,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,uCAAuC,OAAO,IAAI;AAAA,MAE3D;AAAA,IACF;AAGA,QAAI,UAAU,GAAG,GAAG;AAClB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gCAAgC,OAAO,IAAI;AAAA,MAEpD;AAAA,IACF;AAGA,QAAI,CAAC,cAAc,GAAG,GAAG;AACvB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,wCAAwC,OAAO,IAAI;AAAA,MAE5D;AAAA,IACF;AAEA,UAAM,QAAQ,MAAM,aAAa,GAAG;AACpC,UAAM,QAAQ,MAAM;AACpB,QAAI,IAAI,OAAO,SAAS;AACxB,QAAI,IAAI,KAAK,IAAI,OAAO,OAAO,OAAO,KAAK;AAC3C,QAAI,IAAI,SAAS,QAAQ,GAAG;AAE1B,UAAI;AACJ,UAAI;AAAA,IACN;AACA,UAAM,MAAgB,CAAC;AACvB,aAAS,IAAI,GAAG,KAAK,GAAG,KAAK,GAAG;AAC9B,YAAM,UAAU,MAAM,IAAI,CAAC,KAAK;AAChC,UAAI,KAAK,GAAG,CAAC,IAAI,OAAO,EAAE;AAAA,IAC5B;AACA,WAAO,EAAE,OAAO,IAAI;AAAA,EACtB;AAAA,EAEA,MAAM,QAAQ,QAAoH;AAChI,UAAM,MAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AACvD,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAChD,QAAI,CAAC,QAAQ,CAAC,KAAK,YAAY,GAAG;AAChC,aAAO,CAAC;AAAA,IACV;AACA,UAAM,aAAa,OAAO,cAAc;AACxC,UAAM,WAAW,OAAO,YAAY;AACpC,UAAM,QAAQ,OAAO,UAAU,IAAI,OAAO,OAAO,OAAO,IAAI;AAE5D,UAAM,UAA0B,CAAC;AACjC,mBAAe,KAAK,KAAa,OAAe;AAC9C,UAAI,QAAQ,YAAY,QAAQ,UAAU,WAAY;AACtD,YAAM,UAAU,MAAM,GAAG,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC7D,iBAAW,SAAS,SAAS;AAC3B,cAAM,OAAO,KAAK,KAAK,KAAK,MAAM,IAAI;AACtC,cAAM,MAAM,eAAe,KAAK,IAAI,EAAE,QAAQ,cAAc,EAAE;AAC9D,YAAI,iBAAiB,KAAK,CAAC,OAAO,IAAI,MAAM,KAAK,GAAG,EAAE,SAAS,EAAE,CAAC,EAAG;AACrE,YAAI,SAAS,CAAC,MAAM,KAAK,MAAM,IAAI,EAAG;AACtC,YAAI,QAAQ,UAAU,WAAY;AAClC,gBAAQ,KAAK;AAAA,UACX,MAAM,MAAM;AAAA,UACZ,MAAM,eAAe,KAAK,QAAQ,EAAE,GAAG,IAAI;AAAA;AAAA,UAC3C,MAAM,MAAM,YAAY,IAAI,QAAQ;AAAA,UACpC;AAAA,QACF,CAAC;AACD,YAAI,MAAM,YAAY,GAAG;AA
CvB,gBAAM,KAAK,MAAM,QAAQ,CAAC;AAAA,QAC5B;AAAA,MACF;AAAA,IACF;AACA,UAAM,KAAK,KAAK,CAAC;AACjB,WAAO;AAAA,EACT;AACF;","names":[]}
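The deleted `local.ts` provider map above shows the exact flag set `LocalRipgrepProvider.grep()` passes to ripgrep, including the negated `-g` globs built from the exclude list. A sketch of the equivalent invocation, with illustrative pattern, path, and excludes:

```typescript
// Flags copied from LocalRipgrepProvider.grep() in the source above; values are illustrative.
const excludes = ['.git', 'node_modules', 'dist'];
const args = [
  '--no-config',
  '--no-heading',
  '--with-filename',
  '--line-number',
  '--color=never',
  '--trim',
  '--max-columns=400',
  ...excludes.flatMap((e) => ['-g', `!${e}`]),
  'class.*Service', // the regex pattern
  'src/',           // the target path, resolved under repoRoot ('.' for the repo root itself)
];
// Equivalent CLI:
//   rg --no-config --no-heading --with-filename --line-number --color=never --trim \
//      --max-columns=400 -g '!.git' -g '!node_modules' -g '!dist' 'class.*Service' src/
```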