@morphllm/morphsdk 0.2.45 → 0.2.46

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. package/README.md +1 -1
  2. package/dist/{chunk-TVFGHXPE.js → chunk-3FTAIJBH.js} +4 -4
  3. package/dist/chunk-5JTJOQUX.js +283 -0
  4. package/dist/chunk-5JTJOQUX.js.map +1 -0
  5. package/dist/{chunk-ZRLEAPZV.js → chunk-76DJEQEP.js} +4 -4
  6. package/dist/{chunk-W3XLPMV3.js → chunk-7HS6YXA3.js} +21 -5
  7. package/dist/{chunk-W3XLPMV3.js.map → chunk-7HS6YXA3.js.map} +1 -1
  8. package/dist/chunk-7T7YOPJV.js +82 -0
  9. package/dist/chunk-7T7YOPJV.js.map +1 -0
  10. package/dist/chunk-CL45IWIU.js +105 -0
  11. package/dist/chunk-CL45IWIU.js.map +1 -0
  12. package/dist/chunk-D6OD3IST.js +70 -0
  13. package/dist/chunk-D6OD3IST.js.map +1 -0
  14. package/dist/{chunk-PEGZVGG4.js → chunk-G4AWE5A2.js} +4 -4
  15. package/dist/{chunk-OUEJ6XEO.js → chunk-GJU7UOFL.js} +4 -4
  16. package/dist/{chunk-Q7PDN7TS.js → chunk-GZMUGMOZ.js} +1 -1
  17. package/dist/{chunk-Q7PDN7TS.js.map → chunk-GZMUGMOZ.js.map} +1 -1
  18. package/dist/chunk-JYBVRF72.js +1 -0
  19. package/dist/{chunk-GDR65N2J.js → chunk-OXHGFHEU.js} +53 -26
  20. package/dist/chunk-OXHGFHEU.js.map +1 -0
  21. package/dist/{chunk-VBBJGWHY.js → chunk-P2XKFWFD.js} +2 -2
  22. package/dist/chunk-PABIV7X6.js +76 -0
  23. package/dist/chunk-PABIV7X6.js.map +1 -0
  24. package/dist/{chunk-GTOXMAF2.js → chunk-SWQPIKPY.js} +44 -3
  25. package/dist/chunk-SWQPIKPY.js.map +1 -0
  26. package/dist/chunk-TJIUA27P.js +94 -0
  27. package/dist/chunk-TJIUA27P.js.map +1 -0
  28. package/dist/{chunk-O5DA5V5S.js → chunk-UBX7QYBD.js} +4 -4
  29. package/dist/{chunk-X4CQ6D3G.js → chunk-UIZT3KVJ.js} +4 -4
  30. package/dist/{chunk-UYBIKZPM.js → chunk-UXYK7WZX.js} +2 -2
  31. package/dist/chunk-WETRQJGU.js +129 -0
  32. package/dist/chunk-WETRQJGU.js.map +1 -0
  33. package/dist/client-BGctTHu9.d.ts +318 -0
  34. package/dist/client.cjs +1885 -44
  35. package/dist/client.cjs.map +1 -1
  36. package/dist/client.d.ts +14 -110
  37. package/dist/client.js +28 -3
  38. package/dist/core-DxiUwyBe.d.ts +156 -0
  39. package/dist/git/client.cjs +52 -25
  40. package/dist/git/client.cjs.map +1 -1
  41. package/dist/git/client.d.ts +17 -8
  42. package/dist/git/client.js +1 -1
  43. package/dist/git/index.cjs +52 -25
  44. package/dist/git/index.cjs.map +1 -1
  45. package/dist/git/index.d.ts +1 -1
  46. package/dist/git/index.js +2 -2
  47. package/dist/git/types.cjs.map +1 -1
  48. package/dist/git/types.d.ts +20 -2
  49. package/dist/index.cjs +1964 -46
  50. package/dist/index.cjs.map +1 -1
  51. package/dist/index.d.ts +8 -1
  52. package/dist/index.js +47 -5
  53. package/dist/tools/codebase_search/anthropic.js +2 -2
  54. package/dist/tools/codebase_search/index.js +9 -9
  55. package/dist/tools/codebase_search/openai.js +2 -2
  56. package/dist/tools/codebase_search/vercel.js +2 -2
  57. package/dist/tools/fastapply/anthropic.js +2 -2
  58. package/dist/tools/fastapply/index.js +7 -7
  59. package/dist/tools/fastapply/openai.js +2 -2
  60. package/dist/tools/fastapply/vercel.js +2 -2
  61. package/dist/tools/index.js +7 -7
  62. package/dist/tools/warp_grep/agent/config.cjs +80 -1
  63. package/dist/tools/warp_grep/agent/config.cjs.map +1 -1
  64. package/dist/tools/warp_grep/agent/config.js +1 -1
  65. package/dist/tools/warp_grep/agent/parser.cjs +43 -2
  66. package/dist/tools/warp_grep/agent/parser.cjs.map +1 -1
  67. package/dist/tools/warp_grep/agent/parser.js +1 -1
  68. package/dist/tools/warp_grep/agent/prompt.cjs +89 -45
  69. package/dist/tools/warp_grep/agent/prompt.cjs.map +1 -1
  70. package/dist/tools/warp_grep/agent/prompt.d.ts +1 -1
  71. package/dist/tools/warp_grep/agent/prompt.js +1 -1
  72. package/dist/tools/warp_grep/agent/runner.cjs +229 -49
  73. package/dist/tools/warp_grep/agent/runner.cjs.map +1 -1
  74. package/dist/tools/warp_grep/agent/runner.js +4 -4
  75. package/dist/tools/warp_grep/agent/types.js +0 -1
  76. package/dist/tools/warp_grep/anthropic.cjs +311 -83
  77. package/dist/tools/warp_grep/anthropic.cjs.map +1 -1
  78. package/dist/tools/warp_grep/anthropic.d.ts +75 -12
  79. package/dist/tools/warp_grep/anthropic.js +21 -8
  80. package/dist/tools/warp_grep/index.cjs +415 -126
  81. package/dist/tools/warp_grep/index.cjs.map +1 -1
  82. package/dist/tools/warp_grep/index.d.ts +17 -4
  83. package/dist/tools/warp_grep/index.js +29 -21
  84. package/dist/tools/warp_grep/openai.cjs +314 -83
  85. package/dist/tools/warp_grep/openai.cjs.map +1 -1
  86. package/dist/tools/warp_grep/openai.d.ts +73 -29
  87. package/dist/tools/warp_grep/openai.js +21 -8
  88. package/dist/tools/warp_grep/providers/command.cjs +80 -1
  89. package/dist/tools/warp_grep/providers/command.cjs.map +1 -1
  90. package/dist/tools/warp_grep/providers/command.js +2 -2
  91. package/dist/tools/warp_grep/providers/local.cjs +80 -1
  92. package/dist/tools/warp_grep/providers/local.cjs.map +1 -1
  93. package/dist/tools/warp_grep/providers/local.js +2 -2
  94. package/dist/tools/warp_grep/vercel.cjs +291 -57
  95. package/dist/tools/warp_grep/vercel.cjs.map +1 -1
  96. package/dist/tools/warp_grep/vercel.d.ts +40 -19
  97. package/dist/tools/warp_grep/vercel.js +17 -8
  98. package/package.json +1 -1
  99. package/dist/chunk-AFEPUNAO.js +0 -15
  100. package/dist/chunk-AFEPUNAO.js.map +0 -1
  101. package/dist/chunk-GDR65N2J.js.map +0 -1
  102. package/dist/chunk-GTOXMAF2.js.map +0 -1
  103. package/dist/chunk-HKZB23U7.js +0 -85
  104. package/dist/chunk-HKZB23U7.js.map +0 -1
  105. package/dist/chunk-IQHKEIQX.js +0 -54
  106. package/dist/chunk-IQHKEIQX.js.map +0 -1
  107. package/dist/chunk-JKFVDM62.js +0 -45
  108. package/dist/chunk-JKFVDM62.js.map +0 -1
  109. package/dist/chunk-K6FQZZ2E.js +0 -104
  110. package/dist/chunk-K6FQZZ2E.js.map +0 -1
  111. package/dist/chunk-KL4YVZRF.js +0 -57
  112. package/dist/chunk-KL4YVZRF.js.map +0 -1
  113. package/dist/chunk-XYPMN4A3.js +0 -1
  114. /package/dist/{chunk-TVFGHXPE.js.map → chunk-3FTAIJBH.js.map} +0 -0
  115. /package/dist/{chunk-ZRLEAPZV.js.map → chunk-76DJEQEP.js.map} +0 -0
  116. /package/dist/{chunk-PEGZVGG4.js.map → chunk-G4AWE5A2.js.map} +0 -0
  117. /package/dist/{chunk-OUEJ6XEO.js.map → chunk-GJU7UOFL.js.map} +0 -0
  118. /package/dist/{chunk-XYPMN4A3.js.map → chunk-JYBVRF72.js.map} +0 -0
  119. /package/dist/{chunk-VBBJGWHY.js.map → chunk-P2XKFWFD.js.map} +0 -0
  120. /package/dist/{chunk-O5DA5V5S.js.map → chunk-UBX7QYBD.js.map} +0 -0
  121. /package/dist/{chunk-X4CQ6D3G.js.map → chunk-UIZT3KVJ.js.map} +0 -0
  122. /package/dist/{chunk-UYBIKZPM.js.map → chunk-UXYK7WZX.js.map} +0 -0
@@ -0,0 +1,105 @@
+ import {
+   runWarpGrep
+ } from "./chunk-7HS6YXA3.js";
+ import {
+   LocalRipgrepProvider
+ } from "./chunk-UXYK7WZX.js";
+
+ // tools/warp_grep/core.ts
+ var WarpGrepClient = class {
+   config;
+   constructor(config = {}) {
+     this.config = {
+       apiKey: config.apiKey,
+       debug: config.debug,
+       timeout: config.timeout,
+       retryConfig: config.retryConfig
+     };
+   }
+   /**
+    * Execute a code search query
+    *
+    * @param input - Search parameters including query, repoRoot, and optional provider
+    * @returns Search results with relevant code contexts
+    *
+    * @example
+    * ```typescript
+    * const result = await client.execute({
+    *   query: 'Find authentication middleware',
+    *   repoRoot: '.'
+    * });
+    *
+    * if (result.success) {
+    *   for (const ctx of result.contexts) {
+    *     console.log(`File: ${ctx.file}`);
+    *     console.log(ctx.content);
+    *   }
+    * }
+    * ```
+    */
+   async execute(input) {
+     const provider = input.provider ?? new LocalRipgrepProvider(input.repoRoot, input.excludes);
+     const result = await runWarpGrep({
+       query: input.query,
+       repoRoot: input.repoRoot,
+       provider,
+       excludes: input.excludes,
+       includes: input.includes,
+       debug: input.debug ?? this.config.debug ?? false,
+       apiKey: this.config.apiKey
+     });
+     const finish = result.finish;
+     if (result.terminationReason !== "completed" || !finish?.metadata) {
+       return {
+         success: false,
+         error: "Search did not complete"
+       };
+     }
+     const contexts = (finish.resolved ?? []).map((r) => ({
+       file: r.path,
+       content: r.content
+     }));
+     return {
+       success: true,
+       contexts,
+       summary: finish.payload
+     };
+   }
+ };
+ async function executeWarpGrep(input, config) {
+   const client = new WarpGrepClient(config);
+   return client.execute(input);
+ }
+ function formatResult(result) {
+   if (!result.success) {
+     return `Search failed: ${result.error}`;
+   }
+   if (!result.contexts || result.contexts.length === 0) {
+     return "No relevant code found. Try rephrasing your query.";
+   }
+   const lines = [];
+   lines.push(`Found ${result.contexts.length} relevant code sections:
+ `);
+   result.contexts.forEach((ctx, i) => {
+     lines.push(`${i + 1}. ${ctx.file}`);
+     lines.push("```");
+     lines.push(ctx.content);
+     lines.push("```");
+     lines.push("");
+   });
+   if (result.summary) {
+     lines.push(`Summary: ${result.summary}`);
+   }
+   return lines.join("\n");
+ }
+
+ // tools/warp_grep/prompts.ts
+ var WARP_GREP_DESCRIPTION = "A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.";
+
+ export {
+   WarpGrepClient,
+   executeWarpGrep,
+   formatResult,
+   WARP_GREP_DESCRIPTION
+ };
+ //# sourceMappingURL=chunk-CL45IWIU.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/core.ts","../tools/warp_grep/prompts.ts"],"sourcesContent":["/**\n * Core implementation for morph-warp-grep\n * Provides a client class for programmatic code search\n */\n\nimport { runWarpGrep } from './agent/runner.js';\nimport { LocalRipgrepProvider } from './providers/local.js';\nimport type { \n WarpGrepClientConfig, \n WarpGrepInput, \n WarpGrepResult,\n WarpGrepContext,\n} from './types.js';\n\n/**\n * WarpGrep client for programmatic code search\n * \n * @example\n * ```typescript\n * import { WarpGrepClient } from '@morphllm/morphsdk';\n * \n * const client = new WarpGrepClient({ apiKey: process.env.MORPH_API_KEY });\n * \n * // Simple usage - defaults to LocalRipgrepProvider\n * const result = await client.execute({\n * query: 'Find authentication middleware',\n * repoRoot: '.'\n * });\n * \n * // With custom excludes\n * const result = await client.execute({\n * query: 'Find database models',\n * repoRoot: '.',\n * excludes: ['node_modules', '.git', 'dist']\n * });\n * ```\n */\nexport class WarpGrepClient {\n private config: WarpGrepClientConfig;\n\n constructor(config: WarpGrepClientConfig = {}) {\n this.config = {\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n };\n }\n\n /**\n * Execute a code search query\n * \n * @param input - Search parameters including query, repoRoot, and optional provider\n * @returns Search results with relevant code contexts\n * \n * @example\n * ```typescript\n * const result = await client.execute({\n * query: 'Find authentication middleware',\n * repoRoot: '.'\n * });\n * \n * if (result.success) {\n * for (const ctx of result.contexts) {\n * console.log(`File: ${ctx.file}`);\n * console.log(ctx.content);\n * }\n * }\n * ```\n */\n async execute(input: WarpGrepInput): Promise<WarpGrepResult> {\n // Default to LocalRipgrepProvider if no provider specified\n const provider = input.provider ?? new LocalRipgrepProvider(input.repoRoot, input.excludes);\n \n const result = await runWarpGrep({\n query: input.query,\n repoRoot: input.repoRoot,\n provider,\n excludes: input.excludes,\n includes: input.includes,\n debug: input.debug ?? this.config.debug ?? false,\n apiKey: this.config.apiKey,\n });\n\n const finish = result.finish;\n if (result.terminationReason !== 'completed' || !finish?.metadata) {\n return { \n success: false, \n error: 'Search did not complete',\n };\n }\n\n const contexts: WarpGrepContext[] = (finish.resolved ?? 
[]).map(r => ({\n file: r.path,\n content: r.content,\n }));\n\n return { \n success: true, \n contexts, \n summary: finish.payload,\n };\n }\n}\n\n/**\n * Execute a warp grep search directly\n * \n * @param input - Search parameters\n * @param config - Optional client configuration\n * @returns Search results\n * \n * @example\n * ```typescript\n * import { executeWarpGrep } from '@morphllm/morphsdk/tools/warp-grep';\n * \n * const result = await executeWarpGrep({\n * query: 'Find authentication middleware',\n * repoRoot: '.'\n * });\n * ```\n */\nexport async function executeWarpGrep(\n input: WarpGrepInput,\n config?: WarpGrepClientConfig\n): Promise<WarpGrepResult> {\n const client = new WarpGrepClient(config);\n return client.execute(input);\n}\n\n/**\n * Format warp grep results for display or tool responses\n * \n * @param result - The search result\n * @returns Formatted string representation\n */\nexport function formatResult(result: WarpGrepResult): string {\n if (!result.success) {\n return `Search failed: ${result.error}`;\n }\n\n if (!result.contexts || result.contexts.length === 0) {\n return 'No relevant code found. Try rephrasing your query.';\n }\n\n const lines: string[] = [];\n lines.push(`Found ${result.contexts.length} relevant code sections:\\n`);\n\n result.contexts.forEach((ctx, i) => {\n lines.push(`${i + 1}. ${ctx.file}`);\n lines.push('```');\n lines.push(ctx.content);\n lines.push('```');\n lines.push('');\n });\n\n if (result.summary) {\n lines.push(`Summary: ${result.summary}`);\n }\n\n return lines.join('\\n');\n}\n\n","/**\n * Prompts and descriptions for morph-warp-grep\n */\n\n/**\n * Default tool description for the warp grep tool\n */\nexport const WARP_GREP_DESCRIPTION = \n 'A fast and accurate tool that can search for all relevant context in a codebase. 
' +\n 'You must use this tool to save time and avoid context pollution.';\n\n/**\n * System prompt for the warp grep agent\n * Re-exported from agent/prompt.ts for convenience\n */\nexport { SYSTEM_PROMPT as WARP_GREP_SYSTEM_PROMPT, getSystemPrompt } from './agent/prompt.js';\n\n"],"mappings":";;;;;;;;AAqCO,IAAM,iBAAN,MAAqB;AAAA,EAClB;AAAA,EAER,YAAY,SAA+B,CAAC,GAAG;AAC7C,SAAK,SAAS;AAAA,MACZ,QAAQ,OAAO;AAAA,MACf,OAAO,OAAO;AAAA,MACd,SAAS,OAAO;AAAA,MAChB,aAAa,OAAO;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuBA,MAAM,QAAQ,OAA+C;AAE3D,UAAM,WAAW,MAAM,YAAY,IAAI,qBAAqB,MAAM,UAAU,MAAM,QAAQ;AAE1F,UAAM,SAAS,MAAM,YAAY;AAAA,MAC/B,OAAO,MAAM;AAAA,MACb,UAAU,MAAM;AAAA,MAChB;AAAA,MACA,UAAU,MAAM;AAAA,MAChB,UAAU,MAAM;AAAA,MAChB,OAAO,MAAM,SAAS,KAAK,OAAO,SAAS;AAAA,MAC3C,QAAQ,KAAK,OAAO;AAAA,IACtB,CAAC;AAED,UAAM,SAAS,OAAO;AACtB,QAAI,OAAO,sBAAsB,eAAe,CAAC,QAAQ,UAAU;AACjE,aAAO;AAAA,QACL,SAAS;AAAA,QACT,OAAO;AAAA,MACT;AAAA,IACF;AAEA,UAAM,YAA+B,OAAO,YAAY,CAAC,GAAG,IAAI,QAAM;AAAA,MACpE,MAAM,EAAE;AAAA,MACR,SAAS,EAAE;AAAA,IACb,EAAE;AAEF,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA,SAAS,OAAO;AAAA,IAClB;AAAA,EACF;AACF;AAmBA,eAAsB,gBACpB,OACA,QACyB;AACzB,QAAM,SAAS,IAAI,eAAe,MAAM;AACxC,SAAO,OAAO,QAAQ,KAAK;AAC7B;AAQO,SAAS,aAAa,QAAgC;AAC3D,MAAI,CAAC,OAAO,SAAS;AACnB,WAAO,kBAAkB,OAAO,KAAK;AAAA,EACvC;AAEA,MAAI,CAAC,OAAO,YAAY,OAAO,SAAS,WAAW,GAAG;AACpD,WAAO;AAAA,EACT;AAEA,QAAM,QAAkB,CAAC;AACzB,QAAM,KAAK,SAAS,OAAO,SAAS,MAAM;AAAA,CAA4B;AAEtE,SAAO,SAAS,QAAQ,CAAC,KAAK,MAAM;AAClC,UAAM,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI,IAAI,EAAE;AAClC,UAAM,KAAK,KAAK;AAChB,UAAM,KAAK,IAAI,OAAO;AACtB,UAAM,KAAK,KAAK;AAChB,UAAM,KAAK,EAAE;AAAA,EACf,CAAC;AAED,MAAI,OAAO,SAAS;AAClB,UAAM,KAAK,YAAY,OAAO,OAAO,EAAE;AAAA,EACzC;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;;;AC1JO,IAAM,wBACX;","names":[]}
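The chunk above bundles the new tools/warp_grep/core.ts, the programmatic entry point for warp-grep (WarpGrepClient, executeWarpGrep, formatResult). A minimal usage sketch, assuming these exports surface from the package root as the JSDoc in the accompanying source map suggests; the exact export path is an assumption, not something this diff confirms:

```typescript
// Sketch only: assumes WarpGrepClient and formatResult are re-exported from the package root,
// as the JSDoc in tools/warp_grep/core.ts indicates.
import { WarpGrepClient, formatResult } from "@morphllm/morphsdk";

const client = new WarpGrepClient({ apiKey: process.env.MORPH_API_KEY });

const result = await client.execute({
  query: "Find authentication middleware",
  repoRoot: ".",
  excludes: ["node_modules", ".git", "dist"], // optional; a LocalRipgrepProvider is created by default
});

// formatResult() renders the contexts as numbered, fenced snippets plus an optional summary.
console.log(formatResult(result));
```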
@@ -0,0 +1,70 @@
+ import {
+   WARP_GREP_DESCRIPTION
+ } from "./chunk-CL45IWIU.js";
+ import {
+   runWarpGrep
+ } from "./chunk-7HS6YXA3.js";
+ import {
+   LocalRipgrepProvider
+ } from "./chunk-UXYK7WZX.js";
+
+ // tools/warp_grep/vercel.ts
+ import { tool } from "ai";
+ import { z } from "zod";
+ var warpGrepSchema = z.object({
+   query: z.string().describe("Free-form repository question")
+ });
+ async function execute(input, config) {
+   const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
+   const result = await runWarpGrep({
+     query: input.query,
+     repoRoot: config.repoRoot,
+     provider,
+     excludes: config.excludes,
+     includes: config.includes,
+     debug: config.debug ?? false,
+     apiKey: config.apiKey
+   });
+   const finish = result.finish;
+   if (result.terminationReason !== "completed" || !finish?.metadata) {
+     return { success: false, error: "Search did not complete" };
+   }
+   const contexts = (finish.resolved ?? []).map((r) => ({
+     file: r.path,
+     content: r.content
+   }));
+   return { success: true, contexts, summary: finish.payload };
+ }
+ function createMorphWarpGrepTool(config) {
+   return tool({
+     description: config.description ?? WARP_GREP_DESCRIPTION,
+     inputSchema: warpGrepSchema,
+     execute: async (params) => {
+       const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
+       const result = await runWarpGrep({
+         query: params.query,
+         repoRoot: config.repoRoot,
+         provider,
+         excludes: config.excludes,
+         includes: config.includes,
+         debug: config.debug ?? false,
+         apiKey: config.apiKey
+       });
+       const finish = result.finish;
+       if (result.terminationReason !== "completed" || !finish?.metadata) {
+         return { success: false, error: "Search did not complete" };
+       }
+       const contexts = (finish.resolved ?? []).map((r) => ({
+         file: r.path,
+         content: r.content
+       }));
+       return { success: true, contexts, summary: finish.payload };
+     }
+   });
+ }
+
+ export {
+   execute,
+   createMorphWarpGrepTool
+ };
+ //# sourceMappingURL=chunk-D6OD3IST.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/vercel.ts"],"sourcesContent":["/**\n * Vercel AI SDK adapter for morph-warp-grep tool\n */\n\nimport { tool } from 'ai';\nimport { z } from 'zod';\nimport { runWarpGrep } from './agent/runner.js';\nimport { LocalRipgrepProvider } from './providers/local.js';\nimport { WARP_GREP_DESCRIPTION, getSystemPrompt } from './prompts.js';\nimport { formatResult } from './core.js';\nimport type { WarpGrepToolConfig, WarpGrepResult, WarpGrepContext } from './types.js';\n\n/**\n * Zod schema for warp grep input\n */\nconst warpGrepSchema = z.object({\n query: z.string().describe('Free-form repository question'),\n});\n\n/**\n * Execute warp grep search\n * \n * @param input - Tool input with query\n * @param config - Configuration with repoRoot and optional provider\n * @returns Search results\n */\nexport async function execute(\n input: { query: string },\n config: WarpGrepToolConfig\n): Promise<WarpGrepResult> {\n const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);\n \n const result = await runWarpGrep({\n query: input.query,\n repoRoot: config.repoRoot,\n provider,\n excludes: config.excludes,\n includes: config.includes,\n debug: config.debug ?? false,\n apiKey: config.apiKey,\n });\n\n const finish = result.finish;\n if (result.terminationReason !== 'completed' || !finish?.metadata) {\n return { success: false, error: 'Search did not complete' };\n }\n\n const contexts: WarpGrepContext[] = (finish.resolved ?? []).map(r => ({\n file: r.path,\n content: r.content,\n }));\n\n return { success: true, contexts, summary: finish.payload };\n}\n\n// Re-export formatResult and getSystemPrompt for convenience\nexport { formatResult, getSystemPrompt };\n\n/**\n * Create Vercel AI SDK warp grep tool\n * \n * @param config - Configuration options\n * @returns Vercel AI SDK tool\n * \n * @example\n * ```typescript\n * import { generateText } from 'ai';\n * import { anthropic } from '@ai-sdk/anthropic';\n * import { createMorphWarpGrepTool } from '@morphllm/morphsdk/tools/warp-grep/vercel';\n * \n * const grepTool = createMorphWarpGrepTool({ repoRoot: '.' });\n * \n * const result = await generateText({\n * model: anthropic('claude-sonnet-4-5-20250929'),\n * tools: { grep: grepTool },\n * prompt: 'Find authentication middleware'\n * });\n * ```\n */\nexport function createMorphWarpGrepTool(config: WarpGrepToolConfig) {\n return tool({\n description: config.description ?? WARP_GREP_DESCRIPTION,\n inputSchema: warpGrepSchema,\n execute: async (params) => {\n const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);\n const result = await runWarpGrep({\n query: params.query,\n repoRoot: config.repoRoot,\n provider,\n excludes: config.excludes,\n includes: config.includes,\n debug: config.debug ?? false,\n apiKey: config.apiKey,\n });\n\n const finish = result.finish;\n if (result.terminationReason !== 'completed' || !finish?.metadata) {\n return { success: false, error: 'Search did not complete' };\n }\n\n const contexts: WarpGrepContext[] = (finish.resolved ?? 
[]).map(r => ({\n file: r.path,\n content: r.content,\n }));\n\n return { success: true, contexts, summary: finish.payload };\n },\n });\n}\n"],"mappings":";;;;;;;;;;;AAIA,SAAS,YAAY;AACrB,SAAS,SAAS;AAUlB,IAAM,iBAAiB,EAAE,OAAO;AAAA,EAC9B,OAAO,EAAE,OAAO,EAAE,SAAS,+BAA+B;AAC5D,CAAC;AASD,eAAsB,QACpB,OACA,QACyB;AACzB,QAAM,WAAW,OAAO,YAAY,IAAI,qBAAqB,OAAO,UAAU,OAAO,QAAQ;AAE7F,QAAM,SAAS,MAAM,YAAY;AAAA,IAC/B,OAAO,MAAM;AAAA,IACb,UAAU,OAAO;AAAA,IACjB;AAAA,IACA,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,OAAO,OAAO,SAAS;AAAA,IACvB,QAAQ,OAAO;AAAA,EACjB,CAAC;AAED,QAAM,SAAS,OAAO;AACtB,MAAI,OAAO,sBAAsB,eAAe,CAAC,QAAQ,UAAU;AACjE,WAAO,EAAE,SAAS,OAAO,OAAO,0BAA0B;AAAA,EAC5D;AAEA,QAAM,YAA+B,OAAO,YAAY,CAAC,GAAG,IAAI,QAAM;AAAA,IACpE,MAAM,EAAE;AAAA,IACR,SAAS,EAAE;AAAA,EACb,EAAE;AAEF,SAAO,EAAE,SAAS,MAAM,UAAU,SAAS,OAAO,QAAQ;AAC5D;AA0BO,SAAS,wBAAwB,QAA4B;AAClE,SAAO,KAAK;AAAA,IACV,aAAa,OAAO,eAAe;AAAA,IACnC,aAAa;AAAA,IACb,SAAS,OAAO,WAAW;AACzB,YAAM,WAAW,OAAO,YAAY,IAAI,qBAAqB,OAAO,UAAU,OAAO,QAAQ;AAC7F,YAAM,SAAS,MAAM,YAAY;AAAA,QAC/B,OAAO,OAAO;AAAA,QACd,UAAU,OAAO;AAAA,QACjB;AAAA,QACA,UAAU,OAAO;AAAA,QACjB,UAAU,OAAO;AAAA,QACjB,OAAO,OAAO,SAAS;AAAA,QACvB,QAAQ,OAAO;AAAA,MACjB,CAAC;AAED,YAAM,SAAS,OAAO;AACtB,UAAI,OAAO,sBAAsB,eAAe,CAAC,QAAQ,UAAU;AACjE,eAAO,EAAE,SAAS,OAAO,OAAO,0BAA0B;AAAA,MAC5D;AAEA,YAAM,YAA+B,OAAO,YAAY,CAAC,GAAG,IAAI,QAAM;AAAA,QACpE,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,MACb,EAAE;AAEF,aAAO,EAAE,SAAS,MAAM,UAAU,SAAS,OAAO,QAAQ;AAAA,IAC5D;AAAA,EACF,CAAC;AACH;","names":[]}
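chunk-D6OD3IST.js is the rebundled Vercel AI SDK adapter. A sketch adapted from the @example block visible in the tools/warp_grep/vercel.ts source map above; the model id and wiring are illustrative, not prescribed by this diff:

```typescript
// Sketch following the @example in tools/warp_grep/vercel.ts.
import { generateText } from "ai";
import { anthropic } from "@ai-sdk/anthropic";
import { createMorphWarpGrepTool } from "@morphllm/morphsdk/tools/warp-grep/vercel";

// The tool reads repoRoot (and optional excludes/includes/provider) from its config.
const grepTool = createMorphWarpGrepTool({ repoRoot: "." });

const { text } = await generateText({
  model: anthropic("claude-sonnet-4-5-20250929"),
  tools: { grep: grepTool },
  prompt: "Find authentication middleware",
});
```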
@@ -1,10 +1,10 @@
+ import {
+   executeEditFile
+ } from "./chunk-64PMM72R.js";
  import {
    EDIT_FILE_SYSTEM_PROMPT,
    EDIT_FILE_TOOL_DESCRIPTION
  } from "./chunk-63WE2C5R.js";
- import {
-   executeEditFile
- } from "./chunk-64PMM72R.js";
  import {
    __export
  } from "./chunk-PZ5AY32C.js";
@@ -103,4 +103,4 @@ export {
    openai_default,
    openai_exports
  };
- //# sourceMappingURL=chunk-PEGZVGG4.js.map
+ //# sourceMappingURL=chunk-G4AWE5A2.js.map
@@ -1,10 +1,10 @@
+ import {
+   executeCodebaseSearch
+ } from "./chunk-WM77HRKO.js";
  import {
    CODEBASE_SEARCH_DESCRIPTION,
    CODEBASE_SEARCH_SYSTEM_PROMPT
  } from "./chunk-YQMPVJ2L.js";
- import {
-   executeCodebaseSearch
- } from "./chunk-WM77HRKO.js";

  // tools/codebase_search/anthropic.ts
  function createCodebaseSearchTool(config) {
@@ -80,4 +80,4 @@ function formatResult(result) {
  export {
    createCodebaseSearchTool
  };
- //# sourceMappingURL=chunk-OUEJ6XEO.js.map
+ //# sourceMappingURL=chunk-GJU7UOFL.js.map
@@ -6,4 +6,4 @@ export {
    default2 as default,
    default3 as default2
  };
- //# sourceMappingURL=chunk-Q7PDN7TS.js.map
+ //# sourceMappingURL=chunk-GZMUGMOZ.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../git/index.ts"],"sourcesContent":["/**\n * Morph Git SDK\n * \n * Git operations for AI agents using Morph's backend infrastructure.\n * \n * @example\n * ```typescript\n * import { MorphGit } from 'morphsdk/git';\n * \n * const morphGit = new MorphGit({\n * apiKey: process.env.MORPH_API_KEY!\n * });\n * \n * // Initialize and push\n * await morphGit.init({ repoId: 'my-project', dir: './my-project' });\n * await morphGit.add({ dir: './my-project', filepath: 'src/app.ts' });\n * await morphGit.commit({ dir: './my-project', message: 'Update' });\n * await morphGit.push({ dir: './my-project', branch: 'main' });\n * ```\n */\n\nexport { MorphGit } from './client.js';\nexport type {\n MorphGitConfig,\n CloneOptions,\n PushOptions,\n PullOptions,\n AddOptions,\n CommitOptions,\n StatusOptions,\n LogOptions,\n CheckoutOptions,\n BranchOptions,\n DiffOptions,\n CommitObject,\n StatusResult,\n ChatMessage,\n CommitMetadata,\n} from './types.js';\n\n// Re-export isomorphic-git for advanced use cases\nexport { default as git } from 'isomorphic-git';\nexport { default as http } from 'isomorphic-git/http/node';\n\n"],"mappings":";AAyCA,SAAoB,WAAXA,gBAAsB;AAC/B,SAAoB,WAAXA,gBAAuB;","names":["default"]}
+ {"version":3,"sources":["../git/index.ts"],"sourcesContent":["/**\n * Morph Git SDK\n * \n * Git operations for AI agents using Morph's backend infrastructure.\n * \n * @example\n * ```typescript\n * import { MorphGit } from 'morphsdk/git';\n * \n * const morphGit = new MorphGit({\n * apiKey: process.env.MORPH_API_KEY!\n * });\n * \n * // Initialize and push\n * await morphGit.init({ repoId: 'my-project', dir: './my-project' });\n * await morphGit.add({ dir: './my-project', filepath: 'src/app.ts' });\n * await morphGit.commit({ dir: './my-project', message: 'Update' });\n * await morphGit.push({ dir: './my-project', branch: 'main' });\n * ```\n */\n\nexport { MorphGit } from './client.js';\nexport type {\n MorphGitConfig,\n CloneOptions,\n PushOptions,\n PullOptions,\n AddOptions,\n CommitOptions,\n StatusOptions,\n LogOptions,\n CheckoutOptions,\n BranchOptions,\n DiffOptions,\n CommitObject,\n StatusResult,\n ChatMessage,\n CommitMetadata,\n MorphNotesSchema,\n} from './types.js';\n\n// Re-export isomorphic-git for advanced use cases\nexport { default as git } from 'isomorphic-git';\nexport { default as http } from 'isomorphic-git/http/node';\n\n"],"mappings":";AA0CA,SAAoB,WAAXA,gBAAsB;AAC/B,SAAoB,WAAXA,gBAAuB;","names":["default"]}
@@ -0,0 +1 @@
+ //# sourceMappingURL=chunk-JYBVRF72.js.map
@@ -101,28 +101,26 @@ var MorphGit = class {
   * ```ts
   * await morphGit.push({
   *   dir: './my-project',
-  *   branch: 'main' // Required: explicit branch name
+  *   branch: 'main', // Required: explicit branch name
+  *   index: true // Optional: generate embeddings (default: true)
   * });
   * ```
   */
  async push(options) {
-   const { dir, remote = "origin", branch, waitForEmbeddings } = options;
+   const { dir, remote = "origin", branch, waitForEmbeddings, index = true } = options;
    if (!branch) {
      throw new Error(
        'branch is required for push operations. Specify the branch explicitly: { dir: "./my-project", branch: "main" }'
      );
    }
-   let commitHash;
+   const commitHash = await git.resolveRef({ fs, dir, ref: "HEAD" });
    let repoId;
-   if (waitForEmbeddings) {
-     commitHash = await git.resolveRef({ fs, dir, ref: "HEAD" });
-     const remotes = await git.listRemotes({ fs, dir });
-     const originRemote = remotes.find((r) => r.remote === remote);
-     if (originRemote) {
-       const match = originRemote.url.match(/\/repos\/([^\/]+)$/);
-       if (match) {
-         repoId = match[1];
-       }
+   const remotes = await git.listRemotes({ fs, dir });
+   const originRemote = remotes.find((r) => r.remote === remote);
+   if (originRemote) {
+     const match = originRemote.url.match(/\/repos\/([^\/]+)$/);
+     if (match) {
+       repoId = match[1];
      }
    }
    await git.push({
@@ -133,10 +131,35 @@ var MorphGit = class {
      ref: branch,
      onAuth: this.getAuthCallback()
    });
-   if (waitForEmbeddings && repoId && commitHash) {
+   if (repoId && commitHash) {
+     await this.configureCommit({ repoId, commitHash, branch, index });
+   }
+   if (waitForEmbeddings && repoId && commitHash && index) {
      await this.waitForEmbeddings({ repoId, commitHash });
    }
  }
+ /**
+  * Configure commit settings on the backend after push.
+  * Sets the index flag to control embedding generation.
+  * @private
+  */
+ async configureCommit(options) {
+   const { repoId, commitHash, branch, index } = options;
+   const response = await fetch(
+     `${this.proxyUrl}/v1/repos/${repoId}/commits/${commitHash}/config`,
+     {
+       method: "POST",
+       headers: {
+         "Authorization": `Bearer ${this.apiKey}`,
+         "Content-Type": "application/json"
+       },
+       body: JSON.stringify({ index, branch })
+     }
+   );
+   if (!response.ok) {
+     console.warn(`Failed to configure commit: ${response.status}`);
+   }
+ }
  /**
   * Pull changes from remote repository
   *
@@ -261,6 +284,7 @@ var MorphGit = class {
   *     name: 'AI Agent',
   *     email: 'ai@example.com'
   *   },
+  *   metadata: { issueId: 'PROJ-123', source: 'agent' },
   *   chatHistory: [
   *     { role: 'user', content: 'Please add a new feature' },
   *     { role: 'assistant', content: 'I will add that feature' }
@@ -270,7 +294,7 @@ var MorphGit = class {
   * ```
   */
  async commit(options) {
-   const { dir, message, author, chatHistory, recordingId } = options;
+   const { dir, message, author, metadata, chatHistory, recordingId } = options;
    const commitAuthor = author || {
      name: "Morph SDK",
      email: "sdk@morphllm.com"
@@ -281,17 +305,19 @@ var MorphGit = class {
      message,
      author: commitAuthor
    });
-   if (chatHistory || recordingId) {
-     const metadata = {
+   if (metadata || chatHistory || recordingId) {
+     const notes = {
+       metadata,
        chatHistory,
-       recordingId
+       recordingId,
+       _version: 1
      };
      await git.addNote({
        fs,
        dir,
        ref: "refs/notes/morph-metadata",
        oid: sha,
-       note: JSON.stringify(metadata, null, 2),
+       note: JSON.stringify(notes, null, 2),
        author: commitAuthor
      });
    }
@@ -480,18 +506,19 @@ var MorphGit = class {
    return oid;
  }
  /**
-  * Get metadata (chat history, recording ID) attached to a commit
+  * Get notes (metadata, chat history, recording ID) attached to a commit
   *
   * @example
   * ```ts
-  * const metadata = await morphGit.getCommitMetadata({
+  * const notes = await morphGit.getCommitMetadata({
   *   dir: './my-project',
   *   commitSha: 'abc123...'
   * });
   *
-  * if (metadata) {
-  *   console.log('Chat history:', metadata.chatHistory);
-  *   console.log('Recording ID:', metadata.recordingId);
+  * if (notes) {
+  *   console.log('Metadata:', notes.metadata);
+  *   console.log('Chat history:', notes.chatHistory);
+  *   console.log('Recording ID:', notes.recordingId);
   * }
   * ```
   */
@@ -503,8 +530,8 @@ var MorphGit = class {
        ref: "refs/notes/morph-metadata",
        oid: options.commitSha
      });
-     const metadata = JSON.parse(new TextDecoder().decode(note));
-     return metadata;
+     const notes = JSON.parse(new TextDecoder().decode(note));
+     return notes;
    } catch (err) {
      return null;
    }
@@ -514,4 +541,4 @@ var MorphGit = class {
  export {
    MorphGit
  };
- //# sourceMappingURL=chunk-GDR65N2J.js.map
+ //# sourceMappingURL=chunk-OXHGFHEU.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../git/client.ts"],"sourcesContent":["/**\n * Morph Git Client - Simple, high-level Git operations\n * Built on isomorphic-git with explicit configuration\n */\n\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\nimport fs from 'fs';\nimport type {\n CloneOptions,\n PushOptions,\n PullOptions,\n AddOptions,\n CommitOptions,\n StatusOptions,\n LogOptions,\n CheckoutOptions,\n BranchOptions,\n DiffOptions,\n CommitObject,\n StatusResult,\n MorphGitConfig,\n MorphNotesSchema,\n WaitForEmbeddingsOptions,\n EmbeddingProgress,\n} from './types.js';\n\nconst DEFAULT_PROXY_URL = 'https://repos.morphllm.com';\n\n/**\n * MorphGit - Git operations for AI agents with Morph backend\n * \n * @example\n * ```typescript\n * import { MorphGit } from 'morphsdk/git';\n * \n * const morphGit = new MorphGit({\n * apiKey: process.env.MORPH_API_KEY!,\n * proxyUrl: 'https://repos.morphllm.com' // Optional\n * });\n * \n * await morphGit.init({ repoId: 'my-project', dir: './my-project' });\n * await morphGit.push({ dir: './my-project' });\n * ```\n */\nexport class MorphGit {\n private readonly apiKey: string;\n private readonly proxyUrl: string;\n\n constructor(config: MorphGitConfig) {\n // Validate API key\n if (!config.apiKey) {\n throw new Error('API key is required. Get one at https://morphllm.com/dashboard');\n }\n \n if (!config.apiKey.startsWith('sk-') && !config.apiKey.startsWith('morph-')) {\n throw new Error('Invalid API key format. Expected: sk-... or morph-...');\n }\n \n this.apiKey = config.apiKey;\n this.proxyUrl = config.proxyUrl || DEFAULT_PROXY_URL;\n }\n \n /**\n * Get auth callback for isomorphic-git operations\n * @private\n */\n private getAuthCallback() {\n return () => ({\n username: 'morph',\n password: this.apiKey,\n });\n }\n\n /**\n * Initialize a new repository\n * Creates the repo in the database and in the git provider\n * \n * @example\n * ```ts\n * await morphGit.init({\n * repoId: 'my-project',\n * dir: './my-project',\n * defaultBranch: 'main'\n * });\n * ```\n */\n async init(options: {\n repoId: string;\n dir: string;\n defaultBranch?: string;\n }): Promise<void> {\n const { repoId, dir, defaultBranch = 'main' } = options;\n\n // Call backend API to create repository\n const response = await fetch(`${this.proxyUrl}/v1/repos`, {\n method: 'POST',\n headers: {\n 'Authorization': `Bearer ${this.apiKey}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n repoId,\n name: repoId,\n defaultBranch,\n }),\n });\n\n if (!response.ok) {\n const error = await response.text();\n throw new Error(`Failed to create repository: ${error}`);\n }\n\n // Initialize local git repository (industry standard: no clone needed)\n await git.init({\n fs,\n dir,\n defaultBranch,\n });\n\n // Add remote pointing to Morph git-proxy\n await git.addRemote({\n fs,\n dir,\n remote: 'origin',\n url: `${this.proxyUrl}/v1/repos/${repoId}`,\n });\n\n console.log(`✓ Repository '${repoId}' initialized`);\n }\n\n /**\n * Clone a repository from Morph repos\n * \n * @example\n * ```ts\n * await morphGit.clone({\n * repoId: 'my-project',\n * dir: './my-project'\n * });\n * ```\n */\n async clone(options: CloneOptions): Promise<void> {\n const { repoId, dir, branch = 'main', depth, singleBranch = true } = options;\n\n await git.clone({\n fs,\n http,\n dir,\n url: `${this.proxyUrl}/v1/repos/${repoId}`,\n ref: branch,\n singleBranch,\n depth,\n onAuth: this.getAuthCallback(),\n });\n }\n\n /**\n * Push changes to remote repository\n * \n * 
@example\n * ```ts\n * await morphGit.push({ \n * dir: './my-project',\n * branch: 'main', // Required: explicit branch name\n * index: true // Optional: generate embeddings (default: true)\n * });\n * ```\n */\n async push(options: PushOptions): Promise<void> {\n const { dir, remote = 'origin', branch, waitForEmbeddings, index = true } = options;\n\n if (!branch) {\n throw new Error(\n 'branch is required for push operations. ' +\n 'Specify the branch explicitly: { dir: \"./my-project\", branch: \"main\" }'\n );\n }\n\n // Get commit hash and repoId before pushing\n const commitHash = await git.resolveRef({ fs, dir, ref: 'HEAD' });\n \n // Get repoId from git remote URL\n let repoId: string | undefined;\n const remotes = await git.listRemotes({ fs, dir });\n const originRemote = remotes.find(r => r.remote === remote);\n if (originRemote) {\n // Extract repoId from URL: https://repos.morphllm.com/v1/repos/{repoId}\n const match = originRemote.url.match(/\\/repos\\/([^\\/]+)$/);\n if (match) {\n repoId = match[1];\n }\n }\n\n await git.push({\n fs,\n http,\n dir,\n remote,\n ref: branch,\n onAuth: this.getAuthCallback(),\n });\n \n // Configure commit after successful push (set index flag)\n if (repoId && commitHash) {\n await this.configureCommit({ repoId, commitHash, branch, index });\n }\n \n // Wait for embeddings if requested (and indexing is enabled)\n if (waitForEmbeddings && repoId && commitHash && index) {\n await this.waitForEmbeddings({ repoId, commitHash });\n }\n }\n\n /**\n * Configure commit settings on the backend after push.\n * Sets the index flag to control embedding generation.\n * @private\n */\n private async configureCommit(options: {\n repoId: string;\n commitHash: string;\n branch: string;\n index: boolean;\n }): Promise<void> {\n const { repoId, commitHash, branch, index } = options;\n \n const response = await fetch(\n `${this.proxyUrl}/v1/repos/${repoId}/commits/${commitHash}/config`,\n {\n method: 'POST',\n headers: {\n 'Authorization': `Bearer ${this.apiKey}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({ index, branch }),\n }\n );\n\n if (!response.ok) {\n // Non-fatal: log warning but don't throw\n console.warn(`Failed to configure commit: ${response.status}`);\n }\n }\n\n /**\n * Pull changes from remote repository\n * \n * @example\n * ```ts\n * await morphGit.pull({ \n * dir: './my-project',\n * branch: 'main' // Required: explicit branch name\n * });\n * ```\n */\n async pull(options: PullOptions): Promise<void> {\n const { dir, remote = 'origin', branch } = options;\n\n if (!branch) {\n throw new Error(\n 'branch is required for pull operations. 
' +\n 'Specify the branch explicitly: { dir: \"./my-project\", branch: \"main\" }'\n );\n }\n\n await git.pull({\n fs,\n http,\n dir,\n remote,\n ref: branch,\n onAuth: this.getAuthCallback(),\n author: {\n name: 'Morph Agent',\n email: 'agent@morph.com',\n },\n });\n }\n\n /**\n * Wait for embeddings to complete after push.\n * Polls status endpoint until embeddings are done.\n * \n * @example\n * ```ts\n * await morphGit.push({ dir: './my-project', branch: 'main' });\n * await morphGit.waitForEmbeddings({\n * repoId: 'my-project',\n * onProgress: (p) => console.log(`${p.filesProcessed}/${p.totalFiles}`)\n * });\n * ```\n */\n async waitForEmbeddings(options: WaitForEmbeddingsOptions): Promise<void> {\n const { repoId, commitHash, timeout = 120000, onProgress } = options;\n const startTime = Date.now();\n const pollInterval = 1000; // Poll every 1s\n \n while (Date.now() - startTime < timeout) {\n const statusUrl = `${this.proxyUrl}/v1/repos/${repoId}/embedding-status` +\n (commitHash ? `?commit_hash=${commitHash}` : '');\n \n const response = await fetch(statusUrl, {\n headers: { 'Authorization': `Bearer ${this.apiKey}` }\n });\n \n if (response.status === 404) {\n // No job found yet - might still be creating\n await new Promise(resolve => setTimeout(resolve, pollInterval));\n continue;\n }\n \n if (!response.ok) {\n throw new Error(`Failed to get embedding status: ${response.status}`);\n }\n \n const status = await response.json();\n \n if (onProgress && status.progress) {\n onProgress(status.progress);\n }\n \n if (status.status === 'completed') {\n return; // Done!\n }\n \n if (status.status === 'failed') {\n throw new Error(`Embeddings failed: ${status.error || 'Unknown error'}`);\n }\n \n // Still processing (queued or processing), wait and poll again\n await new Promise(resolve => setTimeout(resolve, pollInterval));\n }\n \n throw new Error(`Embeddings timed out after ${timeout}ms`);\n }\n\n /**\n * Stage a file for commit\n * \n * @example\n * ```ts\n * await morphGit.add({\n * dir: './my-project',\n * filepath: 'src/app.ts'\n * });\n * ```\n */\n async add(options: AddOptions): Promise<void> {\n const { dir, filepath } = options;\n\n await git.add({\n fs,\n dir,\n filepath,\n });\n }\n\n /**\n * Remove a file from staging\n * \n * @example\n * ```ts\n * await morphGit.remove({\n * dir: './my-project',\n * filepath: 'src/old-file.ts'\n * });\n * ```\n */\n async remove(options: AddOptions): Promise<void> {\n const { dir, filepath } = options;\n\n await git.remove({\n fs,\n dir,\n filepath,\n });\n }\n\n /**\n * Commit staged changes\n * \n * @example\n * ```ts\n * await morphGit.commit({\n * dir: './my-project',\n * message: 'Add new feature',\n * author: {\n * name: 'AI Agent',\n * email: 'ai@example.com'\n * },\n * metadata: { issueId: 'PROJ-123', source: 'agent' },\n * chatHistory: [\n * { role: 'user', content: 'Please add a new feature' },\n * { role: 'assistant', content: 'I will add that feature' }\n * ],\n * recordingId: 'rec_123'\n * });\n * ```\n */\n async commit(options: CommitOptions): Promise<string> {\n const { dir, message, author, metadata, chatHistory, recordingId } = options;\n\n // Provide default author if not specified\n const commitAuthor = author || {\n name: 'Morph SDK',\n email: 'sdk@morphllm.com'\n };\n\n const sha = await git.commit({\n fs,\n dir,\n message,\n author: commitAuthor,\n });\n\n // Store notes if any note fields are provided\n if (metadata || chatHistory || recordingId) {\n const notes: MorphNotesSchema = {\n metadata,\n chatHistory,\n 
recordingId,\n _version: 1\n };\n \n await git.addNote({\n fs,\n dir,\n ref: 'refs/notes/morph-metadata',\n oid: sha,\n note: JSON.stringify(notes, null, 2),\n author: commitAuthor\n });\n }\n\n return sha;\n }\n\n /**\n * Get status of a file\n * \n * @example\n * ```ts\n * const status = await morphGit.status({\n * dir: './my-project',\n * filepath: 'src/app.ts'\n * });\n * console.log(status); // 'modified', '*added', etc.\n * ```\n */\n async status(options: StatusOptions): Promise<string> {\n const { dir, filepath } = options;\n\n if (!filepath) {\n throw new Error('filepath is required for status check');\n }\n\n const status = await git.status({\n fs,\n dir,\n filepath,\n });\n\n return status;\n }\n\n /**\n * Get commit history\n * \n * @example\n * ```ts\n * const commits = await morphGit.log({\n * dir: './my-project',\n * depth: 10\n * });\n * ```\n */\n async log(options: LogOptions): Promise<CommitObject[]> {\n const { dir, depth, ref } = options;\n\n const commits = await git.log({\n fs,\n dir,\n depth,\n ref,\n });\n\n return commits as CommitObject[];\n }\n\n /**\n * Checkout a branch or commit\n * \n * @example\n * ```ts\n * await morphGit.checkout({\n * dir: './my-project',\n * ref: 'feature-branch'\n * });\n * ```\n */\n async checkout(options: CheckoutOptions): Promise<void> {\n const { dir, ref } = options;\n\n await git.checkout({\n fs,\n dir,\n ref,\n });\n }\n\n /**\n * Create a new branch\n * \n * @example\n * ```ts\n * await morphGit.branch({\n * dir: './my-project',\n * name: 'feature-branch',\n * checkout: true\n * });\n * ```\n */\n async branch(options: BranchOptions): Promise<void> {\n const { dir, name, checkout = false } = options;\n\n await git.branch({\n fs,\n dir,\n ref: name,\n checkout,\n });\n }\n\n /**\n * List all branches\n * \n * @example\n * ```ts\n * const branches = await morphGit.listBranches({\n * dir: './my-project'\n * });\n * ```\n */\n async listBranches(options: { dir: string }): Promise<string[]> {\n const { dir } = options;\n\n const branches = await git.listBranches({\n fs,\n dir,\n });\n\n return branches;\n }\n\n /**\n * Get the current branch name\n * \n * @example\n * ```ts\n * const branch = await morphGit.currentBranch({\n * dir: './my-project'\n * });\n * ```\n */\n async currentBranch(options: { dir: string }): Promise<string | undefined> {\n const { dir } = options;\n\n const branch = await git.currentBranch({\n fs,\n dir,\n });\n\n return branch || undefined;\n }\n\n /**\n * Get list of changed files (similar to git diff --name-only)\n * \n * @example\n * ```ts\n * const changes = await morphGit.statusMatrix({\n * dir: './my-project'\n * });\n * ```\n */\n async statusMatrix(options: { dir: string }): Promise<StatusResult[]> {\n const { dir } = options;\n\n const matrix = await git.statusMatrix({\n fs,\n dir,\n });\n\n return matrix.map(([filepath, HEADStatus, workdirStatus, stageStatus]) => {\n let status: StatusResult['status'] = 'unmodified';\n\n // Determine status based on statusMatrix values\n if (HEADStatus === 1 && workdirStatus === 2 && stageStatus === 2) {\n status = 'modified';\n } else if (HEADStatus === 1 && workdirStatus === 2 && stageStatus === 1) {\n status = '*modified';\n } else if (HEADStatus === 0 && workdirStatus === 2 && stageStatus === 2) {\n status = 'added';\n } else if (HEADStatus === 0 && workdirStatus === 2 && stageStatus === 0) {\n status = '*added';\n } else if (HEADStatus === 1 && workdirStatus === 0 && stageStatus === 0) {\n status = 'deleted';\n } else if (HEADStatus === 1 && workdirStatus === 
0 && stageStatus === 1) {\n status = '*deleted';\n } else if (HEADStatus === 1 && workdirStatus === 1 && stageStatus === 1) {\n status = 'unmodified';\n } else if (HEADStatus === 0 && workdirStatus === 0 && stageStatus === 0) {\n status = 'absent';\n }\n\n return {\n filepath,\n status,\n };\n });\n }\n\n /**\n * Get the current commit hash\n * \n * @example\n * ```ts\n * const hash = await morphGit.resolveRef({\n * dir: './my-project',\n * ref: 'HEAD'\n * });\n * ```\n */\n async resolveRef(options: { dir: string; ref: string }): Promise<string> {\n const { dir, ref } = options;\n\n const oid = await git.resolveRef({\n fs,\n dir,\n ref,\n });\n\n return oid;\n }\n\n /**\n * Get notes (metadata, chat history, recording ID) attached to a commit\n * \n * @example\n * ```ts\n * const notes = await morphGit.getCommitMetadata({\n * dir: './my-project',\n * commitSha: 'abc123...'\n * });\n * \n * if (notes) {\n * console.log('Metadata:', notes.metadata);\n * console.log('Chat history:', notes.chatHistory);\n * console.log('Recording ID:', notes.recordingId);\n * }\n * ```\n */\n async getCommitMetadata(options: {\n dir: string;\n commitSha: string;\n }): Promise<MorphNotesSchema | null> {\n try {\n const note = await git.readNote({\n fs,\n dir: options.dir,\n ref: 'refs/notes/morph-metadata',\n oid: options.commitSha\n });\n \n const notes: MorphNotesSchema = JSON.parse(new TextDecoder().decode(note));\n return notes;\n } catch (err) {\n // No notes found for this commit\n return null;\n }\n }\n}\n\n"],"mappings":";AAKA,OAAO,SAAS;AAChB,OAAO,UAAU;AACjB,OAAO,QAAQ;AAoBf,IAAM,oBAAoB;AAkBnB,IAAM,WAAN,MAAe;AAAA,EACH;AAAA,EACA;AAAA,EAEjB,YAAY,QAAwB;AAElC,QAAI,CAAC,OAAO,QAAQ;AAClB,YAAM,IAAI,MAAM,gEAAgE;AAAA,IAClF;AAEA,QAAI,CAAC,OAAO,OAAO,WAAW,KAAK,KAAK,CAAC,OAAO,OAAO,WAAW,QAAQ,GAAG;AAC3E,YAAM,IAAI,MAAM,uDAAuD;AAAA,IACzE;AAEA,SAAK,SAAS,OAAO;AACrB,SAAK,WAAW,OAAO,YAAY;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,kBAAkB;AACxB,WAAO,OAAO;AAAA,MACZ,UAAU;AAAA,MACV,UAAU,KAAK;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,KAAK,SAIO;AAChB,UAAM,EAAE,QAAQ,KAAK,gBAAgB,OAAO,IAAI;AAGhD,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,QAAQ,aAAa;AAAA,MACxD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,iBAAiB,UAAU,KAAK,MAAM;AAAA,QACtC,gBAAgB;AAAA,MAClB;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA,MAAM;AAAA,QACN;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,QAAQ,MAAM,SAAS,KAAK;AAClC,YAAM,IAAI,MAAM,gCAAgC,KAAK,EAAE;AAAA,IACzD;AAGA,UAAM,IAAI,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAGD,UAAM,IAAI,UAAU;AAAA,MAClB;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,MACR,KAAK,GAAG,KAAK,QAAQ,aAAa,MAAM;AAAA,IAC1C,CAAC;AAED,YAAQ,IAAI,sBAAiB,MAAM,eAAe;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,MAAM,SAAsC;AAChD,UAAM,EAAE,QAAQ,KAAK,SAAS,QAAQ,OAAO,eAAe,KAAK,IAAI;AAErE,UAAM,IAAI,MAAM;AAAA,MACd;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK,GAAG,KAAK,QAAQ,aAAa,MAAM;AAAA,MACxC,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA,QAAQ,KAAK,gBAAgB;AAAA,IAC/B,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,KAAK,SAAqC;AAC9C,UAAM,EAAE,KAAK,SAAS,UAAU,QAAQ,mBAAmB,QAAQ,KAAK,IAAI;AAE5E,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAGA,UAAM,aAAa,MAAM,IAAI,WAAW,EAAE,IAAI,KAAK,KAAK,OAAO,CAAC;AAGhE,QAAI;AACJ,UAAM,UAAU,MAAM,IAAI,YAAY,EAAE,IAAI,IAAI,CAAC;AACjD,UAAM,eAAe,QAAQ,KAAK,OAAK,EAAE,WAAW,MAAM;AAC1D,QAAI,cAAc;AAEhB,YAAM,QAAQ,aAAa,IAAI,MAAM,oBAAoB;AACzD,UAAI,OAAO;AACT,iBAAS,MAAM,CAAC;AAAA,MAClB;AAAA,IACF;AAEA,UAAM,IAAI,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MAC
A;AAAA,MACA,KAAK;AAAA,MACL,QAAQ,KAAK,gBAAgB;AAAA,IAC/B,CAAC;AAGD,QAAI,UAAU,YAAY;AACxB,YAAM,KAAK,gBAAgB,EAAE,QAAQ,YAAY,QAAQ,MAAM,CAAC;AAAA,IAClE;AAGA,QAAI,qBAAqB,UAAU,cAAc,OAAO;AACtD,YAAM,KAAK,kBAAkB,EAAE,QAAQ,WAAW,CAAC;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,gBAAgB,SAKZ;AAChB,UAAM,EAAE,QAAQ,YAAY,QAAQ,MAAM,IAAI;AAE9C,UAAM,WAAW,MAAM;AAAA,MACrB,GAAG,KAAK,QAAQ,aAAa,MAAM,YAAY,UAAU;AAAA,MACzD;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,iBAAiB,UAAU,KAAK,MAAM;AAAA,UACtC,gBAAgB;AAAA,QAClB;AAAA,QACA,MAAM,KAAK,UAAU,EAAE,OAAO,OAAO,CAAC;AAAA,MACxC;AAAA,IACF;AAEA,QAAI,CAAC,SAAS,IAAI;AAEhB,cAAQ,KAAK,+BAA+B,SAAS,MAAM,EAAE;AAAA,IAC/D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,KAAK,SAAqC;AAC9C,UAAM,EAAE,KAAK,SAAS,UAAU,OAAO,IAAI;AAE3C,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAEA,UAAM,IAAI,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAAA,MACL,QAAQ,KAAK,gBAAgB;AAAA,MAC7B,QAAQ;AAAA,QACN,MAAM;AAAA,QACN,OAAO;AAAA,MACT;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,kBAAkB,SAAkD;AACxE,UAAM,EAAE,QAAQ,YAAY,UAAU,MAAQ,WAAW,IAAI;AAC7D,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,eAAe;AAErB,WAAO,KAAK,IAAI,IAAI,YAAY,SAAS;AACvC,YAAM,YAAY,GAAG,KAAK,QAAQ,aAAa,MAAM,uBAClD,aAAa,gBAAgB,UAAU,KAAK;AAE/C,YAAM,WAAW,MAAM,MAAM,WAAW;AAAA,QACtC,SAAS,EAAE,iBAAiB,UAAU,KAAK,MAAM,GAAG;AAAA,MACtD,CAAC;AAED,UAAI,SAAS,WAAW,KAAK;AAE3B,cAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,YAAY,CAAC;AAC9D;AAAA,MACF;AAEA,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI,MAAM,mCAAmC,SAAS,MAAM,EAAE;AAAA,MACtE;AAEA,YAAM,SAAS,MAAM,SAAS,KAAK;AAEnC,UAAI,cAAc,OAAO,UAAU;AACjC,mBAAW,OAAO,QAAQ;AAAA,MAC5B;AAEA,UAAI,OAAO,WAAW,aAAa;AACjC;AAAA,MACF;AAEA,UAAI,OAAO,WAAW,UAAU;AAC9B,cAAM,IAAI,MAAM,sBAAsB,OAAO,SAAS,eAAe,EAAE;AAAA,MACzE;AAGA,YAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,YAAY,CAAC;AAAA,IAChE;AAEA,UAAM,IAAI,MAAM,8BAA8B,OAAO,IAAI;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,IAAI,SAAoC;AAC5C,UAAM,EAAE,KAAK,SAAS,IAAI;AAE1B,UAAM,IAAI,IAAI;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,OAAO,SAAoC;AAC/C,UAAM,EAAE,KAAK,SAAS,IAAI;AAE1B,UAAM,IAAI,OAAO;AAAA,MACf;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuBA,MAAM,OAAO,SAAyC;AACpD,UAAM,EAAE,KAAK,SAAS,QAAQ,UAAU,aAAa,YAAY,IAAI;AAGrE,UAAM,eAAe,UAAU;AAAA,MAC7B,MAAM;AAAA,MACN,OAAO;AAAA,IACT;AAEA,UAAM,MAAM,MAAM,IAAI,OAAO;AAAA,MAC3B;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,QAAI,YAAY,eAAe,aAAa;AAC1C,YAAM,QAA0B;AAAA,QAC9B;AAAA,QACA;AAAA,QACA;AAAA,QACA,UAAU;AAAA,MACZ;AAEA,YAAM,IAAI,QAAQ;AAAA,QAChB;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL,KAAK;AAAA,QACL,MAAM,KAAK,UAAU,OAAO,MAAM,CAAC;AAAA,QACnC,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,OAAO,SAAyC;AACpD,UAAM,EAAE,KAAK,SAAS,IAAI;AAE1B,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAEA,UAAM,SAAS,MAAM,IAAI,OAAO;AAAA,MAC9B;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,IAAI,SAA8C;AACtD,UAAM,EAAE,KAAK,OAAO,IAAI,IAAI;AAE5B,UAAM,UAAU,MAAM,IAAI,IAAI;AAAA,MAC5B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,SAAS,SAAyC;AACtD,UAAM,EAAE,KAAK,IAAI,IAAI;AAErB,UAAM,IAAI,SAAS;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH
;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,OAAO,SAAuC;AAClD,UAAM,EAAE,KAAK,MAAM,WAAW,MAAM,IAAI;AAExC,UAAM,IAAI,OAAO;AAAA,MACf;AAAA,MACA;AAAA,MACA,KAAK;AAAA,MACL;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,aAAa,SAA6C;AAC9D,UAAM,EAAE,IAAI,IAAI;AAEhB,UAAM,WAAW,MAAM,IAAI,aAAa;AAAA,MACtC;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,cAAc,SAAuD;AACzE,UAAM,EAAE,IAAI,IAAI;AAEhB,UAAM,SAAS,MAAM,IAAI,cAAc;AAAA,MACrC;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO,UAAU;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,aAAa,SAAmD;AACpE,UAAM,EAAE,IAAI,IAAI;AAEhB,UAAM,SAAS,MAAM,IAAI,aAAa;AAAA,MACpC;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO,OAAO,IAAI,CAAC,CAAC,UAAU,YAAY,eAAe,WAAW,MAAM;AACxE,UAAI,SAAiC;AAGrC,UAAI,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AAChE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX;AAEA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,WAAW,SAAwD;AACvE,UAAM,EAAE,KAAK,IAAI,IAAI;AAErB,UAAM,MAAM,MAAM,IAAI,WAAW;AAAA,MAC/B;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,MAAM,kBAAkB,SAGa;AACnC,QAAI;AACF,YAAM,OAAO,MAAM,IAAI,SAAS;AAAA,QAC9B;AAAA,QACA,KAAK,QAAQ;AAAA,QACb,KAAK;AAAA,QACL,KAAK,QAAQ;AAAA,MACf,CAAC;AAED,YAAM,QAA0B,KAAK,MAAM,IAAI,YAAY,EAAE,OAAO,IAAI,CAAC;AACzE,aAAO;AAAA,IACT,SAAS,KAAK;AAEZ,aAAO;AAAA,IACT;AAAA,EACF;AACF;","names":[]}
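The rewritten git client above changes push() and commit(): HEAD is now always resolved before pushing, a per-commit config body ({ index, branch }) is POSTed to the repos backend after every push, and commit() accepts a free-form metadata object that is stored together with chatHistory, recordingId, and _version: 1 under the refs/notes/morph-metadata ref. A sketch of the new options, assuming the scoped subpath import; the JSDoc in the diff itself uses 'morphsdk/git':

```typescript
// Sketch only; import path assumed from the package name plus the 'git' subpath seen in the JSDoc.
import { MorphGit } from "@morphllm/morphsdk/git";

const morphGit = new MorphGit({ apiKey: process.env.MORPH_API_KEY! });

const sha = await morphGit.commit({
  dir: "./my-project",
  message: "Update",
  metadata: { issueId: "PROJ-123", source: "agent" }, // new in 0.2.46
  chatHistory: [{ role: "user", content: "Please add a new feature" }],
});

await morphGit.push({
  dir: "./my-project",
  branch: "main",
  index: false, // new flag: skip embedding generation for this commit (default: true)
});

// Notes now come back as a MorphNotesSchema object.
const notes = await morphGit.getCommitMetadata({ dir: "./my-project", commitSha: sha });
console.log(notes?.metadata, notes?._version);
```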
@@ -1,6 +1,6 @@
  import {
    DEFAULT_EXCLUDES
- } from "./chunk-AFEPUNAO.js";
+ } from "./chunk-TJIUA27P.js";

  // tools/warp_grep/providers/command.ts
  var CommandExecProvider = class {
@@ -70,4 +70,4 @@ var CommandExecProvider = class {
  export {
    CommandExecProvider
  };
- //# sourceMappingURL=chunk-VBBJGWHY.js.map
+ //# sourceMappingURL=chunk-P2XKFWFD.js.map
@@ -0,0 +1,76 @@
+ import {
+   WARP_GREP_DESCRIPTION,
+   formatResult
+ } from "./chunk-CL45IWIU.js";
+ import {
+   runWarpGrep
+ } from "./chunk-7HS6YXA3.js";
+ import {
+   getSystemPrompt
+ } from "./chunk-WETRQJGU.js";
+ import {
+   LocalRipgrepProvider
+ } from "./chunk-UXYK7WZX.js";
+
+ // tools/warp_grep/anthropic.ts
+ var INPUT_SCHEMA = {
+   type: "object",
+   properties: {
+     query: { type: "string", description: "Free-form repository question" }
+   },
+   required: ["query"]
+ };
+ var warpGrepTool = {
+   name: "morph-warp-grep",
+   description: WARP_GREP_DESCRIPTION,
+   input_schema: INPUT_SCHEMA
+ };
+ async function execute(input, config) {
+   const parsed = typeof input === "string" ? JSON.parse(input) : input;
+   const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
+   const result = await runWarpGrep({
+     query: parsed.query,
+     repoRoot: config.repoRoot,
+     provider,
+     excludes: config.excludes,
+     includes: config.includes,
+     debug: config.debug ?? false,
+     apiKey: config.apiKey
+   });
+   const finish = result.finish;
+   if (result.terminationReason !== "completed" || !finish?.metadata) {
+     return { success: false, error: "Search did not complete" };
+   }
+   const contexts = (finish.resolved ?? []).map((r) => ({
+     file: r.path,
+     content: r.content
+   }));
+   return { success: true, contexts, summary: finish.payload };
+ }
+ function createMorphWarpGrepTool(config) {
+   const tool = {
+     name: "morph-warp-grep",
+     description: config.description ?? WARP_GREP_DESCRIPTION,
+     input_schema: INPUT_SCHEMA
+   };
+   return Object.assign(tool, {
+     execute: async (input) => {
+       return execute(input, config);
+     },
+     formatResult: (result) => {
+       return formatResult(result);
+     },
+     getSystemPrompt: () => {
+       return getSystemPrompt();
+     }
+   });
+ }
+ var anthropic_default = warpGrepTool;
+
+ export {
+   warpGrepTool,
+   execute,
+   createMorphWarpGrepTool,
+   anthropic_default
+ };
+ //# sourceMappingURL=chunk-PABIV7X6.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../tools/warp_grep/anthropic.ts"],"sourcesContent":["/**\n * Anthropic SDK adapter for morph-warp-grep tool\n */\n\nimport type { Tool } from '@anthropic-ai/sdk/resources/messages';\nimport { runWarpGrep } from './agent/runner.js';\nimport { LocalRipgrepProvider } from './providers/local.js';\nimport { WARP_GREP_DESCRIPTION, getSystemPrompt } from './prompts.js';\nimport { formatResult } from './core.js';\nimport type { WarpGrepToolConfig, WarpGrepResult, WarpGrepContext } from './types.js';\n\n/**\n * Input schema for the warp grep tool\n */\nconst INPUT_SCHEMA = {\n type: 'object',\n properties: {\n query: { type: 'string', description: 'Free-form repository question' },\n },\n required: ['query'],\n} as const;\n\n/**\n * Anthropic-native warp grep tool definition\n * \n * @example\n * ```typescript\n * import Anthropic from '@anthropic-ai/sdk';\n * import { warpGrepTool, execute } from '@morphllm/morphsdk/tools/warp-grep/anthropic';\n * \n * const client = new Anthropic();\n * const response = await client.messages.create({\n * model: 'claude-sonnet-4-5-20250929',\n * tools: [warpGrepTool],\n * messages: [{ role: 'user', content: 'Find authentication middleware' }]\n * });\n * \n * // Execute the tool call\n * const result = await execute({ query: '...' }, { repoRoot: '.' });\n * ```\n */\nexport const warpGrepTool: Tool = {\n name: 'morph-warp-grep',\n description: WARP_GREP_DESCRIPTION,\n input_schema: INPUT_SCHEMA,\n};\n\n/**\n * Execute warp grep search\n * \n * @param input - Tool input with query\n * @param config - Configuration with repoRoot and optional provider\n * @returns Search results\n */\nexport async function execute(\n input: { query: string } | string,\n config: WarpGrepToolConfig\n): Promise<WarpGrepResult> {\n const parsed = typeof input === 'string' ? JSON.parse(input) : input;\n const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);\n \n const result = await runWarpGrep({\n query: parsed.query,\n repoRoot: config.repoRoot,\n provider,\n excludes: config.excludes,\n includes: config.includes,\n debug: config.debug ?? false,\n apiKey: config.apiKey,\n });\n\n const finish = result.finish;\n if (result.terminationReason !== 'completed' || !finish?.metadata) {\n return { success: false, error: 'Search did not complete' };\n }\n\n const contexts: WarpGrepContext[] = (finish.resolved ?? 
[]).map(r => ({\n file: r.path,\n content: r.content,\n }));\n\n return { success: true, contexts, summary: finish.payload };\n}\n\n// Re-export formatResult and getSystemPrompt for convenience\nexport { formatResult, getSystemPrompt };\n\n/**\n * Create a custom warp grep tool with configuration and methods\n * \n * @param config - Configuration options\n * @returns Tool definition with execute and formatResult methods\n * \n * @example\n * ```typescript\n * import Anthropic from '@anthropic-ai/sdk';\n * import { createMorphWarpGrepTool } from '@morphllm/morphsdk/tools/warp-grep/anthropic';\n * \n * const tool = createMorphWarpGrepTool({\n * repoRoot: '.',\n * description: 'Custom tool description'\n * });\n * \n * const client = new Anthropic();\n * const response = await client.messages.create({\n * model: 'claude-sonnet-4-5-20250929',\n * tools: [tool],\n * messages: [{ role: 'user', content: 'Find authentication middleware' }]\n * });\n * \n * // Execute and format\n * const result = await tool.execute(toolUseBlock.input);\n * const formatted = tool.formatResult(result);\n * ```\n */\nexport function createMorphWarpGrepTool(config: WarpGrepToolConfig) {\n const tool: Tool = {\n name: 'morph-warp-grep',\n description: config.description ?? WARP_GREP_DESCRIPTION,\n input_schema: INPUT_SCHEMA,\n };\n\n return Object.assign(tool, {\n execute: async (input: unknown): Promise<WarpGrepResult> => {\n return execute(input as { query: string } | string, config);\n },\n formatResult: (result: WarpGrepResult): string => {\n return formatResult(result);\n },\n getSystemPrompt: (): string => {\n return getSystemPrompt();\n },\n });\n}\n\nexport default warpGrepTool;\n"],"mappings":";;;;;;;;;;;;;;;AAcA,IAAM,eAAe;AAAA,EACnB,MAAM;AAAA,EACN,YAAY;AAAA,IACV,OAAO,EAAE,MAAM,UAAU,aAAa,gCAAgC;AAAA,EACxE;AAAA,EACA,UAAU,CAAC,OAAO;AACpB;AAqBO,IAAM,eAAqB;AAAA,EAChC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,cAAc;AAChB;AASA,eAAsB,QACpB,OACA,QACyB;AACzB,QAAM,SAAS,OAAO,UAAU,WAAW,KAAK,MAAM,KAAK,IAAI;AAC/D,QAAM,WAAW,OAAO,YAAY,IAAI,qBAAqB,OAAO,UAAU,OAAO,QAAQ;AAE7F,QAAM,SAAS,MAAM,YAAY;AAAA,IAC/B,OAAO,OAAO;AAAA,IACd,UAAU,OAAO;AAAA,IACjB;AAAA,IACA,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,OAAO,OAAO,SAAS;AAAA,IACvB,QAAQ,OAAO;AAAA,EACjB,CAAC;AAED,QAAM,SAAS,OAAO;AACtB,MAAI,OAAO,sBAAsB,eAAe,CAAC,QAAQ,UAAU;AACjE,WAAO,EAAE,SAAS,OAAO,OAAO,0BAA0B;AAAA,EAC5D;AAEA,QAAM,YAA+B,OAAO,YAAY,CAAC,GAAG,IAAI,QAAM;AAAA,IACpE,MAAM,EAAE;AAAA,IACR,SAAS,EAAE;AAAA,EACb,EAAE;AAEF,SAAO,EAAE,SAAS,MAAM,UAAU,SAAS,OAAO,QAAQ;AAC5D;AAiCO,SAAS,wBAAwB,QAA4B;AAClE,QAAM,OAAa;AAAA,IACjB,MAAM;AAAA,IACN,aAAa,OAAO,eAAe;AAAA,IACnC,cAAc;AAAA,EAChB;AAEA,SAAO,OAAO,OAAO,MAAM;AAAA,IACzB,SAAS,OAAO,UAA4C;AAC1D,aAAO,QAAQ,OAAqC,MAAM;AAAA,IAC5D;AAAA,IACA,cAAc,CAAC,WAAmC;AAChD,aAAO,aAAa,MAAM;AAAA,IAC5B;AAAA,IACA,iBAAiB,MAAc;AAC7B,aAAO,gBAAgB;AAAA,IACzB;AAAA,EACF,CAAC;AACH;AAEA,IAAO,oBAAQ;","names":[]}
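The two additions above are the compiled Anthropic adapter for the warp-grep tool (chunk-PABIV7X6.js, built from tools/warp_grep/anthropic.ts) and its source map, whose embedded source carries @example blocks. A minimal usage sketch adapted from those examples follows; the import path and model id are taken from the embedded JSDoc, while the max_tokens value and the tool_use handling loop are illustrative additions based on the standard Anthropic Messages response shape, not part of the package:

    import Anthropic from '@anthropic-ai/sdk';
    import { createMorphWarpGrepTool } from '@morphllm/morphsdk/tools/warp-grep/anthropic';

    async function main() {
      // Bind the tool to the repository root; description override is optional.
      const tool = createMorphWarpGrepTool({ repoRoot: '.' });

      const client = new Anthropic();
      const response = await client.messages.create({
        model: 'claude-sonnet-4-5-20250929', // model id from the embedded example
        max_tokens: 1024,                    // illustrative value
        tools: [tool],
        messages: [{ role: 'user', content: 'Find authentication middleware' }],
      });

      // When the model emits a tool_use block, run the search and format the result.
      for (const block of response.content) {
        if (block.type === 'tool_use' && block.name === 'morph-warp-grep') {
          const result = await tool.execute(block.input);
          console.log(tool.formatResult(result));
        }
      }
    }

    main().catch(console.error);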
@@ -5,13 +5,54 @@ var LLMResponseParseError = class extends Error {
     this.name = "LLMResponseParseError";
   }
 };
+var VALID_COMMANDS = ["analyse", "grep", "read", "finish"];
+function preprocessText(text) {
+  let processed = text.replace(/<think>[\s\S]*?<\/think>/gi, "");
+  const openingTagRegex = /<tool_call>|<tool>/gi;
+  const closingTagRegex = /<\/tool_call>|<\/tool>/gi;
+  const openingMatches = processed.match(openingTagRegex) || [];
+  const closingMatches = processed.match(closingTagRegex) || [];
+  if (openingMatches.length > closingMatches.length) {
+    const lastClosingMatch = /<\/tool_call>|<\/tool>/gi;
+    let lastClosingIndex = -1;
+    let match;
+    while ((match = lastClosingMatch.exec(processed)) !== null) {
+      lastClosingIndex = match.index + match[0].length;
+    }
+    if (lastClosingIndex > 0) {
+      processed = processed.slice(0, lastClosingIndex);
+    }
+  }
+  const toolCallLines = [];
+  const toolTagRegex = /<tool_call>([\s\S]*?)<\/tool_call>|<tool>([\s\S]*?)<\/tool>/gi;
+  let tagMatch;
+  while ((tagMatch = toolTagRegex.exec(processed)) !== null) {
+    const content = (tagMatch[1] || tagMatch[2] || "").trim();
+    if (content) {
+      const lines = content.split(/\r?\n/).map((l) => l.trim()).filter((l) => l);
+      toolCallLines.push(...lines);
+    }
+  }
+  const allLines = processed.split(/\r?\n/).map((l) => l.trim());
+  for (const line of allLines) {
+    if (!line) continue;
+    if (line.startsWith("<")) continue;
+    const firstWord = line.split(/\s/)[0];
+    if (VALID_COMMANDS.includes(firstWord)) {
+      if (!toolCallLines.includes(line)) {
+        toolCallLines.push(line);
+      }
+    }
+  }
+  return toolCallLines;
+}
 var LLMResponseParser = class {
   finishSpecSplitRe = /,(?=[^,\s]+:)/;
   parse(text) {
     if (typeof text !== "string") {
       throw new TypeError("Command text must be a string.");
     }
-    const lines = text.split(/\r?\n/).map((l) => l.trim());
+    const lines = preprocessText(text);
     const commands = [];
     let finishAccumulator = null;
     lines.forEach((line, idx) => {
@@ -34,7 +75,7 @@ var LLMResponseParser = class {
           finishAccumulator = this.handleFinish(parts, ctx, finishAccumulator);
           break;
         default:
-          throw new LLMResponseParseError(`Line ${ctx.lineNumber}: Unsupported command '${cmd}'`);
+          break;
       }
     });
     if (finishAccumulator) {
@@ -137,4 +178,4 @@ export {
   LLMResponseParseError,
   LLMResponseParser
 };
-//# sourceMappingURL=chunk-GTOXMAF2.js.map
+//# sourceMappingURL=chunk-SWQPIKPY.js.map
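The parser hunk above adds a preprocessText step: <think>…</think> blocks are stripped, command lines are pulled out of <tool_call>/<tool> wrappers (tolerating an unterminated final tag), bare lines whose first word is one of analyse/grep/read/finish are still accepted, and unsupported commands now fall through instead of raising LLMResponseParseError. Below is a minimal, self-contained sketch of the tag-extraction part only, reusing the regexes shown in the diff; it is an illustrative restatement, not the shipped function, which stays internal to the chunk, and the example command line is hypothetical:

    // Sketch: extract command lines from tagged model output, mirroring the regexes
    // added in the diff above. The shipped preprocessText additionally salvages
    // unterminated <tool_call> blocks and bare command lines outside any tag.
    function extractToolCallLines(text: string): string[] {
      const withoutThink = text.replace(/<think>[\s\S]*?<\/think>/gi, '');
      const lines: string[] = [];
      const tagRe = /<tool_call>([\s\S]*?)<\/tool_call>|<tool>([\s\S]*?)<\/tool>/gi;
      let m: RegExpExecArray | null;
      while ((m = tagRe.exec(withoutThink)) !== null) {
        const body = (m[1] || m[2] || '').trim();
        if (body) {
          lines.push(...body.split(/\r?\n/).map((l) => l.trim()).filter(Boolean));
        }
      }
      return lines;
    }

    // Example: the reasoning block is discarded, the wrapped command line survives.
    const raw = '<think>Plan the search first.</think>\n<tool_call>\nread src/auth.ts\n</tool_call>';
    console.log(extractToolCallLines(raw)); // ["read src/auth.ts"]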