@morphllm/morphsdk 0.2.44 → 0.2.46

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (142)
  1. package/README.md +1 -1
  2. package/dist/{chunk-TVFGHXPE.js → chunk-3FTAIJBH.js} +4 -4
  3. package/dist/chunk-5JTJOQUX.js +283 -0
  4. package/dist/chunk-5JTJOQUX.js.map +1 -0
  5. package/dist/{chunk-ZRLEAPZV.js → chunk-76DJEQEP.js} +4 -4
  6. package/dist/{chunk-W3XLPMV3.js → chunk-7HS6YXA3.js} +21 -5
  7. package/dist/{chunk-W3XLPMV3.js.map → chunk-7HS6YXA3.js.map} +1 -1
  8. package/dist/chunk-7T7YOPJV.js +82 -0
  9. package/dist/chunk-7T7YOPJV.js.map +1 -0
  10. package/dist/chunk-CL45IWIU.js +105 -0
  11. package/dist/chunk-CL45IWIU.js.map +1 -0
  12. package/dist/chunk-D6OD3IST.js +70 -0
  13. package/dist/chunk-D6OD3IST.js.map +1 -0
  14. package/dist/{chunk-PEGZVGG4.js → chunk-G4AWE5A2.js} +4 -4
  15. package/dist/{chunk-OUEJ6XEO.js → chunk-GJU7UOFL.js} +4 -4
  16. package/dist/{chunk-Q7PDN7TS.js → chunk-GZMUGMOZ.js} +1 -1
  17. package/dist/{chunk-Q7PDN7TS.js.map → chunk-GZMUGMOZ.js.map} +1 -1
  18. package/dist/chunk-JYBVRF72.js +1 -0
  19. package/dist/{chunk-EYHXBQQX.js → chunk-LVY5LPEX.js} +70 -10
  20. package/dist/chunk-LVY5LPEX.js.map +1 -0
  21. package/dist/{chunk-GDR65N2J.js → chunk-OXHGFHEU.js} +53 -26
  22. package/dist/chunk-OXHGFHEU.js.map +1 -0
  23. package/dist/{chunk-VBBJGWHY.js → chunk-P2XKFWFD.js} +2 -2
  24. package/dist/chunk-PABIV7X6.js +76 -0
  25. package/dist/chunk-PABIV7X6.js.map +1 -0
  26. package/dist/{chunk-GTOXMAF2.js → chunk-SWQPIKPY.js} +44 -3
  27. package/dist/chunk-SWQPIKPY.js.map +1 -0
  28. package/dist/chunk-TJIUA27P.js +94 -0
  29. package/dist/chunk-TJIUA27P.js.map +1 -0
  30. package/dist/{chunk-O5DA5V5S.js → chunk-UBX7QYBD.js} +4 -4
  31. package/dist/{chunk-X4CQ6D3G.js → chunk-UIZT3KVJ.js} +4 -4
  32. package/dist/{chunk-UYBIKZPM.js → chunk-UXYK7WZX.js} +2 -2
  33. package/dist/chunk-WETRQJGU.js +129 -0
  34. package/dist/chunk-WETRQJGU.js.map +1 -0
  35. package/dist/client-BGctTHu9.d.ts +318 -0
  36. package/dist/client.cjs +1954 -53
  37. package/dist/client.cjs.map +1 -1
  38. package/dist/client.d.ts +14 -110
  39. package/dist/client.js +29 -4
  40. package/dist/core-DxiUwyBe.d.ts +156 -0
  41. package/dist/git/client.cjs +52 -25
  42. package/dist/git/client.cjs.map +1 -1
  43. package/dist/git/client.d.ts +17 -8
  44. package/dist/git/client.js +1 -1
  45. package/dist/git/index.cjs +52 -25
  46. package/dist/git/index.cjs.map +1 -1
  47. package/dist/git/index.d.ts +1 -1
  48. package/dist/git/index.js +2 -2
  49. package/dist/git/types.cjs.map +1 -1
  50. package/dist/git/types.d.ts +20 -2
  51. package/dist/index.cjs +2033 -55
  52. package/dist/index.cjs.map +1 -1
  53. package/dist/index.d.ts +8 -1
  54. package/dist/index.js +48 -6
  55. package/dist/tools/browser/anthropic.cjs +1 -0
  56. package/dist/tools/browser/anthropic.cjs.map +1 -1
  57. package/dist/tools/browser/anthropic.js +1 -1
  58. package/dist/tools/browser/core.cjs +69 -9
  59. package/dist/tools/browser/core.cjs.map +1 -1
  60. package/dist/tools/browser/core.js +1 -1
  61. package/dist/tools/browser/index.cjs +69 -9
  62. package/dist/tools/browser/index.cjs.map +1 -1
  63. package/dist/tools/browser/index.js +1 -1
  64. package/dist/tools/browser/openai.cjs +1 -0
  65. package/dist/tools/browser/openai.cjs.map +1 -1
  66. package/dist/tools/browser/openai.js +1 -1
  67. package/dist/tools/browser/types.cjs.map +1 -1
  68. package/dist/tools/browser/types.d.ts +2 -0
  69. package/dist/tools/browser/vercel.cjs +1 -0
  70. package/dist/tools/browser/vercel.cjs.map +1 -1
  71. package/dist/tools/browser/vercel.js +1 -1
  72. package/dist/tools/codebase_search/anthropic.js +2 -2
  73. package/dist/tools/codebase_search/index.js +9 -9
  74. package/dist/tools/codebase_search/openai.js +2 -2
  75. package/dist/tools/codebase_search/vercel.js +2 -2
  76. package/dist/tools/fastapply/anthropic.js +2 -2
  77. package/dist/tools/fastapply/index.js +7 -7
  78. package/dist/tools/fastapply/openai.js +2 -2
  79. package/dist/tools/fastapply/vercel.js +2 -2
  80. package/dist/tools/index.js +7 -7
  81. package/dist/tools/warp_grep/agent/config.cjs +80 -1
  82. package/dist/tools/warp_grep/agent/config.cjs.map +1 -1
  83. package/dist/tools/warp_grep/agent/config.js +1 -1
  84. package/dist/tools/warp_grep/agent/parser.cjs +43 -2
  85. package/dist/tools/warp_grep/agent/parser.cjs.map +1 -1
  86. package/dist/tools/warp_grep/agent/parser.js +1 -1
  87. package/dist/tools/warp_grep/agent/prompt.cjs +89 -45
  88. package/dist/tools/warp_grep/agent/prompt.cjs.map +1 -1
  89. package/dist/tools/warp_grep/agent/prompt.d.ts +1 -1
  90. package/dist/tools/warp_grep/agent/prompt.js +1 -1
  91. package/dist/tools/warp_grep/agent/runner.cjs +229 -49
  92. package/dist/tools/warp_grep/agent/runner.cjs.map +1 -1
  93. package/dist/tools/warp_grep/agent/runner.js +4 -4
  94. package/dist/tools/warp_grep/agent/types.js +0 -1
  95. package/dist/tools/warp_grep/anthropic.cjs +311 -83
  96. package/dist/tools/warp_grep/anthropic.cjs.map +1 -1
  97. package/dist/tools/warp_grep/anthropic.d.ts +75 -12
  98. package/dist/tools/warp_grep/anthropic.js +21 -8
  99. package/dist/tools/warp_grep/index.cjs +415 -126
  100. package/dist/tools/warp_grep/index.cjs.map +1 -1
  101. package/dist/tools/warp_grep/index.d.ts +17 -4
  102. package/dist/tools/warp_grep/index.js +29 -21
  103. package/dist/tools/warp_grep/openai.cjs +314 -83
  104. package/dist/tools/warp_grep/openai.cjs.map +1 -1
  105. package/dist/tools/warp_grep/openai.d.ts +73 -29
  106. package/dist/tools/warp_grep/openai.js +21 -8
  107. package/dist/tools/warp_grep/providers/command.cjs +80 -1
  108. package/dist/tools/warp_grep/providers/command.cjs.map +1 -1
  109. package/dist/tools/warp_grep/providers/command.js +2 -2
  110. package/dist/tools/warp_grep/providers/local.cjs +80 -1
  111. package/dist/tools/warp_grep/providers/local.cjs.map +1 -1
  112. package/dist/tools/warp_grep/providers/local.js +2 -2
  113. package/dist/tools/warp_grep/vercel.cjs +291 -57
  114. package/dist/tools/warp_grep/vercel.cjs.map +1 -1
  115. package/dist/tools/warp_grep/vercel.d.ts +40 -19
  116. package/dist/tools/warp_grep/vercel.js +17 -8
  117. package/package.json +1 -1
  118. package/dist/chunk-AFEPUNAO.js +0 -15
  119. package/dist/chunk-AFEPUNAO.js.map +0 -1
  120. package/dist/chunk-EYHXBQQX.js.map +0 -1
  121. package/dist/chunk-GDR65N2J.js.map +0 -1
  122. package/dist/chunk-GTOXMAF2.js.map +0 -1
  123. package/dist/chunk-HKZB23U7.js +0 -85
  124. package/dist/chunk-HKZB23U7.js.map +0 -1
  125. package/dist/chunk-IQHKEIQX.js +0 -54
  126. package/dist/chunk-IQHKEIQX.js.map +0 -1
  127. package/dist/chunk-JKFVDM62.js +0 -45
  128. package/dist/chunk-JKFVDM62.js.map +0 -1
  129. package/dist/chunk-KL4YVZRF.js +0 -57
  130. package/dist/chunk-KL4YVZRF.js.map +0 -1
  131. package/dist/chunk-SMR2T5BT.js +0 -104
  132. package/dist/chunk-SMR2T5BT.js.map +0 -1
  133. package/dist/chunk-XYPMN4A3.js +0 -1
  134. /package/dist/{chunk-TVFGHXPE.js.map → chunk-3FTAIJBH.js.map} +0 -0
  135. /package/dist/{chunk-ZRLEAPZV.js.map → chunk-76DJEQEP.js.map} +0 -0
  136. /package/dist/{chunk-PEGZVGG4.js.map → chunk-G4AWE5A2.js.map} +0 -0
  137. /package/dist/{chunk-OUEJ6XEO.js.map → chunk-GJU7UOFL.js.map} +0 -0
  138. /package/dist/{chunk-XYPMN4A3.js.map → chunk-JYBVRF72.js.map} +0 -0
  139. /package/dist/{chunk-VBBJGWHY.js.map → chunk-P2XKFWFD.js.map} +0 -0
  140. /package/dist/{chunk-O5DA5V5S.js.map → chunk-UBX7QYBD.js.map} +0 -0
  141. /package/dist/{chunk-X4CQ6D3G.js.map → chunk-UIZT3KVJ.js.map} +0 -0
  142. /package/dist/{chunk-UYBIKZPM.js.map → chunk-UXYK7WZX.js.map} +0 -0
@@ -1 +0,0 @@
- {"version":3,"sources":["../git/client.ts"],"sourcesContent":["/**\n * Morph Git Client - Simple, high-level Git operations\n * Built on isomorphic-git with explicit configuration\n */\n\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\nimport fs from 'fs';\nimport type {\n CloneOptions,\n PushOptions,\n PullOptions,\n AddOptions,\n CommitOptions,\n StatusOptions,\n LogOptions,\n CheckoutOptions,\n BranchOptions,\n DiffOptions,\n CommitObject,\n StatusResult,\n MorphGitConfig,\n CommitMetadata,\n WaitForEmbeddingsOptions,\n EmbeddingProgress,\n} from './types.js';\n\nconst DEFAULT_PROXY_URL = 'https://repos.morphllm.com';\n\n/**\n * MorphGit - Git operations for AI agents with Morph backend\n * \n * @example\n * ```typescript\n * import { MorphGit } from 'morphsdk/git';\n * \n * const morphGit = new MorphGit({\n * apiKey: process.env.MORPH_API_KEY!,\n * proxyUrl: 'https://repos.morphllm.com' // Optional\n * });\n * \n * await morphGit.init({ repoId: 'my-project', dir: './my-project' });\n * await morphGit.push({ dir: './my-project' });\n * ```\n */\nexport class MorphGit {\n private readonly apiKey: string;\n private readonly proxyUrl: string;\n\n constructor(config: MorphGitConfig) {\n // Validate API key\n if (!config.apiKey) {\n throw new Error('API key is required. Get one at https://morphllm.com/dashboard');\n }\n \n if (!config.apiKey.startsWith('sk-') && !config.apiKey.startsWith('morph-')) {\n throw new Error('Invalid API key format. Expected: sk-... or morph-...');\n }\n \n this.apiKey = config.apiKey;\n this.proxyUrl = config.proxyUrl || DEFAULT_PROXY_URL;\n }\n \n /**\n * Get auth callback for isomorphic-git operations\n * @private\n */\n private getAuthCallback() {\n return () => ({\n username: 'morph',\n password: this.apiKey,\n });\n }\n\n /**\n * Initialize a new repository\n * Creates the repo in the database and in the git provider\n * \n * @example\n * ```ts\n * await morphGit.init({\n * repoId: 'my-project',\n * dir: './my-project',\n * defaultBranch: 'main'\n * });\n * ```\n */\n async init(options: {\n repoId: string;\n dir: string;\n defaultBranch?: string;\n }): Promise<void> {\n const { repoId, dir, defaultBranch = 'main' } = options;\n\n // Call backend API to create repository\n const response = await fetch(`${this.proxyUrl}/v1/repos`, {\n method: 'POST',\n headers: {\n 'Authorization': `Bearer ${this.apiKey}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n repoId,\n name: repoId,\n defaultBranch,\n }),\n });\n\n if (!response.ok) {\n const error = await response.text();\n throw new Error(`Failed to create repository: ${error}`);\n }\n\n // Initialize local git repository (industry standard: no clone needed)\n await git.init({\n fs,\n dir,\n defaultBranch,\n });\n\n // Add remote pointing to Morph git-proxy\n await git.addRemote({\n fs,\n dir,\n remote: 'origin',\n url: `${this.proxyUrl}/v1/repos/${repoId}`,\n });\n\n console.log(`✓ Repository '${repoId}' initialized`);\n }\n\n /**\n * Clone a repository from Morph repos\n * \n * @example\n * ```ts\n * await morphGit.clone({\n * repoId: 'my-project',\n * dir: './my-project'\n * });\n * ```\n */\n async clone(options: CloneOptions): Promise<void> {\n const { repoId, dir, branch = 'main', depth, singleBranch = true } = options;\n\n await git.clone({\n fs,\n http,\n dir,\n url: `${this.proxyUrl}/v1/repos/${repoId}`,\n ref: branch,\n singleBranch,\n depth,\n onAuth: this.getAuthCallback(),\n });\n }\n\n /**\n * Push changes to remote repository\n * \n * 
@example\n * ```ts\n * await morphGit.push({ \n * dir: './my-project',\n * branch: 'main' // Required: explicit branch name\n * });\n * ```\n */\n async push(options: PushOptions): Promise<void> {\n const { dir, remote = 'origin', branch, waitForEmbeddings } = options;\n\n if (!branch) {\n throw new Error(\n 'branch is required for push operations. ' +\n 'Specify the branch explicitly: { dir: \"./my-project\", branch: \"main\" }'\n );\n }\n\n // Get commit hash and repoId before pushing (for waitForEmbeddings)\n let commitHash: string | undefined;\n let repoId: string | undefined;\n \n if (waitForEmbeddings) {\n commitHash = await git.resolveRef({ fs, dir, ref: 'HEAD' });\n \n // Get repoId from git remote URL\n const remotes = await git.listRemotes({ fs, dir });\n const originRemote = remotes.find(r => r.remote === remote);\n if (originRemote) {\n // Extract repoId from URL: https://repos.morphllm.com/v1/repos/{repoId}\n const match = originRemote.url.match(/\\/repos\\/([^\\/]+)$/);\n if (match) {\n repoId = match[1];\n }\n }\n }\n\n await git.push({\n fs,\n http,\n dir,\n remote,\n ref: branch,\n onAuth: this.getAuthCallback(),\n });\n \n // Wait for embeddings if requested\n if (waitForEmbeddings && repoId && commitHash) {\n await this.waitForEmbeddings({ repoId, commitHash });\n }\n }\n\n /**\n * Pull changes from remote repository\n * \n * @example\n * ```ts\n * await morphGit.pull({ \n * dir: './my-project',\n * branch: 'main' // Required: explicit branch name\n * });\n * ```\n */\n async pull(options: PullOptions): Promise<void> {\n const { dir, remote = 'origin', branch } = options;\n\n if (!branch) {\n throw new Error(\n 'branch is required for pull operations. ' +\n 'Specify the branch explicitly: { dir: \"./my-project\", branch: \"main\" }'\n );\n }\n\n await git.pull({\n fs,\n http,\n dir,\n remote,\n ref: branch,\n onAuth: this.getAuthCallback(),\n author: {\n name: 'Morph Agent',\n email: 'agent@morph.com',\n },\n });\n }\n\n /**\n * Wait for embeddings to complete after push.\n * Polls status endpoint until embeddings are done.\n * \n * @example\n * ```ts\n * await morphGit.push({ dir: './my-project', branch: 'main' });\n * await morphGit.waitForEmbeddings({\n * repoId: 'my-project',\n * onProgress: (p) => console.log(`${p.filesProcessed}/${p.totalFiles}`)\n * });\n * ```\n */\n async waitForEmbeddings(options: WaitForEmbeddingsOptions): Promise<void> {\n const { repoId, commitHash, timeout = 120000, onProgress } = options;\n const startTime = Date.now();\n const pollInterval = 1000; // Poll every 1s\n \n while (Date.now() - startTime < timeout) {\n const statusUrl = `${this.proxyUrl}/v1/repos/${repoId}/embedding-status` +\n (commitHash ? 
`?commit_hash=${commitHash}` : '');\n \n const response = await fetch(statusUrl, {\n headers: { 'Authorization': `Bearer ${this.apiKey}` }\n });\n \n if (response.status === 404) {\n // No job found yet - might still be creating\n await new Promise(resolve => setTimeout(resolve, pollInterval));\n continue;\n }\n \n if (!response.ok) {\n throw new Error(`Failed to get embedding status: ${response.status}`);\n }\n \n const status = await response.json();\n \n if (onProgress && status.progress) {\n onProgress(status.progress);\n }\n \n if (status.status === 'completed') {\n return; // Done!\n }\n \n if (status.status === 'failed') {\n throw new Error(`Embeddings failed: ${status.error || 'Unknown error'}`);\n }\n \n // Still processing (queued or processing), wait and poll again\n await new Promise(resolve => setTimeout(resolve, pollInterval));\n }\n \n throw new Error(`Embeddings timed out after ${timeout}ms`);\n }\n\n /**\n * Stage a file for commit\n * \n * @example\n * ```ts\n * await morphGit.add({\n * dir: './my-project',\n * filepath: 'src/app.ts'\n * });\n * ```\n */\n async add(options: AddOptions): Promise<void> {\n const { dir, filepath } = options;\n\n await git.add({\n fs,\n dir,\n filepath,\n });\n }\n\n /**\n * Remove a file from staging\n * \n * @example\n * ```ts\n * await morphGit.remove({\n * dir: './my-project',\n * filepath: 'src/old-file.ts'\n * });\n * ```\n */\n async remove(options: AddOptions): Promise<void> {\n const { dir, filepath } = options;\n\n await git.remove({\n fs,\n dir,\n filepath,\n });\n }\n\n /**\n * Commit staged changes\n * \n * @example\n * ```ts\n * await morphGit.commit({\n * dir: './my-project',\n * message: 'Add new feature',\n * author: {\n * name: 'AI Agent',\n * email: 'ai@example.com'\n * },\n * chatHistory: [\n * { role: 'user', content: 'Please add a new feature' },\n * { role: 'assistant', content: 'I will add that feature' }\n * ],\n * recordingId: 'rec_123'\n * });\n * ```\n */\n async commit(options: CommitOptions): Promise<string> {\n const { dir, message, author, chatHistory, recordingId } = options;\n\n // Provide default author if not specified\n const commitAuthor = author || {\n name: 'Morph SDK',\n email: 'sdk@morphllm.com'\n };\n\n const sha = await git.commit({\n fs,\n dir,\n message,\n author: commitAuthor,\n });\n\n // Store metadata as git note if provided\n if (chatHistory || recordingId) {\n const metadata: CommitMetadata = {\n chatHistory,\n recordingId\n };\n \n await git.addNote({\n fs,\n dir,\n ref: 'refs/notes/morph-metadata',\n oid: sha,\n note: JSON.stringify(metadata, null, 2),\n author: commitAuthor\n });\n }\n\n return sha;\n }\n\n /**\n * Get status of a file\n * \n * @example\n * ```ts\n * const status = await morphGit.status({\n * dir: './my-project',\n * filepath: 'src/app.ts'\n * });\n * console.log(status); // 'modified', '*added', etc.\n * ```\n */\n async status(options: StatusOptions): Promise<string> {\n const { dir, filepath } = options;\n\n if (!filepath) {\n throw new Error('filepath is required for status check');\n }\n\n const status = await git.status({\n fs,\n dir,\n filepath,\n });\n\n return status;\n }\n\n /**\n * Get commit history\n * \n * @example\n * ```ts\n * const commits = await morphGit.log({\n * dir: './my-project',\n * depth: 10\n * });\n * ```\n */\n async log(options: LogOptions): Promise<CommitObject[]> {\n const { dir, depth, ref } = options;\n\n const commits = await git.log({\n fs,\n dir,\n depth,\n ref,\n });\n\n return commits as CommitObject[];\n }\n\n /**\n * Checkout a 
branch or commit\n * \n * @example\n * ```ts\n * await morphGit.checkout({\n * dir: './my-project',\n * ref: 'feature-branch'\n * });\n * ```\n */\n async checkout(options: CheckoutOptions): Promise<void> {\n const { dir, ref } = options;\n\n await git.checkout({\n fs,\n dir,\n ref,\n });\n }\n\n /**\n * Create a new branch\n * \n * @example\n * ```ts\n * await morphGit.branch({\n * dir: './my-project',\n * name: 'feature-branch',\n * checkout: true\n * });\n * ```\n */\n async branch(options: BranchOptions): Promise<void> {\n const { dir, name, checkout = false } = options;\n\n await git.branch({\n fs,\n dir,\n ref: name,\n checkout,\n });\n }\n\n /**\n * List all branches\n * \n * @example\n * ```ts\n * const branches = await morphGit.listBranches({\n * dir: './my-project'\n * });\n * ```\n */\n async listBranches(options: { dir: string }): Promise<string[]> {\n const { dir } = options;\n\n const branches = await git.listBranches({\n fs,\n dir,\n });\n\n return branches;\n }\n\n /**\n * Get the current branch name\n * \n * @example\n * ```ts\n * const branch = await morphGit.currentBranch({\n * dir: './my-project'\n * });\n * ```\n */\n async currentBranch(options: { dir: string }): Promise<string | undefined> {\n const { dir } = options;\n\n const branch = await git.currentBranch({\n fs,\n dir,\n });\n\n return branch || undefined;\n }\n\n /**\n * Get list of changed files (similar to git diff --name-only)\n * \n * @example\n * ```ts\n * const changes = await morphGit.statusMatrix({\n * dir: './my-project'\n * });\n * ```\n */\n async statusMatrix(options: { dir: string }): Promise<StatusResult[]> {\n const { dir } = options;\n\n const matrix = await git.statusMatrix({\n fs,\n dir,\n });\n\n return matrix.map(([filepath, HEADStatus, workdirStatus, stageStatus]) => {\n let status: StatusResult['status'] = 'unmodified';\n\n // Determine status based on statusMatrix values\n if (HEADStatus === 1 && workdirStatus === 2 && stageStatus === 2) {\n status = 'modified';\n } else if (HEADStatus === 1 && workdirStatus === 2 && stageStatus === 1) {\n status = '*modified';\n } else if (HEADStatus === 0 && workdirStatus === 2 && stageStatus === 2) {\n status = 'added';\n } else if (HEADStatus === 0 && workdirStatus === 2 && stageStatus === 0) {\n status = '*added';\n } else if (HEADStatus === 1 && workdirStatus === 0 && stageStatus === 0) {\n status = 'deleted';\n } else if (HEADStatus === 1 && workdirStatus === 0 && stageStatus === 1) {\n status = '*deleted';\n } else if (HEADStatus === 1 && workdirStatus === 1 && stageStatus === 1) {\n status = 'unmodified';\n } else if (HEADStatus === 0 && workdirStatus === 0 && stageStatus === 0) {\n status = 'absent';\n }\n\n return {\n filepath,\n status,\n };\n });\n }\n\n /**\n * Get the current commit hash\n * \n * @example\n * ```ts\n * const hash = await morphGit.resolveRef({\n * dir: './my-project',\n * ref: 'HEAD'\n * });\n * ```\n */\n async resolveRef(options: { dir: string; ref: string }): Promise<string> {\n const { dir, ref } = options;\n\n const oid = await git.resolveRef({\n fs,\n dir,\n ref,\n });\n\n return oid;\n }\n\n /**\n * Get metadata (chat history, recording ID) attached to a commit\n * \n * @example\n * ```ts\n * const metadata = await morphGit.getCommitMetadata({\n * dir: './my-project',\n * commitSha: 'abc123...'\n * });\n * \n * if (metadata) {\n * console.log('Chat history:', metadata.chatHistory);\n * console.log('Recording ID:', metadata.recordingId);\n * }\n * ```\n */\n async getCommitMetadata(options: {\n dir: string;\n 
commitSha: string;\n }): Promise<CommitMetadata | null> {\n try {\n const note = await git.readNote({\n fs,\n dir: options.dir,\n ref: 'refs/notes/morph-metadata',\n oid: options.commitSha\n });\n \n const metadata = JSON.parse(new TextDecoder().decode(note));\n return metadata;\n } catch (err) {\n // No metadata found for this commit\n return null;\n }\n }\n}\n\n"],"mappings":";AAKA,OAAO,SAAS;AAChB,OAAO,UAAU;AACjB,OAAO,QAAQ;AAoBf,IAAM,oBAAoB;AAkBnB,IAAM,WAAN,MAAe;AAAA,EACH;AAAA,EACA;AAAA,EAEjB,YAAY,QAAwB;AAElC,QAAI,CAAC,OAAO,QAAQ;AAClB,YAAM,IAAI,MAAM,gEAAgE;AAAA,IAClF;AAEA,QAAI,CAAC,OAAO,OAAO,WAAW,KAAK,KAAK,CAAC,OAAO,OAAO,WAAW,QAAQ,GAAG;AAC3E,YAAM,IAAI,MAAM,uDAAuD;AAAA,IACzE;AAEA,SAAK,SAAS,OAAO;AACrB,SAAK,WAAW,OAAO,YAAY;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,kBAAkB;AACxB,WAAO,OAAO;AAAA,MACZ,UAAU;AAAA,MACV,UAAU,KAAK;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,KAAK,SAIO;AAChB,UAAM,EAAE,QAAQ,KAAK,gBAAgB,OAAO,IAAI;AAGhD,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,QAAQ,aAAa;AAAA,MACxD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,iBAAiB,UAAU,KAAK,MAAM;AAAA,QACtC,gBAAgB;AAAA,MAClB;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA,MAAM;AAAA,QACN;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,QAAQ,MAAM,SAAS,KAAK;AAClC,YAAM,IAAI,MAAM,gCAAgC,KAAK,EAAE;AAAA,IACzD;AAGA,UAAM,IAAI,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAGD,UAAM,IAAI,UAAU;AAAA,MAClB;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,MACR,KAAK,GAAG,KAAK,QAAQ,aAAa,MAAM;AAAA,IAC1C,CAAC;AAED,YAAQ,IAAI,sBAAiB,MAAM,eAAe;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,MAAM,SAAsC;AAChD,UAAM,EAAE,QAAQ,KAAK,SAAS,QAAQ,OAAO,eAAe,KAAK,IAAI;AAErE,UAAM,IAAI,MAAM;AAAA,MACd;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK,GAAG,KAAK,QAAQ,aAAa,MAAM;AAAA,MACxC,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA,QAAQ,KAAK,gBAAgB;AAAA,IAC/B,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,KAAK,SAAqC;AAC9C,UAAM,EAAE,KAAK,SAAS,UAAU,QAAQ,kBAAkB,IAAI;AAE9D,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAGA,QAAI;AACJ,QAAI;AAEJ,QAAI,mBAAmB;AACrB,mBAAa,MAAM,IAAI,WAAW,EAAE,IAAI,KAAK,KAAK,OAAO,CAAC;AAG1D,YAAM,UAAU,MAAM,IAAI,YAAY,EAAE,IAAI,IAAI,CAAC;AACjD,YAAM,eAAe,QAAQ,KAAK,OAAK,EAAE,WAAW,MAAM;AAC1D,UAAI,cAAc;AAEhB,cAAM,QAAQ,aAAa,IAAI,MAAM,oBAAoB;AACzD,YAAI,OAAO;AACT,mBAAS,MAAM,CAAC;AAAA,QAClB;AAAA,MACF;AAAA,IACF;AAEA,UAAM,IAAI,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAAA,MACL,QAAQ,KAAK,gBAAgB;AAAA,IAC/B,CAAC;AAGD,QAAI,qBAAqB,UAAU,YAAY;AAC7C,YAAM,KAAK,kBAAkB,EAAE,QAAQ,WAAW,CAAC;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,KAAK,SAAqC;AAC9C,UAAM,EAAE,KAAK,SAAS,UAAU,OAAO,IAAI;AAE3C,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAEA,UAAM,IAAI,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAAA,MACL,QAAQ,KAAK,gBAAgB;AAAA,MAC7B,QAAQ;AAAA,QACN,MAAM;AAAA,QACN,OAAO;AAAA,MACT;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,kBAAkB,SAAkD;AACxE,UAAM,EAAE,QAAQ,YAAY,UAAU,MAAQ,WAAW,IAAI;AAC7D,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,eAAe;AAErB,WAAO,KAAK,IAAI,IAAI,YAAY,SAAS;AACvC,YAAM,YAAY,GAAG,KAAK,QAAQ,aAAa,MAAM,uBAClD,aAAa,gBAAgB,UAAU,KAAK;AAE/C,YAAM,WAAW,MAAM,MAAM,WAAW;AAAA,QACtC,SAAS,EAAE,iBAAiB,UAAU,KAAK,MAAM,GAAG;AAAA,MACtD,CAAC;AAED,UAAI,SAAS,WAAW,KAAK;AAE3B,cAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,YAAY,CAAC;AAC9D;AAAA,MACF;AAEA,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI,MAAM,mCAAmC,SAAS,MAAM,EAAE;AAAA,MACtE;AAEA,YAAM,SAAS,MAAM,SAAS,KAAK;AAEnC,UAAI,cAAc,OAAO,UAAU;AACjC,mB
AAW,OAAO,QAAQ;AAAA,MAC5B;AAEA,UAAI,OAAO,WAAW,aAAa;AACjC;AAAA,MACF;AAEA,UAAI,OAAO,WAAW,UAAU;AAC9B,cAAM,IAAI,MAAM,sBAAsB,OAAO,SAAS,eAAe,EAAE;AAAA,MACzE;AAGA,YAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,YAAY,CAAC;AAAA,IAChE;AAEA,UAAM,IAAI,MAAM,8BAA8B,OAAO,IAAI;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,IAAI,SAAoC;AAC5C,UAAM,EAAE,KAAK,SAAS,IAAI;AAE1B,UAAM,IAAI,IAAI;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,OAAO,SAAoC;AAC/C,UAAM,EAAE,KAAK,SAAS,IAAI;AAE1B,UAAM,IAAI,OAAO;AAAA,MACf;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBA,MAAM,OAAO,SAAyC;AACpD,UAAM,EAAE,KAAK,SAAS,QAAQ,aAAa,YAAY,IAAI;AAG3D,UAAM,eAAe,UAAU;AAAA,MAC7B,MAAM;AAAA,MACN,OAAO;AAAA,IACT;AAEA,UAAM,MAAM,MAAM,IAAI,OAAO;AAAA,MAC3B;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,QAAI,eAAe,aAAa;AAC9B,YAAM,WAA2B;AAAA,QAC/B;AAAA,QACA;AAAA,MACF;AAEA,YAAM,IAAI,QAAQ;AAAA,QAChB;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL,KAAK;AAAA,QACL,MAAM,KAAK,UAAU,UAAU,MAAM,CAAC;AAAA,QACtC,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,OAAO,SAAyC;AACpD,UAAM,EAAE,KAAK,SAAS,IAAI;AAE1B,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAEA,UAAM,SAAS,MAAM,IAAI,OAAO;AAAA,MAC9B;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,IAAI,SAA8C;AACtD,UAAM,EAAE,KAAK,OAAO,IAAI,IAAI;AAE5B,UAAM,UAAU,MAAM,IAAI,IAAI;AAAA,MAC5B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,SAAS,SAAyC;AACtD,UAAM,EAAE,KAAK,IAAI,IAAI;AAErB,UAAM,IAAI,SAAS;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,OAAO,SAAuC;AAClD,UAAM,EAAE,KAAK,MAAM,WAAW,MAAM,IAAI;AAExC,UAAM,IAAI,OAAO;AAAA,MACf;AAAA,MACA;AAAA,MACA,KAAK;AAAA,MACL;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,aAAa,SAA6C;AAC9D,UAAM,EAAE,IAAI,IAAI;AAEhB,UAAM,WAAW,MAAM,IAAI,aAAa;AAAA,MACtC;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,cAAc,SAAuD;AACzE,UAAM,EAAE,IAAI,IAAI;AAEhB,UAAM,SAAS,MAAM,IAAI,cAAc;AAAA,MACrC;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO,UAAU;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,aAAa,SAAmD;AACpE,UAAM,EAAE,IAAI,IAAI;AAEhB,UAAM,SAAS,MAAM,IAAI,aAAa;AAAA,MACpC;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO,OAAO,IAAI,CAAC,CAAC,UAAU,YAAY,eAAe,WAAW,MAAM;AACxE,UAAI,SAAiC;AAGrC,UAAI,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AAChE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX;AAEA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,WAAW,SAAwD;AACvE,UAAM,EAAE,KAAK,IAAI,IAAI;AAErB,UAAM,MAAM,MAAM,IAAI,WAAW;AAAA,MAC/B;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAA
A;AAAA;AAAA;AAAA;AAAA,EAkBA,MAAM,kBAAkB,SAGW;AACjC,QAAI;AACF,YAAM,OAAO,MAAM,IAAI,SAAS;AAAA,QAC9B;AAAA,QACA,KAAK,QAAQ;AAAA,QACb,KAAK;AAAA,QACL,KAAK,QAAQ;AAAA,MACf,CAAC;AAED,YAAM,WAAW,KAAK,MAAM,IAAI,YAAY,EAAE,OAAO,IAAI,CAAC;AAC1D,aAAO;AAAA,IACT,SAAS,KAAK;AAEZ,aAAO;AAAA,IACT;AAAA,EACF;AACF;","names":[]}
@@ -1 +0,0 @@
- {"version":3,"sources":["../tools/warp_grep/agent/parser.ts"],"sourcesContent":["// Parses assistant lines into structured tool calls\nimport type { ToolCall } from './types.js';\n\nexport class LLMResponseParseError extends Error {\n constructor(message: string) {\n super(message);\n this.name = 'LLMResponseParseError';\n }\n}\n\ntype LineContext = { lineNumber: number; raw: string };\n\nexport class LLMResponseParser {\n private readonly finishSpecSplitRe = /,(?=[^,\\s]+:)/;\n\n parse(text: string): ToolCall[] {\n if (typeof text !== 'string') {\n throw new TypeError('Command text must be a string.');\n }\n const lines = text.split(/\\r?\\n/).map(l => l.trim());\n const commands: ToolCall[] = [];\n let finishAccumulator: Map<string, number[][]> | null = null;\n\n lines.forEach((line, idx) => {\n if (!line || line.startsWith('#')) return;\n const ctx: LineContext = { lineNumber: idx + 1, raw: line };\n const parts = this.splitLine(line, ctx);\n if (parts.length === 0) return;\n const cmd = parts[0];\n switch (cmd) {\n case 'analyse':\n this.handleAnalyse(parts, ctx, commands);\n break;\n case 'grep':\n this.handleGrep(parts, ctx, commands);\n break;\n case 'read':\n this.handleRead(parts, ctx, commands);\n break;\n case 'finish':\n finishAccumulator = this.handleFinish(parts, ctx, finishAccumulator);\n break;\n default:\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: Unsupported command '${cmd}'`);\n }\n });\n\n if (finishAccumulator) {\n const map = finishAccumulator as Map<string, number[][]>;\n const entries = [...map.entries()];\n const filesPayload = entries.map(([path, ranges]) => ({\n path,\n lines: [...ranges].sort((a, b) => a[0] - b[0]) as Array<[number, number]>,\n }));\n commands.push({ name: 'finish', arguments: { files: filesPayload } });\n }\n return commands;\n }\n\n private splitLine(line: string, ctx: LineContext): string[] {\n try {\n // Split by whitespace but keep quoted blocks as one\n const parts: string[] = [];\n let current = '';\n let inSingle = false;\n for (let i = 0; i < line.length; i++) {\n const ch = line[i];\n if (ch === \"'\" && line[i - 1] !== '\\\\') {\n inSingle = !inSingle;\n current += ch;\n } else if (!inSingle && /\\s/.test(ch)) {\n if (current) {\n parts.push(current);\n current = '';\n }\n } else {\n current += ch;\n }\n }\n if (current) parts.push(current);\n return parts;\n } catch {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: Unable to parse line.`);\n }\n }\n\n private handleAnalyse(parts: string[], ctx: LineContext, commands: ToolCall[]) {\n // analyse <path> [pattern]\n if (parts.length < 2) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: analyse requires <path>`);\n }\n const path = parts[1];\n const pattern = parts[2]?.replace(/^\"|\"$/g, '') ?? 
null;\n commands.push({ name: 'analyse', arguments: { path, pattern } });\n }\n\n // no glob tool in MCP\n\n private handleGrep(parts: string[], ctx: LineContext, commands: ToolCall[]) {\n // grep '<pattern>' <path>\n if (parts.length < 3) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: grep requires '<pattern>' and <path>`);\n }\n const pat = parts[1];\n if (!pat.startsWith(\"'\") || !pat.endsWith(\"'\")) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: grep pattern must be single-quoted`);\n }\n commands.push({ name: 'grep', arguments: { pattern: pat.slice(1, -1), path: parts[2] } });\n }\n\n private handleRead(parts: string[], ctx: LineContext, commands: ToolCall[]) {\n // read <path>[:start-end]\n if (parts.length < 2) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: read requires <path> or <path>:<start-end>`);\n }\n const spec = parts[1];\n const rangeIdx = spec.indexOf(':');\n if (rangeIdx === -1) {\n commands.push({ name: 'read', arguments: { path: spec } });\n return;\n }\n const path = spec.slice(0, rangeIdx);\n const range = spec.slice(rangeIdx + 1);\n const [s, e] = range.split('-').map(v => parseInt(v, 10));\n if (!Number.isFinite(s) || !Number.isFinite(e)) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid read range '${range}'`);\n }\n commands.push({ name: 'read', arguments: { path, start: s, end: e } });\n }\n\n private handleFinish(parts: string[], ctx: LineContext, acc: Map<string, number[][]> | null) {\n // finish file1:1-10,20-30 file2:5-7\n const map = acc ?? new Map<string, number[][]>();\n const args = parts.slice(1);\n for (const token of args) {\n const [path, rangesText] = token.split(':', 2);\n if (!path || !rangesText) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid finish token '${token}'`);\n }\n const rangeSpecs = rangesText.split(',').filter(Boolean);\n for (const spec of rangeSpecs) {\n const [s, e] = spec.split('-').map(v => parseInt(v, 10));\n if (!Number.isFinite(s) || !Number.isFinite(e) || e < s) {\n throw new LLMResponseParseError(`Line ${ctx.lineNumber}: invalid range '${spec}'`);\n }\n const arr = map.get(path) ?? 
[];\n arr.push([s, e]);\n map.set(path, arr);\n }\n }\n return map;\n }\n}\n\n\n"],"mappings":";AAGO,IAAM,wBAAN,cAAoC,MAAM;AAAA,EAC/C,YAAY,SAAiB;AAC3B,UAAM,OAAO;AACb,SAAK,OAAO;AAAA,EACd;AACF;AAIO,IAAM,oBAAN,MAAwB;AAAA,EACZ,oBAAoB;AAAA,EAErC,MAAM,MAA0B;AAC9B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,IAAI,UAAU,gCAAgC;AAAA,IACtD;AACA,UAAM,QAAQ,KAAK,MAAM,OAAO,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AACnD,UAAM,WAAuB,CAAC;AAC9B,QAAI,oBAAoD;AAExD,UAAM,QAAQ,CAAC,MAAM,QAAQ;AAC3B,UAAI,CAAC,QAAQ,KAAK,WAAW,GAAG,EAAG;AACnC,YAAM,MAAmB,EAAE,YAAY,MAAM,GAAG,KAAK,KAAK;AAC1D,YAAM,QAAQ,KAAK,UAAU,MAAM,GAAG;AACtC,UAAI,MAAM,WAAW,EAAG;AACxB,YAAM,MAAM,MAAM,CAAC;AACnB,cAAQ,KAAK;AAAA,QACX,KAAK;AACH,eAAK,cAAc,OAAO,KAAK,QAAQ;AACvC;AAAA,QACF,KAAK;AACH,eAAK,WAAW,OAAO,KAAK,QAAQ;AACpC;AAAA,QACF,KAAK;AACH,eAAK,WAAW,OAAO,KAAK,QAAQ;AACpC;AAAA,QACF,KAAK;AACH,8BAAoB,KAAK,aAAa,OAAO,KAAK,iBAAiB;AACnE;AAAA,QACF;AACE,gBAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,0BAA0B,GAAG,GAAG;AAAA,MAC1F;AAAA,IACF,CAAC;AAED,QAAI,mBAAmB;AACrB,YAAM,MAAM;AACZ,YAAM,UAAU,CAAC,GAAG,IAAI,QAAQ,CAAC;AACjC,YAAM,eAAe,QAAQ,IAAI,CAAC,CAAC,MAAM,MAAM,OAAO;AAAA,QACpD;AAAA,QACA,OAAO,CAAC,GAAG,MAAM,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;AAAA,MAC/C,EAAE;AACF,eAAS,KAAK,EAAE,MAAM,UAAU,WAAW,EAAE,OAAO,aAAa,EAAE,CAAC;AAAA,IACtE;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,UAAU,MAAc,KAA4B;AAC1D,QAAI;AAEF,YAAM,QAAkB,CAAC;AACzB,UAAI,UAAU;AACd,UAAI,WAAW;AACf,eAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,cAAM,KAAK,KAAK,CAAC;AACjB,YAAI,OAAO,OAAO,KAAK,IAAI,CAAC,MAAM,MAAM;AACtC,qBAAW,CAAC;AACZ,qBAAW;AAAA,QACb,WAAW,CAAC,YAAY,KAAK,KAAK,EAAE,GAAG;AACrC,cAAI,SAAS;AACX,kBAAM,KAAK,OAAO;AAClB,sBAAU;AAAA,UACZ;AAAA,QACF,OAAO;AACL,qBAAW;AAAA,QACb;AAAA,MACF;AACA,UAAI,QAAS,OAAM,KAAK,OAAO;AAC/B,aAAO;AAAA,IACT,QAAQ;AACN,YAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,yBAAyB;AAAA,IACjF;AAAA,EACF;AAAA,EAEQ,cAAc,OAAiB,KAAkB,UAAsB;AAE7E,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,2BAA2B;AAAA,IACnF;AACA,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,UAAU,MAAM,CAAC,GAAG,QAAQ,UAAU,EAAE,KAAK;AACnD,aAAS,KAAK,EAAE,MAAM,WAAW,WAAW,EAAE,MAAM,QAAQ,EAAE,CAAC;AAAA,EACjE;AAAA;AAAA,EAIQ,WAAW,OAAiB,KAAkB,UAAsB;AAE1E,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,wCAAwC;AAAA,IAChG;AACA,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,CAAC,IAAI,WAAW,GAAG,KAAK,CAAC,IAAI,SAAS,GAAG,GAAG;AAC9C,YAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,sCAAsC;AAAA,IAC9F;AACA,aAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,SAAS,IAAI,MAAM,GAAG,EAAE,GAAG,MAAM,MAAM,CAAC,EAAE,EAAE,CAAC;AAAA,EAC1F;AAAA,EAEQ,WAAW,OAAiB,KAAkB,UAAsB;AAE1E,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,8CAA8C;AAAA,IACtG;AACA,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,WAAW,KAAK,QAAQ,GAAG;AACjC,QAAI,aAAa,IAAI;AACnB,eAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,KAAK,EAAE,CAAC;AACzD;AAAA,IACF;AACA,UAAM,OAAO,KAAK,MAAM,GAAG,QAAQ;AACnC,UAAM,QAAQ,KAAK,MAAM,WAAW,CAAC;AACrC,UAAM,CAAC,GAAG,CAAC,IAAI,MAAM,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,GAAG,EAAE,CAAC;AACxD,QAAI,CAAC,OAAO,SAAS,CAAC,KAAK,CAAC,OAAO,SAAS,CAAC,GAAG;AAC9C,YAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,yBAAyB,KAAK,GAAG;AAAA,IACzF;AACA,aAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,EAAE,MAAM,OAAO,GAAG,KAAK,EAAE,EAAE,CAAC;AAAA,EACvE;AAAA,EAEQ,aAAa,OAAiB,KAAkB,KAAqC;AAE3F,UAAM,MAAM,OAAO,oBAAI,IAAwB;AAC/C,UAAM,OAAO,MAAM,MAAM,CAAC;AAC1B,eAAW,SAAS,MAAM;AACxB,YAAM,CAAC,MAAM,UAAU,IAAI,MAAM,MAAM,KAAK,CAAC;AAC7C,UAAI,CAAC,QAAQ,CAAC,YAAY;AACxB,cAAM,IAAI,sBAAsB,QAAQ,IAAI,UAAU,2BAA2B,KAAK,GAAG;AAAA,MAC3F;AACA,YAAM,aAAa,WAAW,MAAM,GAAG,EAAE,OAAO,OAAO;AACvD,iBAAW,QAAQ,YAAY;AAC7B,cAAM,CAAC,GAAG,CAAC,IAAI,KAAK,MAAM,GAAG,EAAE,IAAI,OAAK,SAAS,GAAG,EAAE,CAAC;AACvD,YAAI,CAAC,OAAO,SAAS,CAAC,KAAK,CAAC,OAAO,SAAS,CAAC,KAAK,IAAI,GAAG;AACvD,gBAAM,IAAI,sBAAsB,QAAQ,
IAAI,UAAU,oBAAoB,IAAI,GAAG;AAAA,QACnF;AACA,cAAM,MAAM,IAAI,IAAI,IAAI,KAAK,CAAC;AAC9B,YAAI,KAAK,CAAC,GAAG,CAAC,CAAC;AACf,YAAI,IAAI,MAAM,GAAG;AAAA,MACnB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;","names":[]}
@@ -1,85 +0,0 @@
- // tools/warp_grep/agent/prompt.ts
- var SYSTEM_PROMPT = `You are a code search agent. Your task is to find relevant code snippets based on a search query.
-
- <workflow>
- You operate in exactly 3 rounds of tool exploration, followed by a final answer:
-
- 1. In each round, you can make MULTIPLE tool calls (up to 8) to search in parallel. All tool results will be returned together after each round.
- 2. After your third round of tool calls, your next turn MUST be a single call to the \`finish\` tool with all the context you have found.
- </workflow>
-
- <tool_calling>
- You have tools at your disposal to solve the coding task. Follow these rules regarding tool calls:
-
- ### 1. \`analyse\` - Explore Directories
- Explore directory structure in a tree-like format.
- **Syntax:** \`analyse <path> [pattern]\`
- - \`<path>\`: Directory path to analyze (defaults to \`.\`)
- - \`[pattern]\`: Optional regex pattern to filter names
-
- For example:
- \`\`\`
- analyse src/api
- analyse . "test"
- \`\`\`
-
- ### 2. \`read\` - Read File Contents
- Read entire files or specific line ranges.
- **Syntax:** \`read <path>[:start-end]\`
- - \`<path>\`: File path to read
- - \`[:start-end]\`: Optional 1-based, inclusive line range
-
- For example:
- \`\`\`
- read src/main.py
- read src/database/connection.py:10-50
- \`\`\`
-
- ### 3. \`grep\` - Search with Regex
- Search for regex patterns across files using ripgrep.
- **Syntax:** \`grep '<pattern>' <path>\`
- - \`'<pattern>'\`: Regex pattern (always wrap in single quotes)
- - \`<path>\`: Directory or file to search (use \`.\` for the repo root)
-
- For example:
- \`\`\`
- grep 'create_user' .
- grep 'import.*requests' src/api
- grep 'class\\\\s+AuthService' controllers/auth.py
- \`\`\`
-
- ### 4. \`finish\` - Submit Final Answer
- Submit your findings when complete.
- **Syntax:** \`finish <file1:range1,range2...> [file2:range3...]\`
- - Provide file paths with colon-separated, comma-separated line ranges
-
- For example:
- \`\`\`
- finish src/api/auth.py:25-50,75-80 src/models/user.py:10-15
- \`\`\`
- </tool_calling>
-
- <strategy>
- - Use the \`analyse\`, \`grep\`, and \`read\` tools to gather information about the codebase.
- - Leverage the tools smartly to make full use of their potential
- - Make parallel tool calls within each round to investigate multiple paths or files efficiently
- - Be systematic and thorough within your 3-round limit
- </strategy>
-
- <output_format>
- - Only output tool calls themselves
- - Do not include explanatory text, reasoning, or commentary
- - Each tool call should be on its own line
- - After 3 rounds of exploration, call \`finish\` with all relevant code snippets you found
- </output_format>
-
- Begin your exploration now to find code relevant to the query.`;
- function getSystemPrompt() {
-   return SYSTEM_PROMPT;
- }
-
- export {
-   SYSTEM_PROMPT,
-   getSystemPrompt
- };
- //# sourceMappingURL=chunk-HKZB23U7.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../tools/warp_grep/agent/prompt.ts"],"sourcesContent":["export const SYSTEM_PROMPT = `You are a code search agent. Your task is to find relevant code snippets based on a search query.\n\n<workflow>\nYou operate in exactly 3 rounds of tool exploration, followed by a final answer:\n\n1. In each round, you can make MULTIPLE tool calls (up to 8) to search in parallel. All tool results will be returned together after each round.\n2. After your third round of tool calls, your next turn MUST be a single call to the \\`finish\\` tool with all the context you have found.\n</workflow>\n\n<tool_calling>\nYou have tools at your disposal to solve the coding task. Follow these rules regarding tool calls:\n\n### 1. \\`analyse\\` - Explore Directories\nExplore directory structure in a tree-like format.\n**Syntax:** \\`analyse <path> [pattern]\\`\n- \\`<path>\\`: Directory path to analyze (defaults to \\`.\\`)\n- \\`[pattern]\\`: Optional regex pattern to filter names\n\nFor example:\n\\`\\`\\`\nanalyse src/api\nanalyse . \"test\"\n\\`\\`\\`\n\n### 2. \\`read\\` - Read File Contents\nRead entire files or specific line ranges.\n**Syntax:** \\`read <path>[:start-end]\\`\n- \\`<path>\\`: File path to read\n- \\`[:start-end]\\`: Optional 1-based, inclusive line range\n\nFor example:\n\\`\\`\\`\nread src/main.py\nread src/database/connection.py:10-50\n\\`\\`\\`\n\n### 3. \\`grep\\` - Search with Regex\nSearch for regex patterns across files using ripgrep.\n**Syntax:** \\`grep '<pattern>' <path>\\`\n- \\`'<pattern>'\\`: Regex pattern (always wrap in single quotes)\n- \\`<path>\\`: Directory or file to search (use \\`.\\` for the repo root)\n\nFor example:\n\\`\\`\\`\ngrep 'create_user' .\ngrep 'import.*requests' src/api\ngrep 'class\\\\\\\\s+AuthService' controllers/auth.py\n\\`\\`\\`\n\n### 4. \\`finish\\` - Submit Final Answer\nSubmit your findings when complete.\n**Syntax:** \\`finish <file1:range1,range2...> [file2:range3...]\\`\n- Provide file paths with colon-separated, comma-separated line ranges\n\nFor example:\n\\`\\`\\`\nfinish src/api/auth.py:25-50,75-80 src/models/user.py:10-15\n\\`\\`\\`\n</tool_calling>\n\n<strategy>\n- Use the \\`analyse\\`, \\`grep\\`, and \\`read\\` tools to gather information about the codebase.\n- Leverage the tools smartly to make full use of their potential\n- Make parallel tool calls within each round to investigate multiple paths or files efficiently\n- Be systematic and thorough within your 3-round limit\n</strategy>\n\n<output_format>\n- Only output tool calls themselves\n- Do not include explanatory text, reasoning, or commentary\n- Each tool call should be on its own line\n- After 3 rounds of exploration, call \\`finish\\` with all relevant code snippets you found\n</output_format>\n\nBegin your exploration now to find code relevant to the query.`;\n\nexport function getSystemPrompt(): string {\n\treturn SYSTEM_PROMPT;\n}\n\n\n"],"mappings":";AAAO,IAAM,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA4EtB,SAAS,kBAA0B;AACzC,SAAO;AACR;","names":[]}
@@ -1,54 +0,0 @@
- import {
-   runWarpGrep
- } from "./chunk-W3XLPMV3.js";
- import {
-   LocalRipgrepProvider
- } from "./chunk-UYBIKZPM.js";
-
- // tools/warp_grep/anthropic.ts
- import { z } from "zod";
- var INPUT_SCHEMA = z.object({
-   query: z.string().describe("Free-form repository question")
- });
- function createMorphWarpGrepTool(config) {
-   const tool = {
-     name: "morph-warp-grep",
-     description: config.description ?? "A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.",
-     input_schema: {
-       type: "object",
-       properties: {
-         query: { type: "string", description: "Free-form repository question" }
-       },
-       required: ["query"]
-     }
-   };
-   return Object.assign(tool, {
-     execute: async (input) => {
-       const parsed = INPUT_SCHEMA.parse(typeof input === "string" ? JSON.parse(input) : input);
-       const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
-       const result = await runWarpGrep({
-         query: parsed.query,
-         repoRoot: config.repoRoot,
-         provider,
-         excludes: config.excludes,
-         includes: config.includes,
-         debug: config.debug ?? false,
-         apiKey: config.apiKey
-       });
-       const finish = result.finish;
-       if (result.terminationReason !== "completed" || !finish?.metadata) {
-         return { success: false, error: "Search did not complete", messages: result.messages };
-       }
-       const contexts = (finish.resolved ?? []).map((r) => ({
-         file: r.path,
-         content: r.content
-       }));
-       return { success: true, contexts, summary: finish.payload };
-     }
-   });
- }
-
- export {
-   createMorphWarpGrepTool
- };
- //# sourceMappingURL=chunk-IQHKEIQX.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../tools/warp_grep/anthropic.ts"],"sourcesContent":["import type Anthropic from '@anthropic-ai/sdk';\nimport { z } from 'zod';\nimport { runWarpGrep } from './agent/runner.js';\nimport type { WarpGrepProvider } from './providers/types.js';\nimport { LocalRipgrepProvider } from './providers/local.js';\nimport { AGENT_CONFIG } from './agent/config.js';\n\nexport type WarpGrepConfig = {\n repoRoot: string;\n provider?: WarpGrepProvider;\n excludes?: string[];\n includes?: string[];\n debug?: boolean;\n apiKey?: string;\n description?: string;\n};\n\nconst INPUT_SCHEMA = z.object({\n query: z.string().describe('Free-form repository question'),\n});\n\nexport function createMorphWarpGrepTool(config: WarpGrepConfig) {\n const tool: any = {\n name: 'morph-warp-grep',\n description: config.description ?? 'A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.',\n input_schema: {\n type: 'object',\n properties: {\n query: { type: 'string', description: 'Free-form repository question' },\n },\n required: ['query'],\n },\n };\n\n return Object.assign(tool, {\n execute: async (input: unknown) => {\n const parsed = INPUT_SCHEMA.parse(typeof input === 'string' ? JSON.parse(input) : input);\n const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);\n const result = await runWarpGrep({\n query: parsed.query,\n repoRoot: config.repoRoot,\n provider,\n excludes: config.excludes,\n includes: config.includes,\n debug: config.debug ?? false,\n apiKey: config.apiKey,\n });\n const finish = result.finish;\n if (result.terminationReason !== 'completed' || !finish?.metadata) {\n return { success: false, error: 'Search did not complete', messages: result.messages };\n }\n const contexts = (finish.resolved ?? []).map(r => ({\n file: r.path,\n content: r.content,\n }));\n return { success: true, contexts, summary: finish.payload };\n },\n });\n}\n\n\n"],"mappings":";;;;;;;;AACA,SAAS,SAAS;AAgBlB,IAAM,eAAe,EAAE,OAAO;AAAA,EAC5B,OAAO,EAAE,OAAO,EAAE,SAAS,+BAA+B;AAC5D,CAAC;AAEM,SAAS,wBAAwB,QAAwB;AAC9D,QAAM,OAAY;AAAA,IAChB,MAAM;AAAA,IACN,aAAa,OAAO,eAAe;AAAA,IACnC,cAAc;AAAA,MACZ,MAAM;AAAA,MACN,YAAY;AAAA,QACV,OAAO,EAAE,MAAM,UAAU,aAAa,gCAAgC;AAAA,MACxE;AAAA,MACA,UAAU,CAAC,OAAO;AAAA,IACpB;AAAA,EACF;AAEA,SAAO,OAAO,OAAO,MAAM;AAAA,IACzB,SAAS,OAAO,UAAmB;AACjC,YAAM,SAAS,aAAa,MAAM,OAAO,UAAU,WAAW,KAAK,MAAM,KAAK,IAAI,KAAK;AACvF,YAAM,WAAW,OAAO,YAAY,IAAI,qBAAqB,OAAO,UAAU,OAAO,QAAQ;AAC7F,YAAM,SAAS,MAAM,YAAY;AAAA,QAC/B,OAAO,OAAO;AAAA,QACd,UAAU,OAAO;AAAA,QACjB;AAAA,QACA,UAAU,OAAO;AAAA,QACjB,UAAU,OAAO;AAAA,QACjB,OAAO,OAAO,SAAS;AAAA,QACvB,QAAQ,OAAO;AAAA,MACjB,CAAC;AACD,YAAM,SAAS,OAAO;AACtB,UAAI,OAAO,sBAAsB,eAAe,CAAC,QAAQ,UAAU;AACjE,eAAO,EAAE,SAAS,OAAO,OAAO,2BAA2B,UAAU,OAAO,SAAS;AAAA,MACvF;AACA,YAAM,YAAY,OAAO,YAAY,CAAC,GAAG,IAAI,QAAM;AAAA,QACjD,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,MACb,EAAE;AACF,aAAO,EAAE,SAAS,MAAM,UAAU,SAAS,OAAO,QAAQ;AAAA,IAC5D;AAAA,EACF,CAAC;AACH;","names":[]}
@@ -1,45 +0,0 @@
- import {
-   runWarpGrep
- } from "./chunk-W3XLPMV3.js";
- import {
-   LocalRipgrepProvider
- } from "./chunk-UYBIKZPM.js";
-
- // tools/warp_grep/vercel.ts
- import { tool } from "ai";
- import { z } from "zod";
- function createMorphWarpGrepTool(config) {
-   const schema = z.object({
-     query: z.string().describe("Free-form repository question")
-   });
-   return tool({
-     description: config.description ?? "A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.",
-     inputSchema: schema,
-     execute: async (params) => {
-       const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
-       const result = await runWarpGrep({
-         query: params.query,
-         repoRoot: config.repoRoot,
-         provider,
-         excludes: config.excludes,
-         includes: config.includes,
-         debug: config.debug ?? false,
-         apiKey: config.apiKey
-       });
-       const finish = result.finish;
-       if (result.terminationReason !== "completed" || !finish?.metadata) {
-         return { success: false, error: "Search did not complete", messages: result.messages };
-       }
-       const contexts = (finish.resolved ?? []).map((r) => ({
-         file: r.path,
-         content: r.content
-       }));
-       return { success: true, contexts, summary: finish.payload };
-     }
-   });
- }
-
- export {
-   createMorphWarpGrepTool
- };
- //# sourceMappingURL=chunk-JKFVDM62.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../tools/warp_grep/vercel.ts"],"sourcesContent":["import { tool } from 'ai';\nimport { z } from 'zod';\nimport { runWarpGrep } from './agent/runner.js';\nimport type { WarpGrepProvider } from './providers/types.js';\nimport { LocalRipgrepProvider } from './providers/local.js';\nimport { AGENT_CONFIG } from './agent/config.js';\n\nexport type WarpGrepConfig = {\n repoRoot: string;\n provider?: WarpGrepProvider;\n excludes?: string[];\n includes?: string[];\n debug?: boolean;\n apiKey?: string;\n description?: string;\n};\n\nexport function createMorphWarpGrepTool(config: WarpGrepConfig) {\n const schema = z.object({\n query: z.string().describe('Free-form repository question'),\n });\n\n return tool({\n description: config.description ?? 'A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.',\n inputSchema: schema,\n execute: async (params) => {\n const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);\n const result = await runWarpGrep({\n query: params.query,\n repoRoot: config.repoRoot,\n provider,\n excludes: config.excludes,\n includes: config.includes,\n debug: config.debug ?? false,\n apiKey: config.apiKey,\n });\n const finish = result.finish;\n if (result.terminationReason !== 'completed' || !finish?.metadata) {\n return { success: false, error: 'Search did not complete', messages: result.messages };\n }\n const contexts = (finish.resolved ?? []).map(r => ({\n file: r.path,\n content: r.content,\n }));\n return { success: true, contexts, summary: finish.payload };\n },\n });\n}\n"],"mappings":";;;;;;;;AAAA,SAAS,YAAY;AACrB,SAAS,SAAS;AAgBX,SAAS,wBAAwB,QAAwB;AAC9D,QAAM,SAAS,EAAE,OAAO;AAAA,IACtB,OAAO,EAAE,OAAO,EAAE,SAAS,+BAA+B;AAAA,EAC5D,CAAC;AAED,SAAO,KAAK;AAAA,IACV,aAAa,OAAO,eAAe;AAAA,IACnC,aAAa;AAAA,IACb,SAAS,OAAO,WAAW;AACzB,YAAM,WAAW,OAAO,YAAY,IAAI,qBAAqB,OAAO,UAAU,OAAO,QAAQ;AAC7F,YAAM,SAAS,MAAM,YAAY;AAAA,QAC/B,OAAO,OAAO;AAAA,QACd,UAAU,OAAO;AAAA,QACjB;AAAA,QACA,UAAU,OAAO;AAAA,QACjB,UAAU,OAAO;AAAA,QACjB,OAAO,OAAO,SAAS;AAAA,QACvB,QAAQ,OAAO;AAAA,MACjB,CAAC;AACD,YAAM,SAAS,OAAO;AACtB,UAAI,OAAO,sBAAsB,eAAe,CAAC,QAAQ,UAAU;AACjE,eAAO,EAAE,SAAS,OAAO,OAAO,2BAA2B,UAAU,OAAO,SAAS;AAAA,MACvF;AACA,YAAM,YAAY,OAAO,YAAY,CAAC,GAAG,IAAI,QAAM;AAAA,QACjD,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,MACb,EAAE;AACF,aAAO,EAAE,SAAS,MAAM,UAAU,SAAS,OAAO,QAAQ;AAAA,IAC5D;AAAA,EACF,CAAC;AACH;","names":[]}
@@ -1,57 +0,0 @@
- import {
-   runWarpGrep
- } from "./chunk-W3XLPMV3.js";
- import {
-   LocalRipgrepProvider
- } from "./chunk-UYBIKZPM.js";
-
- // tools/warp_grep/openai.ts
- import { z } from "zod";
- var INPUT_SCHEMA = z.object({
-   query: z.string().describe("Free-form repository question")
- });
- function createMorphWarpGrepTool(config) {
-   const tool = {
-     type: "function",
-     function: {
-       name: "morph-warp-grep",
-       description: config.description ?? "A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.",
-       parameters: {
-         type: "object",
-         properties: {
-           query: { type: "string", description: "Free-form repository question" }
-         },
-         required: ["query"]
-       }
-     }
-   };
-   return Object.assign(tool, {
-     execute: async (input) => {
-       const parsed = INPUT_SCHEMA.parse(typeof input === "string" ? JSON.parse(input) : input);
-       const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
-       const result = await runWarpGrep({
-         query: parsed.query,
-         repoRoot: config.repoRoot,
-         provider,
-         excludes: config.excludes,
-         includes: config.includes,
-         debug: config.debug ?? false,
-         apiKey: config.apiKey
-       });
-       const finish = result.finish;
-       if (result.terminationReason !== "completed" || !finish?.metadata) {
-         return { success: false, error: "Search did not complete", messages: result.messages };
-       }
-       const contexts = (finish.resolved ?? []).map((r) => ({
-         file: r.path,
-         content: r.content
-       }));
-       return { success: true, contexts, summary: finish.payload };
-     }
-   });
- }
-
- export {
-   createMorphWarpGrepTool
- };
- //# sourceMappingURL=chunk-KL4YVZRF.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../tools/warp_grep/openai.ts"],"sourcesContent":["import type { ChatCompletionTool } from 'openai/resources/chat/completions';\nimport { z } from 'zod';\nimport { runWarpGrep } from './agent/runner.js';\nimport type { WarpGrepProvider } from './providers/types.js';\nimport { LocalRipgrepProvider } from './providers/local.js';\nimport { AGENT_CONFIG } from './agent/config.js';\n\nexport type WarpGrepConfig = {\n repoRoot: string;\n provider?: WarpGrepProvider;\n excludes?: string[];\n includes?: string[];\n debug?: boolean;\n apiKey?: string; // Morph API key (defaults to env)\n description?: string;\n};\n\nconst INPUT_SCHEMA = z.object({\n query: z.string().describe('Free-form repository question'),\n});\n\nexport function createMorphWarpGrepTool(config: WarpGrepConfig) {\n const tool: ChatCompletionTool = {\n type: 'function',\n function: {\n name: 'morph-warp-grep',\n description: config.description ?? 'A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.',\n parameters: {\n type: 'object',\n properties: {\n query: { type: 'string', description: 'Free-form repository question' },\n },\n required: ['query'],\n },\n },\n };\n\n return Object.assign(tool, {\n execute: async (input: unknown) => {\n const parsed = INPUT_SCHEMA.parse(typeof input === 'string' ? JSON.parse(input) : input);\n const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);\n const result = await runWarpGrep({\n query: parsed.query,\n repoRoot: config.repoRoot,\n provider,\n excludes: config.excludes,\n includes: config.includes,\n debug: config.debug ?? false,\n apiKey: config.apiKey,\n });\n const finish = result.finish;\n if (result.terminationReason !== 'completed' || !finish?.metadata) {\n return { success: false, error: 'Search did not complete', messages: result.messages };\n }\n const contexts = (finish.resolved ?? []).map(r => ({\n file: r.path,\n content: r.content,\n }));\n return { success: true, contexts, summary: finish.payload };\n },\n });\n}\n\n\n"],"mappings":";;;;;;;;AACA,SAAS,SAAS;AAgBlB,IAAM,eAAe,EAAE,OAAO;AAAA,EAC5B,OAAO,EAAE,OAAO,EAAE,SAAS,+BAA+B;AAC5D,CAAC;AAEM,SAAS,wBAAwB,QAAwB;AAC9D,QAAM,OAA2B;AAAA,IAC/B,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM;AAAA,MACN,aAAa,OAAO,eAAe;AAAA,MACnC,YAAY;AAAA,QACV,MAAM;AAAA,QACN,YAAY;AAAA,UACV,OAAO,EAAE,MAAM,UAAU,aAAa,gCAAgC;AAAA,QACxE;AAAA,QACA,UAAU,CAAC,OAAO;AAAA,MACpB;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,OAAO,MAAM;AAAA,IACzB,SAAS,OAAO,UAAmB;AACjC,YAAM,SAAS,aAAa,MAAM,OAAO,UAAU,WAAW,KAAK,MAAM,KAAK,IAAI,KAAK;AACvF,YAAM,WAAW,OAAO,YAAY,IAAI,qBAAqB,OAAO,UAAU,OAAO,QAAQ;AAC7F,YAAM,SAAS,MAAM,YAAY;AAAA,QAC/B,OAAO,OAAO;AAAA,QACd,UAAU,OAAO;AAAA,QACjB;AAAA,QACA,UAAU,OAAO;AAAA,QACjB,UAAU,OAAO;AAAA,QACjB,OAAO,OAAO,SAAS;AAAA,QACvB,QAAQ,OAAO;AAAA,MACjB,CAAC;AACD,YAAM,SAAS,OAAO;AACtB,UAAI,OAAO,sBAAsB,eAAe,CAAC,QAAQ,UAAU;AACjE,eAAO,EAAE,SAAS,OAAO,OAAO,2BAA2B,UAAU,OAAO,SAAS;AAAA,MACvF;AACA,YAAM,YAAY,OAAO,YAAY,CAAC,GAAG,IAAI,QAAM;AAAA,QACjD,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,MACb,EAAE;AACF,aAAO,EAAE,SAAS,MAAM,UAAU,SAAS,OAAO,QAAQ;AAAA,IAC5D;AAAA,EACF,CAAC;AACH;","names":[]}
@@ -1,104 +0,0 @@
- import {
-   CodebaseSearchClient
- } from "./chunk-WM77HRKO.js";
- import {
-   FastApplyClient
- } from "./chunk-64PMM72R.js";
- import {
-   BrowserClient
- } from "./chunk-EYHXBQQX.js";
- import {
-   MorphGit
- } from "./chunk-GDR65N2J.js";
- import {
-   AnthropicRouter,
-   GeminiRouter,
-   OpenAIRouter,
-   RawRouter
- } from "./chunk-CP4NZGRY.js";
-
- // client.ts
- var MorphClient = class {
-   /** Client configuration */
-   config;
-   /** FastApply tool for editing files with AI-powered merge */
-   fastApply;
-   /** CodebaseSearch tool for semantic code search */
-   codebaseSearch;
-   /** Browser tool for AI-powered browser automation */
-   browser;
-   /** Git tool for version control operations */
-   git;
-   /** Model routers for intelligent model selection */
-   routers;
-   /**
-    * Create a new Morph SDK client
-    *
-    * @param config - Client configuration (apiKey, debug, timeout, retryConfig)
-    *
-    * @example
-    * ```typescript
-    * const morph = new MorphClient({
-    * apiKey: process.env.MORPH_API_KEY,
-    * debug: true,
-    * timeout: 60000
-    * });
-    * ```
-    */
-   constructor(config = {}) {
-     this.config = config;
-     this.fastApply = new FastApplyClient({
-       apiKey: config.apiKey,
-       debug: config.debug,
-       timeout: config.timeout,
-       retryConfig: config.retryConfig
-     });
-     this.codebaseSearch = new CodebaseSearchClient({
-       apiKey: config.apiKey,
-       debug: config.debug,
-       timeout: config.timeout,
-       retryConfig: config.retryConfig
-     });
-     this.browser = new BrowserClient({
-       apiKey: config.apiKey,
-       debug: config.debug,
-       timeout: config.timeout,
-       retryConfig: config.retryConfig
-     });
-     this.git = new MorphGit({
-       apiKey: config.apiKey,
-       retryConfig: config.retryConfig
-     });
-     this.routers = {
-       openai: new OpenAIRouter({
-         apiKey: config.apiKey,
-         debug: config.debug,
-         timeout: config.timeout,
-         retryConfig: config.retryConfig
-       }),
-       anthropic: new AnthropicRouter({
-         apiKey: config.apiKey,
-         debug: config.debug,
-         timeout: config.timeout,
-         retryConfig: config.retryConfig
-       }),
-       gemini: new GeminiRouter({
-         apiKey: config.apiKey,
-         debug: config.debug,
-         timeout: config.timeout,
-         retryConfig: config.retryConfig
-       }),
-       raw: new RawRouter({
-         apiKey: config.apiKey,
-         debug: config.debug,
-         timeout: config.timeout,
-         retryConfig: config.retryConfig
-       })
-     };
-   }
- };
-
- export {
-   MorphClient
- };
- //# sourceMappingURL=chunk-SMR2T5BT.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../client.ts"],"sourcesContent":["/**\n * Unified Morph SDK Client\n * \n * Provides access to all Morph tools through a single interface\n * \n * @example\n * ```typescript\n * import { MorphClient } from '@cuda_oom/morphsdk';\n * \n * const morph = new MorphClient({ \n * apiKey: process.env.MORPH_API_KEY,\n * debug: true,\n * timeout: 60000\n * });\n * \n * // Use FastApply\n * const editResult = await morph.fastApply.execute({\n * target_filepath: 'src/index.ts',\n * instructions: 'Add error handling',\n * code_edit: 'try { ... } catch (e) { ... }'\n * });\n * \n * // Use CodebaseSearch\n * const searchResult = await morph.codebaseSearch.search({\n * query: 'authentication logic',\n * repoId: 'my-project'\n * });\n * \n * // Use Browser automation\n * const browserResult = await morph.browser.execute({\n * task: 'Test the checkout flow',\n * url: 'https://example.com'\n * });\n * \n * // Use Model Router\n * const { model } = await morph.routers.openai.selectModel({\n * input: 'Complex refactoring task',\n * mode: 'balanced'\n * });\n * ```\n */\n\nimport type { RetryConfig } from './tools/utils/resilience.js';\nimport { FastApplyClient } from './tools/fastapply/core.js';\nimport { CodebaseSearchClient } from './tools/codebase_search/core.js';\nimport { BrowserClient } from './tools/browser/core.js';\nimport { MorphGit } from './git/index.js';\nimport { OpenAIRouter, AnthropicRouter, GeminiRouter, RawRouter } from './modelrouter/core.js';\n\n/**\n * Configuration for the MorphClient\n */\nexport interface MorphClientConfig {\n /** Morph API key for authentication (defaults to MORPH_API_KEY env var) */\n apiKey?: string;\n /** Enable debug logging across all tools */\n debug?: boolean;\n /** Default timeout in milliseconds for API requests */\n timeout?: number;\n /** Retry configuration for failed requests */\n retryConfig?: RetryConfig;\n}\n\n/**\n * Unified Morph SDK Client\n * \n * Provides access to all Morph tools through a single interface:\n * - fastApply: AI-powered file editing with intelligent merging\n * - codebaseSearch: Semantic code search\n * - browser: AI-powered browser automation\n * - git: Version control operations\n * - routers: Intelligent model selection (OpenAI, Anthropic, Gemini)\n */\nexport class MorphClient {\n /** Client configuration */\n public config: MorphClientConfig;\n\n /** FastApply tool for editing files with AI-powered merge */\n public fastApply: FastApplyClient;\n\n /** CodebaseSearch tool for semantic code search */\n public codebaseSearch: CodebaseSearchClient;\n\n /** Browser tool for AI-powered browser automation */\n public browser: BrowserClient;\n\n /** Git tool for version control operations */\n public git: MorphGit;\n\n /** Model routers for intelligent model selection */\n public routers: {\n openai: OpenAIRouter;\n anthropic: AnthropicRouter;\n gemini: GeminiRouter;\n raw: RawRouter;\n };\n\n /**\n * Create a new Morph SDK client\n * \n * @param config - Client configuration (apiKey, debug, timeout, retryConfig)\n * \n * @example\n * ```typescript\n * const morph = new MorphClient({ \n * apiKey: process.env.MORPH_API_KEY,\n * debug: true,\n * timeout: 60000\n * });\n * ```\n */\n constructor(config: MorphClientConfig = {}) {\n this.config = config;\n\n // Initialize all sub-clients with shared config\n this.fastApply = new FastApplyClient({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n });\n\n this.codebaseSearch = new CodebaseSearchClient({\n 
apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n });\n\n this.browser = new BrowserClient({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n });\n\n this.git = new MorphGit({\n apiKey: config.apiKey,\n retryConfig: config.retryConfig,\n });\n\n this.routers = {\n openai: new OpenAIRouter({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n }),\n anthropic: new AnthropicRouter({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n }),\n gemini: new GeminiRouter({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n }),\n raw: new RawRouter({\n apiKey: config.apiKey,\n debug: config.debug,\n timeout: config.timeout,\n retryConfig: config.retryConfig,\n }),\n };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAyEO,IAAM,cAAN,MAAkB;AAAA;AAAA,EAEhB;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqBP,YAAY,SAA4B,CAAC,GAAG;AAC1C,SAAK,SAAS;AAGd,SAAK,YAAY,IAAI,gBAAgB;AAAA,MACnC,QAAQ,OAAO;AAAA,MACf,OAAO,OAAO;AAAA,MACd,SAAS,OAAO;AAAA,MAChB,aAAa,OAAO;AAAA,IACtB,CAAC;AAED,SAAK,iBAAiB,IAAI,qBAAqB;AAAA,MAC7C,QAAQ,OAAO;AAAA,MACf,OAAO,OAAO;AAAA,MACd,SAAS,OAAO;AAAA,MAChB,aAAa,OAAO;AAAA,IACtB,CAAC;AAED,SAAK,UAAU,IAAI,cAAc;AAAA,MAC/B,QAAQ,OAAO;AAAA,MACf,OAAO,OAAO;AAAA,MACd,SAAS,OAAO;AAAA,MAChB,aAAa,OAAO;AAAA,IACtB,CAAC;AAED,SAAK,MAAM,IAAI,SAAS;AAAA,MACtB,QAAQ,OAAO;AAAA,MACf,aAAa,OAAO;AAAA,IACtB,CAAC;AAED,SAAK,UAAU;AAAA,MACb,QAAQ,IAAI,aAAa;AAAA,QACvB,QAAQ,OAAO;AAAA,QACf,OAAO,OAAO;AAAA,QACd,SAAS,OAAO;AAAA,QAChB,aAAa,OAAO;AAAA,MACtB,CAAC;AAAA,MACD,WAAW,IAAI,gBAAgB;AAAA,QAC7B,QAAQ,OAAO;AAAA,QACf,OAAO,OAAO;AAAA,QACd,SAAS,OAAO;AAAA,QAChB,aAAa,OAAO;AAAA,MACtB,CAAC;AAAA,MACD,QAAQ,IAAI,aAAa;AAAA,QACvB,QAAQ,OAAO;AAAA,QACf,OAAO,OAAO;AAAA,QACd,SAAS,OAAO;AAAA,QAChB,aAAa,OAAO;AAAA,MACtB,CAAC;AAAA,MACD,KAAK,IAAI,UAAU;AAAA,QACjB,QAAQ,OAAO;AAAA,QACf,OAAO,OAAO;AAAA,QACd,SAAS,OAAO;AAAA,QAChB,aAAa,OAAO;AAAA,MACtB,CAAC;AAAA,IACH;AAAA,EACF;AACF;","names":[]}
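The sourcemap above carries the full `client.ts` source, including a JSDoc example for the unified client. Below is a condensed usage sketch based on that embedded example; note the embedded JSDoc imports from `@cuda_oom/morphsdk`, while the sketch assumes the package name used by this diff, and all argument values are placeholders.

```typescript
import { MorphClient } from "@morphllm/morphsdk";

async function main() {
  // One config object is fanned out to every sub-client
  // (fastApply, codebaseSearch, browser, git, routers), as shown in the constructor above.
  const morph = new MorphClient({
    apiKey: process.env.MORPH_API_KEY,
    debug: true,
    timeout: 60000
  });

  // AI-powered file edit (parameters taken from the embedded JSDoc example).
  const editResult = await morph.fastApply.execute({
    target_filepath: "src/index.ts",
    instructions: "Add error handling",
    code_edit: "try { ... } catch (e) { ... }"
  });

  // Intelligent model selection via the OpenAI router.
  const { model } = await morph.routers.openai.selectModel({
    input: "Complex refactoring task",
    mode: "balanced"
  });

  console.log(editResult, model);
}

main().catch(console.error);
```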
@@ -1 +0,0 @@
- //# sourceMappingURL=chunk-XYPMN4A3.js.map