@morphllm/morphsdk 0.2.21 → 0.2.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-DF2ZOO7R.js → chunk-5VQEQSJQ.js} +3 -52
- package/dist/chunk-5VQEQSJQ.js.map +1 -0
- package/dist/chunk-73RQWOQC.js +16 -0
- package/dist/chunk-73RQWOQC.js.map +1 -0
- package/dist/{chunk-2DXRTGRH.js → chunk-74ZHKB54.js} +1 -1
- package/dist/{chunk-2DXRTGRH.js.map → chunk-74ZHKB54.js.map} +1 -1
- package/dist/chunk-AFEPUNAO.js +15 -0
- package/dist/chunk-AFEPUNAO.js.map +1 -0
- package/dist/chunk-EAA7D24N.js +201 -0
- package/dist/chunk-EAA7D24N.js.map +1 -0
- package/dist/chunk-EK7OQPWD.js +44 -0
- package/dist/chunk-EK7OQPWD.js.map +1 -0
- package/dist/chunk-FSVBNZMU.js +44 -0
- package/dist/chunk-FSVBNZMU.js.map +1 -0
- package/dist/chunk-G2RSY56Q.js +11 -0
- package/dist/chunk-G2RSY56Q.js.map +1 -0
- package/dist/chunk-GTOXMAF2.js +140 -0
- package/dist/chunk-GTOXMAF2.js.map +1 -0
- package/dist/chunk-HKZB23U7.js +85 -0
- package/dist/chunk-HKZB23U7.js.map +1 -0
- package/dist/chunk-JZGU5UC6.js +53 -0
- package/dist/chunk-JZGU5UC6.js.map +1 -0
- package/dist/chunk-NDZO5IPV.js +121 -0
- package/dist/chunk-NDZO5IPV.js.map +1 -0
- package/dist/{chunk-34F3D6JD.js → chunk-NSQGPBMU.js} +9 -9
- package/dist/chunk-RSLIOCOE.js +26 -0
- package/dist/chunk-RSLIOCOE.js.map +1 -0
- package/dist/chunk-SMGZ6A64.js +53 -0
- package/dist/chunk-SMGZ6A64.js.map +1 -0
- package/dist/chunk-TICMYDII.js +81 -0
- package/dist/chunk-TICMYDII.js.map +1 -0
- package/dist/chunk-UYBIKZPM.js +135 -0
- package/dist/chunk-UYBIKZPM.js.map +1 -0
- package/dist/chunk-VBBJGWHY.js +73 -0
- package/dist/chunk-VBBJGWHY.js.map +1 -0
- package/dist/chunk-XQLKK2ZH.js +56 -0
- package/dist/chunk-XQLKK2ZH.js.map +1 -0
- package/dist/chunk-XYPMN4A3.js +1 -0
- package/dist/chunk-XYPMN4A3.js.map +1 -0
- package/dist/chunk-Z2FBMSNE.js +10 -0
- package/dist/chunk-Z2FBMSNE.js.map +1 -0
- package/dist/client.cjs +2 -51
- package/dist/client.cjs.map +1 -1
- package/dist/client.js +4 -4
- package/dist/git/client.cjs +2 -51
- package/dist/git/client.cjs.map +1 -1
- package/dist/git/client.js +1 -1
- package/dist/git/index.cjs +2 -51
- package/dist/git/index.cjs.map +1 -1
- package/dist/git/index.js +2 -2
- package/dist/git/types.cjs.map +1 -1
- package/dist/index.cjs +2 -51
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +10 -10
- package/dist/tools/warp_grep/agent/config.cjs +41 -0
- package/dist/tools/warp_grep/agent/config.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/config.js +12 -0
- package/dist/tools/warp_grep/agent/config.js.map +1 -0
- package/dist/tools/warp_grep/agent/formatter.cjs +106 -0
- package/dist/tools/warp_grep/agent/formatter.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/formatter.js +10 -0
- package/dist/tools/warp_grep/agent/formatter.js.map +1 -0
- package/dist/tools/warp_grep/agent/grep_helpers.cjs +148 -0
- package/dist/tools/warp_grep/agent/grep_helpers.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/grep_helpers.js +14 -0
- package/dist/tools/warp_grep/agent/grep_helpers.js.map +1 -0
- package/dist/tools/warp_grep/agent/parser.cjs +165 -0
- package/dist/tools/warp_grep/agent/parser.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/parser.js +10 -0
- package/dist/tools/warp_grep/agent/parser.js.map +1 -0
- package/dist/tools/warp_grep/agent/prompt.cjs +110 -0
- package/dist/tools/warp_grep/agent/prompt.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/prompt.js +10 -0
- package/dist/tools/warp_grep/agent/prompt.js.map +1 -0
- package/dist/tools/warp_grep/agent/runner.cjs +744 -0
- package/dist/tools/warp_grep/agent/runner.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/runner.js +17 -0
- package/dist/tools/warp_grep/agent/runner.js.map +1 -0
- package/dist/tools/warp_grep/agent/types.cjs +19 -0
- package/dist/tools/warp_grep/agent/types.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/types.js +2 -0
- package/dist/tools/warp_grep/agent/types.js.map +1 -0
- package/dist/tools/warp_grep/anthropic.cjs +977 -0
- package/dist/tools/warp_grep/anthropic.cjs.map +1 -0
- package/dist/tools/warp_grep/anthropic.js +22 -0
- package/dist/tools/warp_grep/anthropic.js.map +1 -0
- package/dist/tools/warp_grep/index.cjs +1136 -0
- package/dist/tools/warp_grep/index.cjs.map +1 -0
- package/dist/tools/warp_grep/index.js +48 -0
- package/dist/tools/warp_grep/index.js.map +1 -0
- package/dist/tools/warp_grep/openai.cjs +980 -0
- package/dist/tools/warp_grep/openai.cjs.map +1 -0
- package/dist/tools/warp_grep/openai.js +22 -0
- package/dist/tools/warp_grep/openai.js.map +1 -0
- package/dist/tools/warp_grep/providers/command.cjs +98 -0
- package/dist/tools/warp_grep/providers/command.cjs.map +1 -0
- package/dist/tools/warp_grep/providers/command.js +9 -0
- package/dist/tools/warp_grep/providers/command.js.map +1 -0
- package/dist/tools/warp_grep/providers/local.cjs +232 -0
- package/dist/tools/warp_grep/providers/local.cjs.map +1 -0
- package/dist/tools/warp_grep/providers/local.js +12 -0
- package/dist/tools/warp_grep/providers/local.js.map +1 -0
- package/dist/tools/warp_grep/providers/types.cjs +19 -0
- package/dist/tools/warp_grep/providers/types.cjs.map +1 -0
- package/dist/tools/warp_grep/providers/types.js +1 -0
- package/dist/tools/warp_grep/providers/types.js.map +1 -0
- package/dist/tools/warp_grep/tools/analyse.cjs +40 -0
- package/dist/tools/warp_grep/tools/analyse.cjs.map +1 -0
- package/dist/tools/warp_grep/tools/analyse.js +8 -0
- package/dist/tools/warp_grep/tools/analyse.js.map +1 -0
- package/dist/tools/warp_grep/tools/finish.cjs +69 -0
- package/dist/tools/warp_grep/tools/finish.cjs.map +1 -0
- package/dist/tools/warp_grep/tools/finish.js +10 -0
- package/dist/tools/warp_grep/tools/finish.js.map +1 -0
- package/dist/tools/warp_grep/tools/grep.cjs +35 -0
- package/dist/tools/warp_grep/tools/grep.cjs.map +1 -0
- package/dist/tools/warp_grep/tools/grep.js +12 -0
- package/dist/tools/warp_grep/tools/grep.js.map +1 -0
- package/dist/tools/warp_grep/tools/read.cjs +34 -0
- package/dist/tools/warp_grep/tools/read.cjs.map +1 -0
- package/dist/tools/warp_grep/tools/read.js +8 -0
- package/dist/tools/warp_grep/tools/read.js.map +1 -0
- package/dist/tools/warp_grep/utils/files.cjs +45 -0
- package/dist/tools/warp_grep/utils/files.cjs.map +1 -0
- package/dist/tools/warp_grep/utils/files.js +8 -0
- package/dist/tools/warp_grep/utils/files.js.map +1 -0
- package/dist/tools/warp_grep/utils/format.cjs +42 -0
- package/dist/tools/warp_grep/utils/format.cjs.map +1 -0
- package/dist/tools/warp_grep/utils/format.js +18 -0
- package/dist/tools/warp_grep/utils/format.js.map +1 -0
- package/dist/tools/warp_grep/utils/paths.cjs +91 -0
- package/dist/tools/warp_grep/utils/paths.cjs.map +1 -0
- package/dist/tools/warp_grep/utils/paths.js +16 -0
- package/dist/tools/warp_grep/utils/paths.js.map +1 -0
- package/dist/tools/warp_grep/utils/ripgrep.cjs +50 -0
- package/dist/tools/warp_grep/utils/ripgrep.cjs.map +1 -0
- package/dist/tools/warp_grep/utils/ripgrep.js +8 -0
- package/dist/tools/warp_grep/utils/ripgrep.js.map +1 -0
- package/dist/tools/warp_grep/vercel.cjs +968 -0
- package/dist/tools/warp_grep/vercel.cjs.map +1 -0
- package/dist/tools/warp_grep/vercel.js +22 -0
- package/dist/tools/warp_grep/vercel.js.map +1 -0
- package/package.json +23 -3
- package/dist/anthropic-CknfcMoO.d.ts +0 -64
- package/dist/chunk-DF2ZOO7R.js.map +0 -1
- package/dist/client.d.ts +0 -114
- package/dist/git/client.d.ts +0 -255
- package/dist/git/config.d.ts +0 -11
- package/dist/git/index.d.ts +0 -5
- package/dist/git/types.d.ts +0 -102
- package/dist/index.d.ts +0 -14
- package/dist/modelrouter/core.d.ts +0 -56
- package/dist/modelrouter/index.d.ts +0 -2
- package/dist/modelrouter/types.d.ts +0 -35
- package/dist/openai-BkKsS30n.d.ts +0 -111
- package/dist/tools/browser/anthropic.d.ts +0 -51
- package/dist/tools/browser/core.d.ts +0 -196
- package/dist/tools/browser/index.d.ts +0 -72
- package/dist/tools/browser/openai.d.ts +0 -69
- package/dist/tools/browser/prompts.d.ts +0 -7
- package/dist/tools/browser/types.d.ts +0 -227
- package/dist/tools/browser/vercel.d.ts +0 -69
- package/dist/tools/codebase_search/anthropic.d.ts +0 -40
- package/dist/tools/codebase_search/core.d.ts +0 -40
- package/dist/tools/codebase_search/index.d.ts +0 -10
- package/dist/tools/codebase_search/openai.d.ts +0 -87
- package/dist/tools/codebase_search/prompts.d.ts +0 -7
- package/dist/tools/codebase_search/types.d.ts +0 -46
- package/dist/tools/codebase_search/vercel.d.ts +0 -65
- package/dist/tools/fastapply/anthropic.d.ts +0 -4
- package/dist/tools/fastapply/core.d.ts +0 -41
- package/dist/tools/fastapply/index.d.ts +0 -10
- package/dist/tools/fastapply/openai.d.ts +0 -4
- package/dist/tools/fastapply/prompts.d.ts +0 -7
- package/dist/tools/fastapply/types.d.ts +0 -77
- package/dist/tools/fastapply/vercel.d.ts +0 -4
- package/dist/tools/index.d.ts +0 -10
- package/dist/tools/utils/resilience.d.ts +0 -58
- package/dist/vercel-B1GZ_g9N.d.ts +0 -69
- /package/dist/{chunk-34F3D6JD.js.map → chunk-NSQGPBMU.js.map} +0 -0
package/dist/{chunk-DF2ZOO7R.js → chunk-5VQEQSJQ.js}
@@ -186,17 +186,12 @@ var MorphGit = class {
    * author: {
    * name: 'AI Agent',
    * email: 'ai@example.com'
-   * }
-   * chatHistory: [
-   * { role: 'user', content: 'Please add a new feature' },
-   * { role: 'assistant', content: 'I will add that feature' }
-   * ],
-   * recordingId: 'rec_123'
+   * }
    * });
    * ```
    */
   async commit(options) {
-    const { dir, message, author
+    const { dir, message, author } = options;
     const commitAuthor = author || {
       name: "Morph SDK",
       email: "sdk@morphllm.com"
@@ -207,20 +202,6 @@ var MorphGit = class {
       message,
       author: commitAuthor
     });
-    if (chatHistory || recordingId) {
-      const metadata = {
-        chatHistory,
-        recordingId
-      };
-      await git.addNote({
-        fs,
-        dir,
-        ref: "refs/notes/morph-metadata",
-        oid: sha,
-        note: JSON.stringify(metadata, null, 2),
-        author: commitAuthor
-      });
-    }
     return sha;
   }
   /**
@@ -405,39 +386,9 @@ var MorphGit = class {
     });
     return oid;
   }
-  /**
-   * Get metadata (chat history, recording ID) attached to a commit
-   *
-   * @example
-   * ```ts
-   * const metadata = await morphGit.getCommitMetadata({
-   * dir: './my-project',
-   * commitSha: 'abc123...'
-   * });
-   *
-   * if (metadata) {
-   * console.log('Chat history:', metadata.chatHistory);
-   * console.log('Recording ID:', metadata.recordingId);
-   * }
-   * ```
-   */
-  async getCommitMetadata(options) {
-    try {
-      const note = await git.readNote({
-        fs,
-        dir: options.dir,
-        ref: "refs/notes/morph-metadata",
-        oid: options.commitSha
-      });
-      const metadata = JSON.parse(new TextDecoder().decode(note));
-      return metadata;
-    } catch (err) {
-      return null;
-    }
-  }
 };
 
 export {
   MorphGit
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-5VQEQSJQ.js.map
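For orientation, a minimal sketch of the commit flow after this change, based only on the code above: `commit()` now destructures just `dir`, `message`, and `author`, defaults the author to Morph SDK, and returns the SHA; the git-notes metadata (`chatHistory`/`recordingId`) and `getCommitMetadata()` are gone. The `morphsdk/git` import specifier is taken from the package's own JSDoc and is an assumption about your setup.

```ts
// Sketch only — assumes the 'morphsdk/git' entry point shown in the package's JSDoc.
import { MorphGit } from 'morphsdk/git';

const morphGit = new MorphGit({ apiKey: process.env.MORPH_API_KEY! });

// 0.2.22: commit() accepts dir/message/author and returns the commit SHA.
// The chatHistory/recordingId options and getCommitMetadata() were removed.
const sha = await morphGit.commit({
  dir: './my-project',
  message: 'Add new feature',
  author: { name: 'AI Agent', email: 'ai@example.com' },
});
console.log(`committed ${sha}`);
```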
package/dist/chunk-5VQEQSJQ.js.map (new file; its single source-map line follows, shown wrapped)
@@ -0,0 +1 @@
{"version":3,"sources":["../git/client.ts"],"sourcesContent":["/**\n * Morph Git Client - Simple, high-level Git operations\n * Built on isomorphic-git with explicit configuration\n */\n\nimport git from 'isomorphic-git';\nimport http from 'isomorphic-git/http/node';\nimport fs from 'fs';\nimport type {\n CloneOptions,\n PushOptions,\n PullOptions,\n AddOptions,\n CommitOptions,\n StatusOptions,\n LogOptions,\n CheckoutOptions,\n BranchOptions,\n DiffOptions,\n CommitObject,\n StatusResult,\n MorphGitConfig,\n} from './types.js';\n\nconst DEFAULT_PROXY_URL = 'https://repos.morphllm.com';\n\n/**\n * MorphGit - Git operations for AI agents with Morph backend\n * \n * @example\n * ```typescript\n * import { MorphGit } from 'morphsdk/git';\n * \n * const morphGit = new MorphGit({\n * apiKey: process.env.MORPH_API_KEY!,\n * proxyUrl: 'https://repos.morphllm.com' // Optional\n * });\n * \n * await morphGit.init({ repoId: 'my-project', dir: './my-project' });\n * await morphGit.push({ dir: './my-project' });\n * ```\n */\nexport class MorphGit {\n private readonly apiKey: string;\n private readonly proxyUrl: string;\n\n constructor(config: MorphGitConfig) {\n // Validate API key\n if (!config.apiKey) {\n throw new Error('API key is required. Get one at https://morphllm.com/dashboard');\n }\n \n if (!config.apiKey.startsWith('sk-') && !config.apiKey.startsWith('morph-')) {\n throw new Error('Invalid API key format. Expected: sk-... or morph-...');\n }\n \n this.apiKey = config.apiKey;\n this.proxyUrl = config.proxyUrl || DEFAULT_PROXY_URL;\n }\n \n /**\n * Get auth callback for isomorphic-git operations\n * @private\n */\n private getAuthCallback() {\n return () => ({\n username: 'morph',\n password: this.apiKey,\n });\n }\n\n /**\n * Initialize a new repository\n * Creates the repo in the database and in the git provider\n * \n * @example\n * ```ts\n * await morphGit.init({\n * repoId: 'my-project',\n * dir: './my-project',\n * defaultBranch: 'main'\n * });\n * ```\n */\n async init(options: {\n repoId: string;\n dir: string;\n defaultBranch?: string;\n }): Promise<void> {\n const { repoId, dir, defaultBranch = 'main' } = options;\n\n // Call backend API to create repository\n const response = await fetch(`${this.proxyUrl}/v1/repos`, {\n method: 'POST',\n headers: {\n 'Authorization': `Bearer ${this.apiKey}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n repoId,\n name: repoId,\n defaultBranch,\n }),\n });\n\n if (!response.ok) {\n const error = await response.text();\n throw new Error(`Failed to create repository: ${error}`);\n }\n\n // Initialize local git repo\n await git.init({\n fs,\n dir,\n defaultBranch,\n });\n\n // Add remote\n await git.addRemote({\n fs,\n dir,\n remote: 'origin',\n url: `${this.proxyUrl}/v1/repos/${repoId}`,\n });\n\n console.log(`✓ Repository '${repoId}' initialized`);\n }\n\n /**\n * Clone a repository from Morph repos\n * \n * @example\n * ```ts\n * await morphGit.clone({\n * repoId: 'my-project',\n * dir: './my-project'\n * });\n * ```\n */\n async clone(options: CloneOptions): Promise<void> {\n const { repoId, dir, branch = 'main', depth, singleBranch = true } = options;\n\n await git.clone({\n fs,\n http,\n dir,\n corsProxy: this.proxyUrl,\n url: `${this.proxyUrl}/v1/repos/${repoId}`,\n ref: branch,\n singleBranch,\n depth,\n onAuth: this.getAuthCallback(),\n });\n }\n\n /**\n * Push changes to remote repository\n * \n * @example\n * ```ts\n * await morphGit.push({ dir: './my-project' });\n * ```\n */\n async push(options: PushOptions): 
Promise<void> {\n const { dir, remote = 'origin', branch } = options;\n\n await git.push({\n fs,\n http,\n dir,\n remote,\n ref: branch,\n onAuth: this.getAuthCallback(),\n });\n }\n\n /**\n * Pull changes from remote repository\n * \n * @example\n * ```ts\n * await morphGit.pull({ dir: './my-project' });\n * ```\n */\n async pull(options: PullOptions): Promise<void> {\n const { dir, remote = 'origin', branch } = options;\n\n await git.pull({\n fs,\n http,\n dir,\n remote,\n ref: branch,\n onAuth: this.getAuthCallback(),\n author: {\n name: 'Morph Agent',\n email: 'agent@morph.com',\n },\n });\n }\n\n /**\n * Stage a file for commit\n * \n * @example\n * ```ts\n * await morphGit.add({\n * dir: './my-project',\n * filepath: 'src/app.ts'\n * });\n * ```\n */\n async add(options: AddOptions): Promise<void> {\n const { dir, filepath } = options;\n\n await git.add({\n fs,\n dir,\n filepath,\n });\n }\n\n /**\n * Remove a file from staging\n * \n * @example\n * ```ts\n * await morphGit.remove({\n * dir: './my-project',\n * filepath: 'src/old-file.ts'\n * });\n * ```\n */\n async remove(options: AddOptions): Promise<void> {\n const { dir, filepath } = options;\n\n await git.remove({\n fs,\n dir,\n filepath,\n });\n }\n\n /**\n * Commit staged changes\n * \n * @example\n * ```ts\n * await morphGit.commit({\n * dir: './my-project',\n * message: 'Add new feature',\n * author: {\n * name: 'AI Agent',\n * email: 'ai@example.com'\n * }\n * });\n * ```\n */\n async commit(options: CommitOptions): Promise<string> {\n const { dir, message, author } = options;\n\n // Provide default author if not specified\n const commitAuthor = author || {\n name: 'Morph SDK',\n email: 'sdk@morphllm.com'\n };\n\n const sha = await git.commit({\n fs,\n dir,\n message,\n author: commitAuthor,\n });\n\n return sha;\n }\n\n /**\n * Get status of a file\n * \n * @example\n * ```ts\n * const status = await morphGit.status({\n * dir: './my-project',\n * filepath: 'src/app.ts'\n * });\n * console.log(status); // 'modified', '*added', etc.\n * ```\n */\n async status(options: StatusOptions): Promise<string> {\n const { dir, filepath } = options;\n\n if (!filepath) {\n throw new Error('filepath is required for status check');\n }\n\n const status = await git.status({\n fs,\n dir,\n filepath,\n });\n\n return status;\n }\n\n /**\n * Get commit history\n * \n * @example\n * ```ts\n * const commits = await morphGit.log({\n * dir: './my-project',\n * depth: 10\n * });\n * ```\n */\n async log(options: LogOptions): Promise<CommitObject[]> {\n const { dir, depth, ref } = options;\n\n const commits = await git.log({\n fs,\n dir,\n depth,\n ref,\n });\n\n return commits as CommitObject[];\n }\n\n /**\n * Checkout a branch or commit\n * \n * @example\n * ```ts\n * await morphGit.checkout({\n * dir: './my-project',\n * ref: 'feature-branch'\n * });\n * ```\n */\n async checkout(options: CheckoutOptions): Promise<void> {\n const { dir, ref } = options;\n\n await git.checkout({\n fs,\n dir,\n ref,\n });\n }\n\n /**\n * Create a new branch\n * \n * @example\n * ```ts\n * await morphGit.branch({\n * dir: './my-project',\n * name: 'feature-branch',\n * checkout: true\n * });\n * ```\n */\n async branch(options: BranchOptions): Promise<void> {\n const { dir, name, checkout = false } = options;\n\n await git.branch({\n fs,\n dir,\n ref: name,\n checkout,\n });\n }\n\n /**\n * List all branches\n * \n * @example\n * ```ts\n * const branches = await morphGit.listBranches({\n * dir: './my-project'\n * });\n * ```\n */\n async listBranches(options: { 
dir: string }): Promise<string[]> {\n const { dir } = options;\n\n const branches = await git.listBranches({\n fs,\n dir,\n });\n\n return branches;\n }\n\n /**\n * Get the current branch name\n * \n * @example\n * ```ts\n * const branch = await morphGit.currentBranch({\n * dir: './my-project'\n * });\n * ```\n */\n async currentBranch(options: { dir: string }): Promise<string | undefined> {\n const { dir } = options;\n\n const branch = await git.currentBranch({\n fs,\n dir,\n });\n\n return branch || undefined;\n }\n\n /**\n * Get list of changed files (similar to git diff --name-only)\n * \n * @example\n * ```ts\n * const changes = await morphGit.statusMatrix({\n * dir: './my-project'\n * });\n * ```\n */\n async statusMatrix(options: { dir: string }): Promise<StatusResult[]> {\n const { dir } = options;\n\n const matrix = await git.statusMatrix({\n fs,\n dir,\n });\n\n return matrix.map(([filepath, HEADStatus, workdirStatus, stageStatus]) => {\n let status: StatusResult['status'] = 'unmodified';\n\n // Determine status based on statusMatrix values\n if (HEADStatus === 1 && workdirStatus === 2 && stageStatus === 2) {\n status = 'modified';\n } else if (HEADStatus === 1 && workdirStatus === 2 && stageStatus === 1) {\n status = '*modified';\n } else if (HEADStatus === 0 && workdirStatus === 2 && stageStatus === 2) {\n status = 'added';\n } else if (HEADStatus === 0 && workdirStatus === 2 && stageStatus === 0) {\n status = '*added';\n } else if (HEADStatus === 1 && workdirStatus === 0 && stageStatus === 0) {\n status = 'deleted';\n } else if (HEADStatus === 1 && workdirStatus === 0 && stageStatus === 1) {\n status = '*deleted';\n } else if (HEADStatus === 1 && workdirStatus === 1 && stageStatus === 1) {\n status = 'unmodified';\n } else if (HEADStatus === 0 && workdirStatus === 0 && stageStatus === 0) {\n status = 'absent';\n }\n\n return {\n filepath,\n status,\n };\n });\n }\n\n /**\n * Get the current commit hash\n * \n * @example\n * ```ts\n * const hash = await morphGit.resolveRef({\n * dir: './my-project',\n * ref: 'HEAD'\n * });\n * ```\n */\n async resolveRef(options: { dir: string; ref: string }): Promise<string> {\n const { dir, ref } = options;\n\n const oid = await git.resolveRef({\n fs,\n dir,\n ref,\n });\n\n return oid;\n 
}\n}\n\n"],"mappings":";AAKA,OAAO,SAAS;AAChB,OAAO,UAAU;AACjB,OAAO,QAAQ;AAiBf,IAAM,oBAAoB;AAkBnB,IAAM,WAAN,MAAe;AAAA,EACH;AAAA,EACA;AAAA,EAEjB,YAAY,QAAwB;AAElC,QAAI,CAAC,OAAO,QAAQ;AAClB,YAAM,IAAI,MAAM,gEAAgE;AAAA,IAClF;AAEA,QAAI,CAAC,OAAO,OAAO,WAAW,KAAK,KAAK,CAAC,OAAO,OAAO,WAAW,QAAQ,GAAG;AAC3E,YAAM,IAAI,MAAM,uDAAuD;AAAA,IACzE;AAEA,SAAK,SAAS,OAAO;AACrB,SAAK,WAAW,OAAO,YAAY;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,kBAAkB;AACxB,WAAO,OAAO;AAAA,MACZ,UAAU;AAAA,MACV,UAAU,KAAK;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,KAAK,SAIO;AAChB,UAAM,EAAE,QAAQ,KAAK,gBAAgB,OAAO,IAAI;AAGhD,UAAM,WAAW,MAAM,MAAM,GAAG,KAAK,QAAQ,aAAa;AAAA,MACxD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,iBAAiB,UAAU,KAAK,MAAM;AAAA,QACtC,gBAAgB;AAAA,MAClB;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA,MAAM;AAAA,QACN;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,QAAQ,MAAM,SAAS,KAAK;AAClC,YAAM,IAAI,MAAM,gCAAgC,KAAK,EAAE;AAAA,IACzD;AAGA,UAAM,IAAI,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAGD,UAAM,IAAI,UAAU;AAAA,MAClB;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,MACR,KAAK,GAAG,KAAK,QAAQ,aAAa,MAAM;AAAA,IAC1C,CAAC;AAED,YAAQ,IAAI,sBAAiB,MAAM,eAAe;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,MAAM,SAAsC;AAChD,UAAM,EAAE,QAAQ,KAAK,SAAS,QAAQ,OAAO,eAAe,KAAK,IAAI;AAErE,UAAM,IAAI,MAAM;AAAA,MACd;AAAA,MACA;AAAA,MACA;AAAA,MACA,WAAW,KAAK;AAAA,MAChB,KAAK,GAAG,KAAK,QAAQ,aAAa,MAAM;AAAA,MACxC,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA,QAAQ,KAAK,gBAAgB;AAAA,IAC/B,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,KAAK,SAAqC;AAC9C,UAAM,EAAE,KAAK,SAAS,UAAU,OAAO,IAAI;AAE3C,UAAM,IAAI,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAAA,MACL,QAAQ,KAAK,gBAAgB;AAAA,IAC/B,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,KAAK,SAAqC;AAC9C,UAAM,EAAE,KAAK,SAAS,UAAU,OAAO,IAAI;AAE3C,UAAM,IAAI,KAAK;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAAA,MACL,QAAQ,KAAK,gBAAgB;AAAA,MAC7B,QAAQ;AAAA,QACN,MAAM;AAAA,QACN,OAAO;AAAA,MACT;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,IAAI,SAAoC;AAC5C,UAAM,EAAE,KAAK,SAAS,IAAI;AAE1B,UAAM,IAAI,IAAI;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,OAAO,SAAoC;AAC/C,UAAM,EAAE,KAAK,SAAS,IAAI;AAE1B,UAAM,IAAI,OAAO;AAAA,MACf;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,MAAM,OAAO,SAAyC;AACpD,UAAM,EAAE,KAAK,SAAS,OAAO,IAAI;AAGjC,UAAM,eAAe,UAAU;AAAA,MAC7B,MAAM;AAAA,MACN,OAAO;AAAA,IACT;AAEA,UAAM,MAAM,MAAM,IAAI,OAAO;AAAA,MAC3B;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,OAAO,SAAyC;AACpD,UAAM,EAAE,KAAK,SAAS,IAAI;AAE1B,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAEA,UAAM,SAAS,MAAM,IAAI,OAAO;AAAA,MAC9B;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,IAAI,SAA8C;AACtD,UAAM,EAAE,KAAK,OAAO,IAAI,IAAI;AAE5B,UAAM,UAAU,MAAM,IAAI,IAAI;AAAA,MAC5B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,SAAS,SAAyC;AACtD,UAAM,EAAE,KAAK,IAAI,IAAI;AAErB,UAAM,IAAI,SAAS;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,OAAO,SAAuC;AAClD,UAAM,EAAE,KAAK,MAAM,WAAW,MAAM,IAAI;AAExC,UAAM,
IAAI,OAAO;AAAA,MACf;AAAA,MACA;AAAA,MACA,KAAK;AAAA,MACL;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,aAAa,SAA6C;AAC9D,UAAM,EAAE,IAAI,IAAI;AAEhB,UAAM,WAAW,MAAM,IAAI,aAAa;AAAA,MACtC;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,cAAc,SAAuD;AACzE,UAAM,EAAE,IAAI,IAAI;AAEhB,UAAM,SAAS,MAAM,IAAI,cAAc;AAAA,MACrC;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO,UAAU;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,aAAa,SAAmD;AACpE,UAAM,EAAE,IAAI,IAAI;AAEhB,UAAM,SAAS,MAAM,IAAI,aAAa;AAAA,MACpC;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO,OAAO,IAAI,CAAC,CAAC,UAAU,YAAY,eAAe,WAAW,MAAM;AACxE,UAAI,SAAiC;AAGrC,UAAI,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AAChE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX,WAAW,eAAe,KAAK,kBAAkB,KAAK,gBAAgB,GAAG;AACvE,iBAAS;AAAA,MACX;AAEA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,WAAW,SAAwD;AACvE,UAAM,EAAE,KAAK,IAAI,IAAI;AAErB,UAAM,MAAM,MAAM,IAAI,WAAW;AAAA,MAC/B;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AACF;","names":[]}
package/dist/chunk-73RQWOQC.js
@@ -0,0 +1,16 @@
+// tools/warp_grep/tools/analyse.ts
+async function toolAnalyse(provider, args) {
+  const list = await provider.analyse({
+    path: args.path,
+    pattern: args.pattern ?? null,
+    maxResults: args.maxResults ?? 100,
+    maxDepth: args.maxDepth ?? 2
+  });
+  if (!list.length) return "empty";
+  return list.map((e) => `${" ".repeat(e.depth)}- ${e.type === "dir" ? "[D]" : "[F]"} ${e.name}`).join("\n");
+}
+
+export {
+  toolAnalyse
+};
+//# sourceMappingURL=chunk-73RQWOQC.js.map
package/dist/chunk-73RQWOQC.js.map (new file; its single source-map line follows, shown wrapped)
@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/tools/analyse.ts"],"sourcesContent":["import type { WarpGrepProvider } from '../providers/types.js';\n\nexport async function toolAnalyse(\n provider: WarpGrepProvider,\n args: { path: string; pattern?: string | null; maxResults?: number; maxDepth?: number }\n): Promise<string> {\n const list = await provider.analyse({\n path: args.path,\n pattern: args.pattern ?? null,\n maxResults: args.maxResults ?? 100,\n maxDepth: args.maxDepth ?? 2,\n });\n if (!list.length) return 'empty';\n return list\n .map((e) => `${' '.repeat(e.depth)}- ${e.type === 'dir' ? '[D]' : '[F]'} ${e.name}`)\n .join('\\n');\n}\n\n\n"],"mappings":";AAEA,eAAsB,YACpB,UACA,MACiB;AACjB,QAAM,OAAO,MAAM,SAAS,QAAQ;AAAA,IAClC,MAAM,KAAK;AAAA,IACX,SAAS,KAAK,WAAW;AAAA,IACzB,YAAY,KAAK,cAAc;AAAA,IAC/B,UAAU,KAAK,YAAY;AAAA,EAC7B,CAAC;AACD,MAAI,CAAC,KAAK,OAAQ,QAAO;AACzB,SAAO,KACJ,IAAI,CAAC,MAAM,GAAG,KAAK,OAAO,EAAE,KAAK,CAAC,KAAK,EAAE,SAAS,QAAQ,QAAQ,KAAK,IAAI,EAAE,IAAI,EAAE,EACnF,KAAK,IAAI;AACd;","names":[]}
package/dist/{chunk-2DXRTGRH.js.map → chunk-74ZHKB54.js.map} (the single source-map line changed; the old line, truncated in this rendering, and then the new line follow)
@@ -1 +1 @@
-
{"version":3,"sources":["../git/index.ts"],"sourcesContent":["/**\n * Morph Git SDK\n * \n * Git operations for AI agents using Morph's backend infrastructure.\n * \n * @example\n * ```typescript\n * import { MorphGit } from 'morphsdk/git';\n * \n * const morphGit = new MorphGit({\n * apiKey: process.env.MORPH_API_KEY!\n * });\n * \n * // Initialize and push\n * await morphGit.init({ repoId: 'my-project', dir: './my-project' });\n * await morphGit.add({ dir: './my-project', filepath: 'src/app.ts' });\n * await morphGit.commit({ dir: './my-project', message: 'Update' });\n * await morphGit.push({ dir: './my-project' });\n * ```\n */\n\nexport { MorphGit } from './client.js';\nexport type {\n MorphGitConfig,\n CloneOptions,\n PushOptions,\n PullOptions,\n AddOptions,\n CommitOptions,\n StatusOptions,\n LogOptions,\n CheckoutOptions,\n BranchOptions,\n DiffOptions,\n CommitObject,\n StatusResult,\n
+
{"version":3,"sources":["../git/index.ts"],"sourcesContent":["/**\n * Morph Git SDK\n * \n * Git operations for AI agents using Morph's backend infrastructure.\n * \n * @example\n * ```typescript\n * import { MorphGit } from 'morphsdk/git';\n * \n * const morphGit = new MorphGit({\n * apiKey: process.env.MORPH_API_KEY!\n * });\n * \n * // Initialize and push\n * await morphGit.init({ repoId: 'my-project', dir: './my-project' });\n * await morphGit.add({ dir: './my-project', filepath: 'src/app.ts' });\n * await morphGit.commit({ dir: './my-project', message: 'Update' });\n * await morphGit.push({ dir: './my-project' });\n * ```\n */\n\nexport { MorphGit } from './client.js';\nexport type {\n MorphGitConfig,\n CloneOptions,\n PushOptions,\n PullOptions,\n AddOptions,\n CommitOptions,\n StatusOptions,\n LogOptions,\n CheckoutOptions,\n BranchOptions,\n DiffOptions,\n CommitObject,\n StatusResult,\n} from './types.js';\n\n// Re-export isomorphic-git for advanced use cases\nexport { default as git } from 'isomorphic-git';\nexport { default as http } from 'isomorphic-git/http/node';\n\n"],"mappings":";AAuCA,SAAoB,WAAXA,gBAAsB;AAC/B,SAAoB,WAAXA,gBAAuB;","names":["default"]}
package/dist/chunk-AFEPUNAO.js
@@ -0,0 +1,15 @@
+// tools/warp_grep/agent/config.ts
+var AGENT_CONFIG = {
+  // Give the model freedom; failsafe cap to prevent infinite loops
+  MAX_ROUNDS: 10,
+  TIMEOUT_MS: 3e4
+};
+var DEFAULT_EXCLUDES = (process.env.MORPH_WARP_GREP_EXCLUDE || "").split(",").map((s) => s.trim()).filter(Boolean).concat(["node_modules", ".git", "dist", "build", ".cache", "venv", "target"]);
+var DEFAULT_MODEL = "morph-warp-grep";
+
+export {
+  AGENT_CONFIG,
+  DEFAULT_EXCLUDES,
+  DEFAULT_MODEL
+};
+//# sourceMappingURL=chunk-AFEPUNAO.js.map
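The exclude list above is built once at module load: comma-separated entries from `MORPH_WARP_GREP_EXCLUDE` are trimmed, empty entries dropped, and the built-in defaults appended after them. A small self-contained restatement of that logic (not an import of the package) to show the resulting order:

```ts
// Mirrors the chunk's DEFAULT_EXCLUDES construction; illustrative only.
function buildExcludes(env: string | undefined): string[] {
  return (env || '')
    .split(',')
    .map((s) => s.trim())
    .filter(Boolean)
    .concat(['node_modules', '.git', 'dist', 'build', '.cache', 'venv', 'target']);
}

console.log(buildExcludes('coverage, fixtures'));
// → ['coverage', 'fixtures', 'node_modules', '.git', 'dist', 'build', '.cache', 'venv', 'target']
```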
package/dist/chunk-AFEPUNAO.js.map (new file; its single source-map line follows, shown wrapped)
@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/agent/config.ts"],"sourcesContent":["// Agent configuration defaults for morph-warp-grep\n// Hard-coded: SDK does not expose control over rounds or timeout.\nexport const AGENT_CONFIG = {\n // Give the model freedom; failsafe cap to prevent infinite loops\n MAX_ROUNDS: 10,\n TIMEOUT_MS: 30000,\n};\n\nexport const DEFAULT_EXCLUDES = (process.env.MORPH_WARP_GREP_EXCLUDE || '')\n .split(',')\n .map(s => s.trim())\n .filter(Boolean)\n .concat(['node_modules', '.git', 'dist', 'build', '.cache', 'venv', 'target']);\n\nexport const DEFAULT_MODEL = 'morph-warp-grep';\n\n\n"],"mappings":";AAEO,IAAM,eAAe;AAAA;AAAA,EAE1B,YAAY;AAAA,EACZ,YAAY;AACd;AAEO,IAAM,oBAAoB,QAAQ,IAAI,2BAA2B,IACrE,MAAM,GAAG,EACT,IAAI,OAAK,EAAE,KAAK,CAAC,EACjB,OAAO,OAAO,EACd,OAAO,CAAC,gBAAgB,QAAQ,QAAQ,SAAS,UAAU,QAAQ,QAAQ,CAAC;AAExE,IAAM,gBAAgB;","names":[]}
package/dist/chunk-EAA7D24N.js
@@ -0,0 +1,201 @@
+import {
+  toolRead
+} from "./chunk-Z2FBMSNE.js";
+import {
+  getSystemPrompt
+} from "./chunk-HKZB23U7.js";
+import {
+  toolAnalyse
+} from "./chunk-73RQWOQC.js";
+import {
+  readFinishFiles
+} from "./chunk-EK7OQPWD.js";
+import {
+  AGENT_CONFIG,
+  DEFAULT_MODEL
+} from "./chunk-AFEPUNAO.js";
+import {
+  formatAgentToolOutput
+} from "./chunk-TICMYDII.js";
+import {
+  GrepState,
+  formatTurnGrepOutput,
+  parseAndFilterGrepOutput
+} from "./chunk-NDZO5IPV.js";
+import {
+  LLMResponseParser
+} from "./chunk-GTOXMAF2.js";
+import {
+  fetchWithRetry,
+  withTimeout
+} from "./chunk-4VWJFZVS.js";
+
+// tools/warp_grep/agent/runner.ts
+import path from "path";
+import fs from "fs/promises";
+var parser = new LLMResponseParser();
+async function buildInitialState(repoRoot, query) {
+  try {
+    const entries = await fs.readdir(repoRoot, { withFileTypes: true });
+    const dirs = entries.filter((e) => e.isDirectory()).map((d) => d.name).slice(0, 50);
+    const files = entries.filter((e) => e.isFile()).map((f) => f.name).slice(0, 50);
+    const parts = [
+      `<repo_root>${repoRoot}</repo_root>`,
+      `<top_dirs>${dirs.join(", ")}</top_dirs>`,
+      `<top_files>${files.join(", ")}</top_files>`
+    ];
+    return parts.join("\n");
+  } catch {
+    return `<repo_root>${repoRoot}</repo_root>`;
+  }
+}
+async function callModel(messages, model, apiKey) {
+  const api = "https://api.morphllm.com/v1/chat/completions";
+  const fetchPromise = fetchWithRetry(
+    api,
+    {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        Authorization: `Bearer ${apiKey || process.env.MORPH_API_KEY || ""}`
+      },
+      body: JSON.stringify({
+        model,
+        temperature: 0,
+        max_tokens: 1024,
+        messages
+      })
+    },
+    {}
+  );
+  const resp = await withTimeout(fetchPromise, AGENT_CONFIG.TIMEOUT_MS, "morph-warp-grep request timed out");
+  if (!resp.ok) {
+    const t = await resp.text();
+    throw new Error(`morph-warp-grep error ${resp.status}: ${t}`);
+  }
+  const data = await resp.json();
+  const content = data?.choices?.[0]?.message?.content;
+  if (!content || typeof content !== "string") {
+    throw new Error("Invalid response from model");
+  }
+  return content;
+}
+async function runWarpGrep(config) {
+  const repoRoot = path.resolve(config.repoRoot || process.cwd());
+  const messages = [];
+  const systemMessage = { role: "system", content: getSystemPrompt() };
+  messages.push(systemMessage);
+  const queryContent = `<query>${config.query}</query>`;
+  messages.push({ role: "user", content: queryContent });
+  const initialState = await buildInitialState(repoRoot, config.query);
+  messages.push({ role: "user", content: initialState });
+  const maxRounds = AGENT_CONFIG.MAX_ROUNDS;
+  const model = config.model || DEFAULT_MODEL;
+  const provider = config.provider;
+  const errors = [];
+  const grepState = new GrepState();
+  let finishMeta;
+  let terminationReason = "terminated";
+  for (let round = 1; round <= maxRounds; round += 1) {
+    const assistantContent = await callModel(messages, model, config.apiKey).catch((e) => {
+      errors.push({ message: e instanceof Error ? e.message : String(e) });
+      return "";
+    });
+    if (!assistantContent) break;
+    messages.push({ role: "assistant", content: assistantContent });
+    let toolCalls = [];
+    try {
+      toolCalls = parser.parse(assistantContent);
+    } catch (e) {
+      errors.push({ message: e instanceof Error ? e.message : String(e) });
+      terminationReason = "terminated";
+      break;
+    }
+    if (toolCalls.length === 0) {
+      errors.push({ message: "No tool calls produced by the model." });
+      terminationReason = "terminated";
+      break;
+    }
+    const finishCalls = toolCalls.filter((c) => c.name === "finish");
+    const grepCalls = toolCalls.filter((c) => c.name === "grep");
+    const analyseCalls = toolCalls.filter((c) => c.name === "analyse");
+    const readCalls = toolCalls.filter((c) => c.name === "read");
+    const formatted = [];
+    const otherPromises = [];
+    for (const c of analyseCalls) {
+      const args = c.arguments ?? {};
+      otherPromises.push(
+        toolAnalyse(provider, args).then(
+          (p) => formatAgentToolOutput("analyse", args, p, { isError: false }),
+          (err) => formatAgentToolOutput("analyse", args, String(err), { isError: true })
+        )
+      );
+    }
+    for (const c of readCalls) {
+      const args = c.arguments ?? {};
+      otherPromises.push(
+        toolRead(provider, args).then(
+          (p) => formatAgentToolOutput("read", args, p, { isError: false }),
+          (err) => formatAgentToolOutput("read", args, String(err), { isError: true })
+        )
+      );
+    }
+    const otherResults = await Promise.all(otherPromises);
+    formatted.push(...otherResults);
+    for (const c of grepCalls) {
+      const args = c.arguments ?? {};
+      try {
+        const grepRes = await provider.grep({ pattern: args.pattern, path: args.path });
+        const rawOutput = Array.isArray(grepRes.lines) ? grepRes.lines.join("\n") : "";
+        const newMatches = parseAndFilterGrepOutput(rawOutput, grepState);
+        let formattedPayload = formatTurnGrepOutput(newMatches);
+        if (formattedPayload === "No new matches found.") {
+          formattedPayload = "no new matches";
+        }
+        formatted.push(formatAgentToolOutput("grep", args, formattedPayload, { isError: false }));
+      } catch (err) {
+        formatted.push(formatAgentToolOutput("grep", args, String(err), { isError: true }));
+      }
+    }
+    if (formatted.length > 0) {
+      messages.push({ role: "user", content: formatted.join("\n") });
+    }
+    if (finishCalls.length) {
+      const fc = finishCalls[0];
+      const files = fc.arguments?.files ?? [];
+      finishMeta = { files };
+      terminationReason = "completed";
+      break;
+    }
+  }
+  if (terminationReason !== "completed" || !finishMeta) {
+    return { terminationReason, messages, errors };
+  }
+  const parts = ["Relevant context found:"];
+  for (const f of finishMeta.files) {
+    const ranges = f.lines.map(([s, e]) => `${s}-${e}`).join(", ");
+    parts.push(`- ${f.path}: ${ranges}`);
+  }
+  const payload = parts.join("\n");
+  const resolved = await readFinishFiles(
+    repoRoot,
+    finishMeta.files,
+    async (p, s, e) => {
+      const rr = await provider.read({ path: p, start: s, end: e });
+      return rr.lines.map((l) => {
+        const idx = l.indexOf("|");
+        return idx >= 0 ? l.slice(idx + 1) : l;
+      });
+    }
+  );
+  return {
+    terminationReason: "completed",
+    messages,
+    finish: { payload, metadata: finishMeta, resolved }
+  };
+}
+
+export {
+  runWarpGrep
+};
+//# sourceMappingURL=chunk-EAA7D24N.js.map
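A hedged sketch of driving this runner directly. The agent loop is capped at 10 rounds with a 30 s request timeout (see chunk-AFEPUNAO.js above), and the result exposes `terminationReason`, the message transcript, and on success a `finish` block with resolved file ranges. The import specifier is an assumption — the diff adds a `tools/warp_grep` entry under `dist/`, but the updated exports map in package.json is not shown here.

```ts
// Assumed entry point; adjust to the package's actual exports map.
import { runWarpGrep, LocalRipgrepProvider } from '@morphllm/morphsdk/tools/warp_grep';

const repoRoot = process.cwd();
const result = await runWarpGrep({
  query: 'Where does the SDK default the commit author?',
  repoRoot,
  provider: new LocalRipgrepProvider(repoRoot),   // ripgrep-backed local provider
  apiKey: process.env.MORPH_API_KEY,              // callModel also falls back to this env var
});

if (result.terminationReason === 'completed' && result.finish) {
  console.log(result.finish.payload);             // "Relevant context found: - <path>: <ranges>"
  for (const file of result.finish.resolved) {
    console.log(file.path, file.ranges);          // merged [start, end] line ranges per file
  }
}
```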
package/dist/chunk-EAA7D24N.js.map (new file; its single source-map line follows, shown wrapped)
@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/agent/runner.ts"],"sourcesContent":["import { AGENT_CONFIG, DEFAULT_MODEL } from './config.js';\nimport { getSystemPrompt } from './prompt.js';\nimport type { AgentRunResult, ChatMessage, SessionConfig, ToolCall, AgentFinish } from './types.js';\nimport { LLMResponseParser, LLMResponseParseError } from './parser.js';\nimport type { WarpGrepProvider } from '../providers/types.js';\nimport { toolRead } from '../tools/read.js';\nimport { toolAnalyse } from '../tools/analyse.js';\nimport { fetchWithRetry, withTimeout } from '../../utils/resilience.js';\nimport { formatAgentToolOutput } from './formatter.js';\nimport { GrepState, parseAndFilterGrepOutput, formatTurnGrepOutput } from './grep_helpers.js';\nimport { readFinishFiles } from '../tools/finish.js';\nimport path from 'path';\nimport fs from 'fs/promises';\n\ntype EventName =\n | 'initial_state'\n | 'round_start'\n | 'round_end'\n | 'finish'\n | 'error';\n\nexport type EventCallback = (name: EventName, payload: Record<string, unknown>) => void;\n\nconst parser = new LLMResponseParser();\n\nasync function buildInitialState(repoRoot: string, query: string): Promise<string> {\n // Summarize top-level directories and file counts\n try {\n const entries = await fs.readdir(repoRoot, { withFileTypes: true });\n const dirs = entries.filter(e => e.isDirectory()).map(d => d.name).slice(0, 50);\n const files = entries.filter(e => e.isFile()).map(f => f.name).slice(0, 50);\n const parts = [\n `<repo_root>${repoRoot}</repo_root>`,\n `<top_dirs>${dirs.join(', ')}</top_dirs>`,\n `<top_files>${files.join(', ')}</top_files>`,\n ];\n return parts.join('\\n');\n } catch {\n return `<repo_root>${repoRoot}</repo_root>`;\n }\n}\n\nfunction formatAssistantToolBlock(name: string, args: Record<string, unknown>, payload: string, isError = false): string {\n const argStr = Object.entries(args)\n .map(([k, v]) => `${k}=${JSON.stringify(v)}`)\n .join(' ');\n const prefix = isError ? 
'error' : 'result';\n return `<${prefix} name=\"${name}\" ${argStr}>\\n${payload}\\n</${prefix}>`;\n}\n\nasync function callModel(messages: ChatMessage[], model: string, apiKey?: string): Promise<string> {\n const api = 'https://api.morphllm.com/v1/chat/completions';\n const fetchPromise = fetchWithRetry(\n api,\n {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${apiKey || process.env.MORPH_API_KEY || ''}`,\n },\n body: JSON.stringify({\n model,\n temperature: 0.0,\n max_tokens: 1024,\n messages,\n }),\n },\n {}\n );\n const resp = await withTimeout(fetchPromise, AGENT_CONFIG.TIMEOUT_MS, 'morph-warp-grep request timed out');\n if (!resp.ok) {\n const t = await resp.text();\n throw new Error(`morph-warp-grep error ${resp.status}: ${t}`);\n }\n const data = await resp.json();\n const content = data?.choices?.[0]?.message?.content;\n if (!content || typeof content !== 'string') {\n throw new Error('Invalid response from model');\n }\n return content;\n}\n\nexport async function runWarpGrep(config: SessionConfig & { provider: WarpGrepProvider }): Promise<AgentRunResult> {\n const repoRoot = path.resolve(config.repoRoot || process.cwd());\n const messages: ChatMessage[] = [];\n\n // system\n const systemMessage = { role: 'system' as const, content: getSystemPrompt() };\n messages.push(systemMessage);\n // user query\n const queryContent = `<query>${config.query}</query>`;\n messages.push({ role: 'user', content: queryContent });\n // initial state\n const initialState = await buildInitialState(repoRoot, config.query);\n messages.push({ role: 'user', content: initialState });\n\n const maxRounds = AGENT_CONFIG.MAX_ROUNDS;\n const model = config.model || DEFAULT_MODEL;\n const provider = config.provider;\n const errors: Array<{ message: string }> = [];\n const grepState = new GrepState();\n\n let finishMeta: AgentFinish | undefined;\n let terminationReason: AgentRunResult['terminationReason'] = 'terminated';\n\n for (let round = 1; round <= maxRounds; round += 1) {\n // call model\n const assistantContent = await callModel(messages, model, config.apiKey).catch((e: unknown) => {\n errors.push({ message: e instanceof Error ? e.message : String(e) });\n return '';\n });\n if (!assistantContent) break;\n messages.push({ role: 'assistant', content: assistantContent });\n\n // parse tool calls\n let toolCalls: ToolCall[] = [];\n try {\n toolCalls = parser.parse(assistantContent);\n } catch (e) {\n errors.push({ message: e instanceof Error ? e.message : String(e) });\n terminationReason = 'terminated';\n break;\n }\n if (toolCalls.length === 0) {\n errors.push({ message: 'No tool calls produced by the model.' });\n terminationReason = 'terminated';\n break;\n }\n\n const finishCalls = toolCalls.filter(c => c.name === 'finish');\n const grepCalls = toolCalls.filter(c => c.name === 'grep');\n const analyseCalls = toolCalls.filter(c => c.name === 'analyse');\n const readCalls = toolCalls.filter(c => c.name === 'read');\n\n const formatted: string[] = [];\n\n // Execute non-grep tools in parallel\n const otherPromises: Array<Promise<string>> = [];\n for (const c of analyseCalls) {\n const args = (c.arguments ?? {}) as { path: string; pattern?: string | null };\n otherPromises.push(\n toolAnalyse(provider, args).then(\n p => formatAgentToolOutput('analyse', args, p, { isError: false }),\n err => formatAgentToolOutput('analyse', args, String(err), { isError: true })\n )\n );\n }\n for (const c of readCalls) {\n const args = (c.arguments ?? 
{}) as { path: string; start?: number; end?: number };\n otherPromises.push(\n toolRead(provider, args).then(\n p => formatAgentToolOutput('read', args, p, { isError: false }),\n err => formatAgentToolOutput('read', args, String(err), { isError: true })\n )\n );\n }\n const otherResults = await Promise.all(otherPromises);\n formatted.push(...otherResults);\n\n // Execute grep calls sequentially like MCP runner to keep outputs compact\n for (const c of grepCalls) {\n const args = (c.arguments ?? {}) as { pattern: string; path: string };\n try {\n const grepRes = await provider.grep({ pattern: args.pattern, path: args.path });\n const rawOutput = Array.isArray(grepRes.lines) ? grepRes.lines.join('\\n') : '';\n const newMatches = parseAndFilterGrepOutput(rawOutput, grepState);\n let formattedPayload = formatTurnGrepOutput(newMatches);\n if (formattedPayload === \"No new matches found.\") {\n formattedPayload = \"no new matches\";\n }\n formatted.push(formatAgentToolOutput('grep', args, formattedPayload, { isError: false }));\n } catch (err) {\n formatted.push(formatAgentToolOutput('grep', args, String(err), { isError: true }));\n }\n }\n\n if (formatted.length > 0) {\n messages.push({ role: 'user', content: formatted.join('\\n') });\n }\n\n if (finishCalls.length) {\n const fc = finishCalls[0];\n const files = ((fc.arguments as any)?.files ?? []) as AgentFinish['files'];\n finishMeta = { files };\n terminationReason = 'completed';\n break;\n }\n }\n\n if (terminationReason !== 'completed' || !finishMeta) {\n return { terminationReason, messages, errors };\n }\n\n // Build finish payload\n const parts: string[] = ['Relevant context found:'];\n for (const f of finishMeta.files) {\n const ranges = f.lines.map(([s, e]) => `${s}-${e}`).join(', ');\n parts.push(`- ${f.path}: ${ranges}`);\n }\n const payload = parts.join('\\n');\n\n // Resolve file contents for returned ranges\n const resolved = await readFinishFiles(\n repoRoot,\n finishMeta.files,\n async (p: string, s: number, e: number) => {\n const rr = await provider.read({ path: p, start: s, end: e });\n // rr.lines are \"line|content\" → strip the \"line|\" prefix\n return rr.lines.map(l => {\n const idx = l.indexOf('|');\n return idx >= 0 ? 
l.slice(idx + 1) : l;\n });\n }\n );\n\n return {\n terminationReason: 'completed',\n messages,\n finish: { payload, metadata: finishMeta, resolved },\n };\n}\n\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAWA,OAAO,UAAU;AACjB,OAAO,QAAQ;AAWf,IAAM,SAAS,IAAI,kBAAkB;AAErC,eAAe,kBAAkB,UAAkB,OAAgC;AAEjF,MAAI;AACF,UAAM,UAAU,MAAM,GAAG,QAAQ,UAAU,EAAE,eAAe,KAAK,CAAC;AAClE,UAAM,OAAO,QAAQ,OAAO,OAAK,EAAE,YAAY,CAAC,EAAE,IAAI,OAAK,EAAE,IAAI,EAAE,MAAM,GAAG,EAAE;AAC9E,UAAM,QAAQ,QAAQ,OAAO,OAAK,EAAE,OAAO,CAAC,EAAE,IAAI,OAAK,EAAE,IAAI,EAAE,MAAM,GAAG,EAAE;AAC1E,UAAM,QAAQ;AAAA,MACZ,cAAc,QAAQ;AAAA,MACtB,aAAa,KAAK,KAAK,IAAI,CAAC;AAAA,MAC5B,cAAc,MAAM,KAAK,IAAI,CAAC;AAAA,IAChC;AACA,WAAO,MAAM,KAAK,IAAI;AAAA,EACxB,QAAQ;AACN,WAAO,cAAc,QAAQ;AAAA,EAC/B;AACF;AAUA,eAAe,UAAU,UAAyB,OAAe,QAAkC;AACjG,QAAM,MAAM;AACZ,QAAM,eAAe;AAAA,IACnB;AAAA,IACA;AAAA,MACE,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,eAAe,UAAU,UAAU,QAAQ,IAAI,iBAAiB,EAAE;AAAA,MACpE;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA,aAAa;AAAA,QACb,YAAY;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IACA,CAAC;AAAA,EACH;AACA,QAAM,OAAO,MAAM,YAAY,cAAc,aAAa,YAAY,mCAAmC;AACzG,MAAI,CAAC,KAAK,IAAI;AACZ,UAAM,IAAI,MAAM,KAAK,KAAK;AAC1B,UAAM,IAAI,MAAM,yBAAyB,KAAK,MAAM,KAAK,CAAC,EAAE;AAAA,EAC9D;AACA,QAAM,OAAO,MAAM,KAAK,KAAK;AAC7B,QAAM,UAAU,MAAM,UAAU,CAAC,GAAG,SAAS;AAC7C,MAAI,CAAC,WAAW,OAAO,YAAY,UAAU;AAC3C,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC/C;AACA,SAAO;AACT;AAEA,eAAsB,YAAY,QAAiF;AACjH,QAAM,WAAW,KAAK,QAAQ,OAAO,YAAY,QAAQ,IAAI,CAAC;AAC9D,QAAM,WAA0B,CAAC;AAGjC,QAAM,gBAAgB,EAAE,MAAM,UAAmB,SAAS,gBAAgB,EAAE;AAC5E,WAAS,KAAK,aAAa;AAE3B,QAAM,eAAe,UAAU,OAAO,KAAK;AAC3C,WAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,aAAa,CAAC;AAErD,QAAM,eAAe,MAAM,kBAAkB,UAAU,OAAO,KAAK;AACnE,WAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,aAAa,CAAC;AAErD,QAAM,YAAY,aAAa;AAC/B,QAAM,QAAQ,OAAO,SAAS;AAC9B,QAAM,WAAW,OAAO;AACxB,QAAM,SAAqC,CAAC;AAC5C,QAAM,YAAY,IAAI,UAAU;AAEhC,MAAI;AACJ,MAAI,oBAAyD;AAE7D,WAAS,QAAQ,GAAG,SAAS,WAAW,SAAS,GAAG;AAElD,UAAM,mBAAmB,MAAM,UAAU,UAAU,OAAO,OAAO,MAAM,EAAE,MAAM,CAAC,MAAe;AAC7F,aAAO,KAAK,EAAE,SAAS,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,EAAE,CAAC;AACnE,aAAO;AAAA,IACT,CAAC;AACD,QAAI,CAAC,iBAAkB;AACvB,aAAS,KAAK,EAAE,MAAM,aAAa,SAAS,iBAAiB,CAAC;AAG9D,QAAI,YAAwB,CAAC;AAC7B,QAAI;AACF,kBAAY,OAAO,MAAM,gBAAgB;AAAA,IAC3C,SAAS,GAAG;AACV,aAAO,KAAK,EAAE,SAAS,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,EAAE,CAAC;AACnE,0BAAoB;AACpB;AAAA,IACF;AACA,QAAI,UAAU,WAAW,GAAG;AAC1B,aAAO,KAAK,EAAE,SAAS,uCAAuC,CAAC;AAC/D,0BAAoB;AACpB;AAAA,IACF;AAEA,UAAM,cAAc,UAAU,OAAO,OAAK,EAAE,SAAS,QAAQ;AAC7D,UAAM,YAAY,UAAU,OAAO,OAAK,EAAE,SAAS,MAAM;AACzD,UAAM,eAAe,UAAU,OAAO,OAAK,EAAE,SAAS,SAAS;AAC/D,UAAM,YAAY,UAAU,OAAO,OAAK,EAAE,SAAS,MAAM;AAEzD,UAAM,YAAsB,CAAC;AAG7B,UAAM,gBAAwC,CAAC;AAC/C,eAAW,KAAK,cAAc;AAC5B,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,oBAAc;AAAA,QACZ,YAAY,UAAU,IAAI,EAAE;AAAA,UAC1B,OAAK,sBAAsB,WAAW,MAAM,GAAG,EAAE,SAAS,MAAM,CAAC;AAAA,UACjE,SAAO,sBAAsB,WAAW,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC;AAAA,QAC9E;AAAA,MACF;AAAA,IACF;AACA,eAAW,KAAK,WAAW;AACzB,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,oBAAc;AAAA,QACZ,SAAS,UAAU,IAAI,EAAE;AAAA,UACvB,OAAK,sBAAsB,QAAQ,MAAM,GAAG,EAAE,SAAS,MAAM,CAAC;AAAA,UAC9D,SAAO,sBAAsB,QAAQ,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC;AAAA,QAC3E;AAAA,MACF;AAAA,IACF;AACA,UAAM,eAAe,MAAM,QAAQ,IAAI,aAAa;AACpD,cAAU,KAAK,GAAG,YAAY;AAG9B,eAAW,KAAK,WAAW;AACzB,YAAM,OAAQ,EAAE,aAAa,CAAC;AAC9B,UAAI;AACF,cAAM,UAAU,MAAM,SAAS,KAAK,EAAE,SAAS,KAAK,SAAS,MAAM,KAAK,KAAK,CAAC;AAC9E,cAAM,YAAY,MAAM,QAAQ,QAAQ,KAAK,IAAI,QAAQ,MAAM,KAAK,IAAI,IAAI;AAC5E,cAAM,aAAa,yBAAyB,WAAW,SAAS;AAChE,YAAI,mBAAmB,qBAAqB,UAAU;AACtD,YAAI,qBAAqB,yBAAyB;AAChD,6BAAmB;AAAA,QACrB;AACA,kBAAU,KAAK,sBAAsB,QAAQ,MAAM,kBAAkB,EAAE,SAAS,
MAAM,CAAC,CAAC;AAAA,MAC1F,SAAS,KAAK;AACZ,kBAAU,KAAK,sBAAsB,QAAQ,MAAM,OAAO,GAAG,GAAG,EAAE,SAAS,KAAK,CAAC,CAAC;AAAA,MACpF;AAAA,IACF;AAEA,QAAI,UAAU,SAAS,GAAG;AACxB,eAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,UAAU,KAAK,IAAI,EAAE,CAAC;AAAA,IAC/D;AAEA,QAAI,YAAY,QAAQ;AACtB,YAAM,KAAK,YAAY,CAAC;AACxB,YAAM,QAAU,GAAG,WAAmB,SAAS,CAAC;AAChD,mBAAa,EAAE,MAAM;AACrB,0BAAoB;AACpB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,sBAAsB,eAAe,CAAC,YAAY;AACpD,WAAO,EAAE,mBAAmB,UAAU,OAAO;AAAA,EAC/C;AAGA,QAAM,QAAkB,CAAC,yBAAyB;AAClD,aAAW,KAAK,WAAW,OAAO;AAChC,UAAM,SAAS,EAAE,MAAM,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,EAAE,EAAE,KAAK,IAAI;AAC7D,UAAM,KAAK,KAAK,EAAE,IAAI,KAAK,MAAM,EAAE;AAAA,EACrC;AACA,QAAM,UAAU,MAAM,KAAK,IAAI;AAG/B,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA,WAAW;AAAA,IACX,OAAO,GAAW,GAAW,MAAc;AACzC,YAAM,KAAK,MAAM,SAAS,KAAK,EAAE,MAAM,GAAG,OAAO,GAAG,KAAK,EAAE,CAAC;AAE5D,aAAO,GAAG,MAAM,IAAI,OAAK;AACvB,cAAM,MAAM,EAAE,QAAQ,GAAG;AACzB,eAAO,OAAO,IAAI,EAAE,MAAM,MAAM,CAAC,IAAI;AAAA,MACvC,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AAAA,IACL,mBAAmB;AAAA,IACnB;AAAA,IACA,QAAQ,EAAE,SAAS,UAAU,YAAY,SAAS;AAAA,EACpD;AACF;","names":[]}
package/dist/chunk-EK7OQPWD.js
@@ -0,0 +1,44 @@
+// tools/warp_grep/tools/finish.ts
+function normalizeFinishFiles(files) {
+  return files.map((f) => {
+    const merged = mergeRanges(f.lines);
+    return { path: f.path, lines: merged };
+  });
+}
+async function readFinishFiles(repoRoot, files, reader) {
+  const out = [];
+  for (const f of files) {
+    const ranges = mergeRanges(f.lines);
+    const chunks = [];
+    for (const [s, e] of ranges) {
+      const lines = await reader(f.path, s, e);
+      chunks.push(lines.join("\n"));
+    }
+    out.push({ path: f.path, ranges, content: chunks.join("\n") });
+  }
+  return out;
+}
+function mergeRanges(ranges) {
+  if (!ranges.length) return [];
+  const sorted = [...ranges].sort((a, b) => a[0] - b[0]);
+  const merged = [];
+  let [cs, ce] = sorted[0];
+  for (let i = 1; i < sorted.length; i++) {
+    const [s, e] = sorted[i];
+    if (s <= ce + 1) {
+      ce = Math.max(ce, e);
+    } else {
+      merged.push([cs, ce]);
+      cs = s;
+      ce = e;
+    }
+  }
+  merged.push([cs, ce]);
+  return merged;
+}
+
+export {
+  normalizeFinishFiles,
+  readFinishFiles
+};
+//# sourceMappingURL=chunk-EK7OQPWD.js.map
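To make the range merging concrete, a worked example with hypothetical ranges: overlapping or adjacent ranges (`s <= ce + 1`) collapse into one, while a gap of more than one line starts a new range. The deep `dist/` import path below is an internal build artifact, not a documented entry point, and is shown only to keep the sketch self-contained.

```ts
// normalizeFinishFiles and readFinishFiles are what the chunk exports; mergeRanges is internal.
import { normalizeFinishFiles } from '@morphllm/morphsdk/dist/chunk-EK7OQPWD.js';

const [file] = normalizeFinishFiles([
  { path: 'src/app.ts', lines: [[12, 14], [1, 5], [4, 9]] },   // hypothetical input
]);
console.log(file.lines);
// → [[1, 9], [12, 14]]  (1-5 and 4-9 overlap and merge; 12-14 stays separate because the gap exceeds one line)
```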
package/dist/chunk-EK7OQPWD.js.map (new file; its single source-map line follows, shown wrapped)
@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/tools/finish.ts"],"sourcesContent":["import type { FinishFileSpec } from '../agent/types.js';\nimport fs from 'fs/promises';\n\nexport function normalizeFinishFiles(files: FinishFileSpec[]): FinishFileSpec[] {\n return files.map((f) => {\n const merged = mergeRanges(f.lines);\n return { path: f.path, lines: merged };\n });\n}\n\nexport async function readFinishFiles(\n repoRoot: string,\n files: FinishFileSpec[],\n reader: (path: string, start: number, end: number) => Promise<string[]>\n): Promise<{ path: string; ranges: Array<[number, number]>; content: string }[]> {\n const out: { path: string; ranges: Array<[number, number]>; content: string }[] = [];\n for (const f of files) {\n const ranges = mergeRanges(f.lines);\n const chunks: string[] = [];\n for (const [s, e] of ranges) {\n const lines = await reader(f.path, s, e);\n chunks.push(lines.join('\\n'));\n }\n out.push({ path: f.path, ranges, content: chunks.join('\\n') });\n }\n return out;\n}\n\nfunction mergeRanges(ranges: Array<[number, number]>): Array<[number, number]> {\n if (!ranges.length) return [];\n const sorted = [...ranges].sort((a, b) => a[0] - b[0]);\n const merged: Array<[number, number]> = [];\n let [cs, ce] = sorted[0];\n for (let i = 1; i < sorted.length; i++) {\n const [s, e] = sorted[i];\n if (s <= ce + 1) {\n ce = Math.max(ce, e);\n } else {\n merged.push([cs, ce]);\n cs = s;\n ce = e;\n }\n }\n merged.push([cs, ce]);\n return merged;\n}\n\n\n"],"mappings":";AAGO,SAAS,qBAAqB,OAA2C;AAC9E,SAAO,MAAM,IAAI,CAAC,MAAM;AACtB,UAAM,SAAS,YAAY,EAAE,KAAK;AAClC,WAAO,EAAE,MAAM,EAAE,MAAM,OAAO,OAAO;AAAA,EACvC,CAAC;AACH;AAEA,eAAsB,gBACpB,UACA,OACA,QAC+E;AAC/E,QAAM,MAA4E,CAAC;AACnF,aAAW,KAAK,OAAO;AACrB,UAAM,SAAS,YAAY,EAAE,KAAK;AAClC,UAAM,SAAmB,CAAC;AAC1B,eAAW,CAAC,GAAG,CAAC,KAAK,QAAQ;AAC3B,YAAM,QAAQ,MAAM,OAAO,EAAE,MAAM,GAAG,CAAC;AACvC,aAAO,KAAK,MAAM,KAAK,IAAI,CAAC;AAAA,IAC9B;AACA,QAAI,KAAK,EAAE,MAAM,EAAE,MAAM,QAAQ,SAAS,OAAO,KAAK,IAAI,EAAE,CAAC;AAAA,EAC/D;AACA,SAAO;AACT;AAEA,SAAS,YAAY,QAA0D;AAC7E,MAAI,CAAC,OAAO,OAAQ,QAAO,CAAC;AAC5B,QAAM,SAAS,CAAC,GAAG,MAAM,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;AACrD,QAAM,SAAkC,CAAC;AACzC,MAAI,CAAC,IAAI,EAAE,IAAI,OAAO,CAAC;AACvB,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,CAAC,GAAG,CAAC,IAAI,OAAO,CAAC;AACvB,QAAI,KAAK,KAAK,GAAG;AACf,WAAK,KAAK,IAAI,IAAI,CAAC;AAAA,IACrB,OAAO;AACL,aAAO,KAAK,CAAC,IAAI,EAAE,CAAC;AACpB,WAAK;AACL,WAAK;AAAA,IACP;AAAA,EACF;AACA,SAAO,KAAK,CAAC,IAAI,EAAE,CAAC;AACpB,SAAO;AACT;","names":[]}
package/dist/chunk-FSVBNZMU.js
@@ -0,0 +1,44 @@
+import {
+  runWarpGrep
+} from "./chunk-EAA7D24N.js";
+import {
+  LocalRipgrepProvider
+} from "./chunk-UYBIKZPM.js";
+
+// tools/warp_grep/vercel.ts
+import { tool } from "ai";
+import { z } from "zod";
+function createMorphWarpGrepTool(config) {
+  const schema = z.object({
+    query: z.string().describe("Free-form repository question")
+  });
+  return tool({
+    description: "A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.",
+    inputSchema: schema,
+    execute: async (params) => {
+      const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
+      const result = await runWarpGrep({
+        query: params.query,
+        repoRoot: config.repoRoot,
+        provider,
+        excludes: config.excludes,
+        includes: config.includes,
+        debug: config.debug ?? false,
+        apiKey: config.apiKey
+      });
+      if (result.terminationReason !== "completed" || !result.finish?.metadata) {
+        return { success: false, error: "Search did not complete", messages: result.messages };
+      }
+      const contexts = (result.finish.resolved || []).map((r) => ({
+        file: r.path,
+        content: r.content
+      }));
+      return { success: true, contexts, summary: result.finish.payload };
+    }
+  });
+}
+
+export {
+  createMorphWarpGrepTool
+};
+//# sourceMappingURL=chunk-FSVBNZMU.js.map
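A hedged sketch of plugging the new tool into a Vercel AI SDK call. `createMorphWarpGrepTool` only requires `repoRoot` and defaults to the local ripgrep provider; the import specifier, the model choice, and the surrounding `generateText` wiring are illustrative assumptions, not taken from this diff.

```ts
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';            // illustrative model provider
// Assumed entry point for the vercel build; adjust to the package's exports map.
import { createMorphWarpGrepTool } from '@morphllm/morphsdk/tools/warp_grep/vercel';

const warpGrep = createMorphWarpGrepTool({
  repoRoot: process.cwd(),                          // required
  apiKey: process.env.MORPH_API_KEY,                // optional; the runner also reads MORPH_API_KEY
  // provider, excludes, includes, debug are optional (see WarpGrepConfig above)
});

const { text } = await generateText({
  model: openai('gpt-4o-mini'),
  tools: { warp_grep: warpGrep },
  prompt: 'Find where commit metadata notes used to be written and summarize the change.',
});
console.log(text);
```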
package/dist/chunk-FSVBNZMU.js.map (new file; its single source-map line follows, shown wrapped)
@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/vercel.ts"],"sourcesContent":["import { tool } from 'ai';\nimport { z } from 'zod';\nimport { runWarpGrep } from './agent/runner.js';\nimport type { WarpGrepProvider } from './providers/types.js';\nimport { LocalRipgrepProvider } from './providers/local.js';\nimport { AGENT_CONFIG } from './agent/config.js';\n\nexport type WarpGrepConfig = {\n repoRoot: string;\n provider?: WarpGrepProvider;\n excludes?: string[];\n includes?: string[];\n debug?: boolean;\n apiKey?: string;\n};\n\nexport function createMorphWarpGrepTool(config: WarpGrepConfig) {\n const schema = z.object({\n query: z.string().describe('Free-form repository question'),\n });\n\n return tool({\n description: 'A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.',\n inputSchema: schema,\n execute: async (params) => {\n const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);\n const result = await runWarpGrep({\n query: params.query,\n repoRoot: config.repoRoot,\n provider,\n excludes: config.excludes,\n includes: config.includes,\n debug: config.debug ?? false,\n apiKey: config.apiKey,\n });\n if (result.terminationReason !== 'completed' || !result.finish?.metadata) {\n return { success: false, error: 'Search did not complete', messages: result.messages };\n }\n const contexts = (result.finish.resolved || []).map((r: any) => ({\n file: r.path,\n content: r.content,\n }));\n return { success: true, contexts, summary: result.finish.payload };\n },\n });\n}\n\n\n"],"mappings":";;;;;;;;AAAA,SAAS,YAAY;AACrB,SAAS,SAAS;AAeX,SAAS,wBAAwB,QAAwB;AAC9D,QAAM,SAAS,EAAE,OAAO;AAAA,IACtB,OAAO,EAAE,OAAO,EAAE,SAAS,+BAA+B;AAAA,EAC5D,CAAC;AAED,SAAO,KAAK;AAAA,IACV,aAAa;AAAA,IACb,aAAa;AAAA,IACb,SAAS,OAAO,WAAW;AACzB,YAAM,WAAW,OAAO,YAAY,IAAI,qBAAqB,OAAO,UAAU,OAAO,QAAQ;AAC7F,YAAM,SAAS,MAAM,YAAY;AAAA,QAC/B,OAAO,OAAO;AAAA,QACd,UAAU,OAAO;AAAA,QACjB;AAAA,QACA,UAAU,OAAO;AAAA,QACjB,UAAU,OAAO;AAAA,QACjB,OAAO,OAAO,SAAS;AAAA,QACvB,QAAQ,OAAO;AAAA,MACjB,CAAC;AACD,UAAI,OAAO,sBAAsB,eAAe,CAAC,OAAO,QAAQ,UAAU;AACxE,eAAO,EAAE,SAAS,OAAO,OAAO,2BAA2B,UAAU,OAAO,SAAS;AAAA,MACvF;AACA,YAAM,YAAY,OAAO,OAAO,YAAY,CAAC,GAAG,IAAI,CAAC,OAAY;AAAA,QAC/D,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,MACb,EAAE;AACF,aAAO,EAAE,SAAS,MAAM,UAAU,SAAS,OAAO,OAAO,QAAQ;AAAA,IACnE;AAAA,EACF,CAAC;AACH;","names":[]}
package/dist/chunk-G2RSY56Q.js
@@ -0,0 +1,11 @@
+// tools/warp_grep/utils/files.ts
+import fs from "fs/promises";
+async function readAllLines(filePath) {
+  const content = await fs.readFile(filePath, "utf8");
+  return content.split(/\r?\n/);
+}
+
+export {
+  readAllLines
+};
+//# sourceMappingURL=chunk-G2RSY56Q.js.map
package/dist/chunk-G2RSY56Q.js.map (new file; its single source-map line follows, shown wrapped)
@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/utils/files.ts"],"sourcesContent":["import fs from 'fs/promises';\n\nexport async function readAllLines(filePath: string): Promise<string[]> {\n const content = await fs.readFile(filePath, 'utf8');\n // Preserve newlines; split keeping consistency\n return content.split(/\\r?\\n/);\n}\n\n\n"],"mappings":";AAAA,OAAO,QAAQ;AAEf,eAAsB,aAAa,UAAqC;AACtE,QAAM,UAAU,MAAM,GAAG,SAAS,UAAU,MAAM;AAElD,SAAO,QAAQ,MAAM,OAAO;AAC9B;","names":[]}