@morphllm/morphsdk 0.2.66 → 0.2.68
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/anthropic-B6my2oBx.d.ts +56 -0
- package/dist/{chunk-ZO4PPFCZ.js → chunk-223ZMZP6.js} +30 -5
- package/dist/chunk-223ZMZP6.js.map +1 -0
- package/dist/{chunk-GGYMQZXI.js → chunk-6RFT7K7F.js} +23 -3
- package/dist/chunk-6RFT7K7F.js.map +1 -0
- package/dist/chunk-AAIUUX4Y.js +80 -0
- package/dist/chunk-AAIUUX4Y.js.map +1 -0
- package/dist/chunk-BYNVJ4ON.js +57 -0
- package/dist/chunk-BYNVJ4ON.js.map +1 -0
- package/dist/{chunk-ASO5YWQ4.js → chunk-FURFQDXF.js} +7 -4
- package/dist/{chunk-ASO5YWQ4.js.map → chunk-FURFQDXF.js.map} +1 -1
- package/dist/{chunk-QZNGKOCZ.js → chunk-HBWJLKNM.js} +2 -2
- package/dist/chunk-HKJ2B2AA.js +15 -0
- package/dist/{chunk-XLSODV6H.js → chunk-JPGX6WEV.js} +3 -3
- package/dist/{chunk-AQD2JCLI.js → chunk-K3EHH3C4.js} +3 -3
- package/dist/{chunk-IZISG7DL.js → chunk-MFZP347Z.js} +10 -7
- package/dist/chunk-MFZP347Z.js.map +1 -0
- package/dist/{chunk-7V2KIZT5.js → chunk-QEGH3CWQ.js} +3 -3
- package/dist/{chunk-5QIWYEHJ.js → chunk-RTDKF6NS.js} +2 -2
- package/dist/chunk-S6KU22MU.js +396 -0
- package/dist/chunk-S6KU22MU.js.map +1 -0
- package/dist/{chunk-SU5CKH4F.js → chunk-SXE54MIC.js} +9 -9
- package/dist/chunk-VDJGZIQ5.js +96 -0
- package/dist/chunk-VDJGZIQ5.js.map +1 -0
- package/dist/{chunk-7JEL2VZO.js → chunk-VLZEBK7S.js} +3 -3
- package/dist/{chunk-IUG2FHNN.js → chunk-W76ICQKY.js} +2 -2
- package/dist/client.cjs +443 -11
- package/dist/client.cjs.map +1 -1
- package/dist/client.js +13 -12
- package/dist/{gemini-DPFWWG3k.d.ts → gemini-BIFBiIjY.d.ts} +5 -3
- package/dist/git/client.js +1 -1
- package/dist/git/config.js +1 -1
- package/dist/git/index.js +1 -1
- package/dist/index.cjs +443 -11
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +14 -13
- package/dist/modelrouter/core.js +1 -1
- package/dist/modelrouter/index.js +1 -1
- package/dist/openai-BQMeDFef.d.ts +81 -0
- package/dist/tools/browser/anthropic.cjs.map +1 -1
- package/dist/tools/browser/anthropic.d.ts +3 -50
- package/dist/tools/browser/anthropic.js +6 -65
- package/dist/tools/browser/anthropic.js.map +1 -1
- package/dist/tools/browser/core.cjs +9 -6
- package/dist/tools/browser/core.cjs.map +1 -1
- package/dist/tools/browser/core.d.ts +6 -6
- package/dist/tools/browser/core.js +2 -2
- package/dist/tools/browser/index.cjs +278 -6
- package/dist/tools/browser/index.cjs.map +1 -1
- package/dist/tools/browser/index.d.ts +125 -1
- package/dist/tools/browser/index.js +98 -2
- package/dist/tools/browser/index.js.map +1 -1
- package/dist/tools/browser/live.js +1 -1
- package/dist/tools/browser/openai.cjs.map +1 -1
- package/dist/tools/browser/openai.d.ts +3 -72
- package/dist/tools/browser/openai.js +9 -75
- package/dist/tools/browser/openai.js.map +1 -1
- package/dist/tools/browser/prompts.js +1 -1
- package/dist/tools/browser/vercel.cjs.map +1 -1
- package/dist/tools/browser/vercel.d.ts +3 -68
- package/dist/tools/browser/vercel.js +6 -42
- package/dist/tools/browser/vercel.js.map +1 -1
- package/dist/tools/codebase_search/anthropic.js +1 -1
- package/dist/tools/codebase_search/core.js +1 -1
- package/dist/tools/codebase_search/index.js +1 -1
- package/dist/tools/codebase_search/openai.js +1 -1
- package/dist/tools/codebase_search/prompts.js +1 -1
- package/dist/tools/codebase_search/vercel.js +1 -1
- package/dist/tools/fastapply/anthropic.js +2 -2
- package/dist/tools/fastapply/core.js +1 -1
- package/dist/tools/fastapply/index.js +4 -4
- package/dist/tools/fastapply/openai.js +2 -2
- package/dist/tools/fastapply/prompts.js +1 -1
- package/dist/tools/fastapply/vercel.js +2 -2
- package/dist/tools/index.js +4 -4
- package/dist/tools/utils/resilience.js +1 -1
- package/dist/tools/warp_grep/agent/config.js +1 -1
- package/dist/tools/warp_grep/agent/formatter.js +1 -1
- package/dist/tools/warp_grep/agent/parser.js +1 -1
- package/dist/tools/warp_grep/agent/prompt.js +1 -1
- package/dist/tools/warp_grep/agent/runner.js +1 -1
- package/dist/tools/warp_grep/anthropic.cjs +434 -5
- package/dist/tools/warp_grep/anthropic.cjs.map +1 -1
- package/dist/tools/warp_grep/anthropic.js +6 -5
- package/dist/tools/warp_grep/client.cjs +434 -5
- package/dist/tools/warp_grep/client.cjs.map +1 -1
- package/dist/tools/warp_grep/client.js +5 -4
- package/dist/tools/warp_grep/gemini.cjs +438 -6
- package/dist/tools/warp_grep/gemini.cjs.map +1 -1
- package/dist/tools/warp_grep/gemini.d.ts +1 -1
- package/dist/tools/warp_grep/gemini.js +8 -5
- package/dist/tools/warp_grep/harness.cjs +417 -4
- package/dist/tools/warp_grep/harness.cjs.map +1 -1
- package/dist/tools/warp_grep/harness.js +3 -2
- package/dist/tools/warp_grep/harness.js.map +1 -1
- package/dist/tools/warp_grep/index.cjs +437 -6
- package/dist/tools/warp_grep/index.cjs.map +1 -1
- package/dist/tools/warp_grep/index.d.ts +1 -1
- package/dist/tools/warp_grep/index.js +9 -8
- package/dist/tools/warp_grep/openai.cjs +434 -5
- package/dist/tools/warp_grep/openai.cjs.map +1 -1
- package/dist/tools/warp_grep/openai.js +6 -5
- package/dist/tools/warp_grep/providers/local.cjs +417 -4
- package/dist/tools/warp_grep/providers/local.cjs.map +1 -1
- package/dist/tools/warp_grep/providers/local.d.ts +6 -0
- package/dist/tools/warp_grep/providers/local.js +3 -2
- package/dist/tools/warp_grep/providers/remote.cjs +408 -1
- package/dist/tools/warp_grep/providers/remote.cjs.map +1 -1
- package/dist/tools/warp_grep/providers/remote.d.ts +7 -0
- package/dist/tools/warp_grep/providers/remote.js +3 -2
- package/dist/tools/warp_grep/utils/files.js +1 -1
- package/dist/tools/warp_grep/utils/paths.js +1 -1
- package/dist/tools/warp_grep/utils/ripgrep.js +1 -1
- package/dist/tools/warp_grep/vercel.cjs +434 -5
- package/dist/tools/warp_grep/vercel.cjs.map +1 -1
- package/dist/tools/warp_grep/vercel.js +6 -5
- package/dist/vercel-CsnNSdze.d.ts +74 -0
- package/package.json +1 -1
- package/dist/chunk-GGYMQZXI.js.map +0 -1
- package/dist/chunk-IZISG7DL.js.map +0 -1
- package/dist/chunk-PZ5AY32C.js +0 -10
- package/dist/chunk-ZO4PPFCZ.js.map +0 -1
- /package/dist/{chunk-QZNGKOCZ.js.map → chunk-HBWJLKNM.js.map} +0 -0
- /package/dist/{chunk-PZ5AY32C.js.map → chunk-HKJ2B2AA.js.map} +0 -0
- /package/dist/{chunk-XLSODV6H.js.map → chunk-JPGX6WEV.js.map} +0 -0
- /package/dist/{chunk-AQD2JCLI.js.map → chunk-K3EHH3C4.js.map} +0 -0
- /package/dist/{chunk-7V2KIZT5.js.map → chunk-QEGH3CWQ.js.map} +0 -0
- /package/dist/{chunk-5QIWYEHJ.js.map → chunk-RTDKF6NS.js.map} +0 -0
- /package/dist/{chunk-SU5CKH4F.js.map → chunk-SXE54MIC.js.map} +0 -0
- /package/dist/{chunk-7JEL2VZO.js.map → chunk-VLZEBK7S.js.map} +0 -0
- /package/dist/{chunk-IUG2FHNN.js.map → chunk-W76ICQKY.js.map} +0 -0

package/dist/anthropic-B6my2oBx.d.ts
@@ -0,0 +1,56 @@
+import { Tool } from '@anthropic-ai/sdk/resources/messages.mjs';
+import { BrowserConfig, BrowserTaskInput, BrowserTaskResult } from './tools/browser/types.js';
+
+/**
+ * Anthropic SDK adapter for browser automation tool
+ */
+
+/**
+ * Anthropic tool definition for browser automation
+ */
+declare const browserTool: Tool;
+/**
+ * Create a configured browser tool with execute and formatResult methods
+ *
+ * @param config - Browser worker configuration
+ * @returns Tool definition with execute and formatResult methods
+ *
+ * @example
+ * ```typescript
+ * import Anthropic from '@anthropic-ai/sdk';
+ * import { createBrowserTool } from 'morphsdk/tools/browser/anthropic';
+ *
+ * const tool = createBrowserTool({
+ *   apiKey: process.env.MORPH_API_KEY,
+ *   timeout: 180000
+ * });
+ *
+ * const client = new Anthropic();
+ *
+ * const response = await client.messages.create({
+ *   model: 'claude-sonnet-4-5-20250929',
+ *   tools: [tool], // tool itself is the Tool definition
+ *   messages: [{
+ *     role: 'user',
+ *     content: 'Test the checkout flow at https://3000-abc.e2b.dev'
+ *   }]
+ * });
+ *
+ * // Execute and format
+ * const result = await tool.execute(toolUseBlock.input);
+ * const formatted = tool.formatResult(result);
+ * ```
+ */
+declare function createBrowserTool(config?: BrowserConfig): Tool & {
+    execute: (input: BrowserTaskInput) => Promise<BrowserTaskResult>;
+    formatResult: (result: BrowserTaskResult) => string;
+    getSystemPrompt: () => string;
+};
+
+declare const anthropic_browserTool: typeof browserTool;
+declare const anthropic_createBrowserTool: typeof createBrowserTool;
+declare namespace anthropic {
+  export { anthropic_browserTool as browserTool, anthropic_createBrowserTool as createBrowserTool };
+}
+
+export { anthropic as a, browserTool as b, createBrowserTool as c };
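The declaration above is the shared typing for the new Anthropic browser-tool adapter. Its JSDoc example references a `toolUseBlock` without showing where it comes from; a minimal sketch of that wiring, assuming only the standard Anthropic Messages response shape (the loop and cast below are illustrative, not part of this package):

```typescript
import Anthropic from '@anthropic-ai/sdk';
import { createBrowserTool } from 'morphsdk/tools/browser/anthropic';

const tool = createBrowserTool({ apiKey: process.env.MORPH_API_KEY });
const client = new Anthropic();

const response = await client.messages.create({
  model: 'claude-sonnet-4-5-20250929',
  max_tokens: 1024,
  tools: [tool],
  messages: [{ role: 'user', content: 'Test the checkout flow at https://3000-abc.e2b.dev' }]
});

// Find the tool_use block the model emitted and run the browser task with it.
for (const block of response.content) {
  if (block.type === 'tool_use' && block.name === tool.name) {
    const result = await tool.execute(block.input as Parameters<typeof tool.execute>[0]);
    console.log(tool.formatResult(result));
  }
}
```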

package/dist/{chunk-ZO4PPFCZ.js → chunk-223ZMZP6.js}
@@ -10,6 +10,9 @@ import {
 import {
   runRipgrep
 } from "./chunk-TPP2UGQP.js";
+import {
+  require_ignore
+} from "./chunk-S6KU22MU.js";
 import {
   AGENT_CONFIG,
   DEFAULT_EXCLUDES
@@ -18,11 +21,30 @@ import {
 // tools/warp_grep/providers/local.ts
 import fs from "fs/promises";
 import path from "path";
+var ignore = require_ignore().default || require_ignore();
 var LocalRipgrepProvider = class {
   constructor(repoRoot, excludes = DEFAULT_EXCLUDES) {
     this.repoRoot = repoRoot;
     this.excludes = excludes;
   }
+  ignoreFilter = null;
+  /**
+   * Build an ignore filter that combines DEFAULT_EXCLUDES and .gitignore patterns.
+   * Cached after first call.
+   */
+  async getIgnoreFilter() {
+    if (this.ignoreFilter) return this.ignoreFilter;
+    const ig = ignore();
+    ig.add(this.excludes);
+    try {
+      const gitignorePath = path.join(this.repoRoot, ".gitignore");
+      const gitignoreContent = await fs.readFile(gitignorePath, "utf-8");
+      ig.add(gitignoreContent);
+    } catch {
+    }
+    this.ignoreFilter = ig;
+    return ig;
+  }
   async grep(params) {
     let abs;
     try {
@@ -150,9 +172,11 @@ Details: ${res.stderr}` : ""}`
     const maxResults = params.maxResults ?? AGENT_CONFIG.MAX_OUTPUT_LINES;
     const maxDepth = params.maxDepth ?? AGENT_CONFIG.MAX_LIST_DEPTH;
     const regex = params.pattern ? new RegExp(params.pattern) : null;
+    const ig = await this.getIgnoreFilter();
     const results = [];
     let timedOut = false;
     const startTime = Date.now();
+    const repoRoot = this.repoRoot;
     async function walk(dir, depth) {
       if (Date.now() - startTime > AGENT_CONFIG.LIST_TIMEOUT_MS) {
         timedOut = true;
@@ -163,17 +187,18 @@ Details: ${res.stderr}` : ""}`
       for (const entry of entries) {
         if (timedOut || results.length >= maxResults) break;
         const full = path.join(dir, entry.name);
-        const
-
+        const relToRepo = toRepoRelative(repoRoot, full).replace(/^[.][/\\]?/, "");
+        const isDir = entry.isDirectory();
+        if (ig.ignores(relToRepo) || isDir && ig.ignores(relToRepo + "/")) continue;
         if (regex && !regex.test(entry.name)) continue;
         results.push({
           name: entry.name,
           path: toRepoRelative(path.resolve(""), full),
           // relative display
-          type:
+          type: isDir ? "dir" : "file",
           depth
         });
-        if (
+        if (isDir) {
           await walk(full, depth + 1);
         }
       }
@@ -186,4 +211,4 @@ Details: ${res.stderr}` : ""}`
 export {
   LocalRipgrepProvider
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-223ZMZP6.js.map
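The substantive change in this chunk is that LocalRipgrepProvider.listDirectory now filters entries through the `ignore` package (vendored in the new chunk-S6KU22MU.js and imported as require_ignore) instead of ad-hoc exclude checks. A standalone sketch of the matching semantics the new code relies on, with made-up patterns and paths:

```typescript
import ignore from 'ignore';

// Combine built-in excludes with the repo's .gitignore, as getIgnoreFilter() does.
const ig = ignore();
ig.add(['node_modules', '.git', 'dist']); // stand-in for DEFAULT_EXCLUDES
ig.add('*.log\ncoverage/\n');             // stand-in for .gitignore file content

// Paths must be repo-relative with no leading "./", which is why the diff
// strips it before calling ig.ignores().
console.log(ig.ignores('node_modules/ignore/index.js')); // true
console.log(ig.ignores('src/index.ts'));                 // false

// Directory-only patterns like "coverage/" only match when the path carries a
// trailing slash, hence the extra isDir && ig.ignores(relToRepo + "/") check.
console.log(ig.ignores('coverage/')); // true
console.log(ig.ignores('coverage'));  // false
```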

package/dist/chunk-223ZMZP6.js.map
@@ -0,0 +1 @@
+
{"version":3,"sources":["../tools/warp_grep/providers/local.ts"],"sourcesContent":["import fs from 'fs/promises';\nimport fssync from 'fs';\nimport path from 'path';\n// eslint-disable-next-line @typescript-eslint/no-require-imports\nconst ignore = require('ignore').default || require('ignore');\ntype Ignore = ReturnType<typeof ignore>;\nimport { runRipgrep } from '../utils/ripgrep.js';\nimport { ensureWithinRepo, resolveUnderRepo, toRepoRelative, isSymlink, isTextualFile } from '../utils/paths.js';\nimport type { WarpGrepProvider, GrepResult, ReadResult, ListDirectoryEntry } from './types.js';\nimport { readAllLines } from '../utils/files.js';\nimport { DEFAULT_EXCLUDES, AGENT_CONFIG } from '../agent/config.js';\n\nexport class LocalRipgrepProvider implements WarpGrepProvider {\n private ignoreFilter: Ignore | null = null;\n\n constructor(private readonly repoRoot: string, private readonly excludes: string[] = DEFAULT_EXCLUDES) {}\n\n /**\n * Build an ignore filter that combines DEFAULT_EXCLUDES and .gitignore patterns.\n * Cached after first call.\n */\n private async getIgnoreFilter(): Promise<Ignore> {\n if (this.ignoreFilter) return this.ignoreFilter;\n\n const ig = ignore();\n \n // Add default excludes (node_modules, .git, build outputs, etc.)\n ig.add(this.excludes);\n\n // Try to read .gitignore from repo root\n try {\n const gitignorePath = path.join(this.repoRoot, '.gitignore');\n const gitignoreContent = await fs.readFile(gitignorePath, 'utf-8');\n ig.add(gitignoreContent);\n } catch {\n // No .gitignore or unreadable - that's fine\n }\n\n this.ignoreFilter = ig;\n return ig;\n }\n\n async grep(params: { pattern: string; path: string; glob?: string }): Promise<GrepResult> {\n let abs: string;\n try {\n abs = resolveUnderRepo(this.repoRoot, params.path);\n } catch (err) {\n return {\n lines: [],\n error: `[PATH ERROR] ${err instanceof Error ? err.message : String(err)}`,\n };\n }\n const stat = await fs.stat(abs).catch(() => null);\n if (!stat) return { lines: [] };\n const targetArg = abs === path.resolve(this.repoRoot) ? '.' : toRepoRelative(this.repoRoot, abs);\n const args = [\n '--no-config',\n '--no-heading',\n '--with-filename',\n '--line-number',\n '--color=never',\n '--trim',\n '--max-columns=400',\n '-C', '1',\n ...(params.glob ? ['--glob', params.glob] : []),\n ...this.excludes.flatMap((e) => ['-g', `!${e}`]),\n params.pattern,\n targetArg || '.',\n ];\n const res = await runRipgrep(args, { cwd: this.repoRoot });\n \n // Gracefully handle ripgrep not being available\n if (res.exitCode === -1) {\n return {\n lines: [],\n error: `[RIPGREP NOT AVAILABLE] ripgrep (rg) is required but failed to execute. Please install it:\\n` +\n ` • macOS: brew install ripgrep\\n` +\n ` • Ubuntu/Debian: apt install ripgrep\\n` +\n ` • Windows: choco install ripgrep\\n` +\n ` • Or visit: https://github.com/BurntSushi/ripgrep#installation\\n` +\n `Exit code: ${res.exitCode}${res.stderr ? `\\nDetails: ${res.stderr}` : ''}`,\n };\n }\n \n // Handle other ripgrep errors gracefully\n if (res.exitCode !== 0 && res.exitCode !== 1) {\n return {\n lines: [],\n error: `[RIPGREP ERROR] grep failed with exit code ${res.exitCode}${res.stderr ? 
`: ${res.stderr}` : ''}`,\n };\n }\n \n const lines = (res.stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((l) => l.length > 0);\n if (lines.length > AGENT_CONFIG.MAX_OUTPUT_LINES) {\n return {\n lines: [],\n error: 'query not specific enough, tool tried to return too much context and failed',\n };\n }\n \n return { lines };\n }\n\n async read(params: { path: string; start?: number; end?: number }): Promise<ReadResult> {\n let abs: string;\n try {\n abs = resolveUnderRepo(this.repoRoot, params.path);\n } catch (err) {\n return {\n lines: [],\n error: `[PATH ERROR] ${err instanceof Error ? err.message : String(err)}`,\n };\n }\n const stat = await fs.stat(abs).catch(() => null);\n \n // Gracefully handle file not found / not a file\n if (!stat || !stat.isFile()) {\n return {\n lines: [],\n error: `[FILE NOT FOUND] You tried to read \"${params.path}\" but there is no file at this path. ` +\n `Double-check the path exists and is spelled correctly.`,\n };\n }\n \n // Gracefully handle symlinks\n if (isSymlink(abs)) {\n return {\n lines: [],\n error: `[SYMLINK] You tried to read \"${params.path}\" but this is a symlink. ` +\n `Try reading the actual file it points to instead.`,\n };\n }\n \n // Gracefully handle non-text or too-large files\n if (!isTextualFile(abs)) {\n return {\n lines: [],\n error: `[UNREADABLE FILE] You tried to read \"${params.path}\" but this file is either too large ` +\n `or not a text file, so it cannot be read. Try a different file.`,\n };\n }\n \n let lines: string[];\n try {\n lines = await readAllLines(abs);\n } catch (err) {\n return {\n lines: [],\n error: `[READ ERROR] Failed to read \"${params.path}\": ${err instanceof Error ? err.message : String(err)}`,\n };\n }\n const total = lines.length;\n let s = params.start ?? 1;\n let e = Math.min(params.end ?? total, total);\n if (s > total && total > 0) {\n // Model hallucinated range - fallback to full file\n s = 1;\n e = total;\n }\n const out: string[] = [];\n for (let i = s; i <= e; i += 1) {\n const content = lines[i - 1] ?? '';\n out.push(`${i}|${content}`);\n }\n if (out.length > AGENT_CONFIG.MAX_READ_LINES) {\n const truncated = out.slice(0, AGENT_CONFIG.MAX_READ_LINES);\n truncated.push(`... [truncated: showing ${AGENT_CONFIG.MAX_READ_LINES} of ${out.length} lines]`);\n return { lines: truncated };\n }\n \n return { lines: out };\n }\n\n async listDirectory(params: { path: string; pattern?: string | null; maxResults?: number; maxDepth?: number }): Promise<ListDirectoryEntry[]> {\n let abs: string;\n try {\n abs = resolveUnderRepo(this.repoRoot, params.path);\n } catch {\n // Path outside repo - return empty (graceful failure)\n return [];\n }\n const stat = await fs.stat(abs).catch(() => null);\n if (!stat || !stat.isDirectory()) {\n return [];\n }\n const maxResults = params.maxResults ?? AGENT_CONFIG.MAX_OUTPUT_LINES;\n const maxDepth = params.maxDepth ?? AGENT_CONFIG.MAX_LIST_DEPTH;\n const regex = params.pattern ? 
new RegExp(params.pattern) : null;\n\n // Get the ignore filter (combines DEFAULT_EXCLUDES + .gitignore)\n const ig = await this.getIgnoreFilter();\n\n const results: ListDirectoryEntry[] = [];\n let timedOut = false;\n const startTime = Date.now();\n const repoRoot = this.repoRoot;\n \n async function walk(dir: string, depth: number) {\n if (Date.now() - startTime > AGENT_CONFIG.LIST_TIMEOUT_MS) {\n timedOut = true;\n return;\n }\n if (depth > maxDepth || results.length >= maxResults) return;\n const entries = await fs.readdir(dir, { withFileTypes: true });\n for (const entry of entries) {\n if (timedOut || results.length >= maxResults) break;\n const full = path.join(dir, entry.name);\n \n // Get path relative to repo root for ignore matching\n const relToRepo = toRepoRelative(repoRoot, full).replace(/^[.][/\\\\]?/, '');\n \n // Use ignore filter - handles globs, .gitignore patterns, etc.\n // For directories, also check with trailing slash for proper gitignore semantics\n const isDir = entry.isDirectory();\n if (ig.ignores(relToRepo) || (isDir && ig.ignores(relToRepo + '/'))) continue;\n \n if (regex && !regex.test(entry.name)) continue;\n results.push({\n name: entry.name,\n path: toRepoRelative(path.resolve(''), full), // relative display\n type: isDir ? 'dir' : 'file',\n depth,\n });\n if (isDir) {\n await walk(full, depth + 1);\n }\n }\n }\n await walk(abs, 0);\n return results;\n }\n}\n\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,OAAO,QAAQ;AAEf,OAAO,UAAU;AAEjB,IAAM,SAAS,iBAAkB,WAAW;AAQrC,IAAM,uBAAN,MAAuD;AAAA,EAG5D,YAA6B,UAAmC,WAAqB,kBAAkB;AAA1E;AAAmC;AAAA,EAAwC;AAAA,EAFhG,eAA8B;AAAA;AAAA;AAAA;AAAA;AAAA,EAQtC,MAAc,kBAAmC;AAC/C,QAAI,KAAK,aAAc,QAAO,KAAK;AAEnC,UAAM,KAAK,OAAO;AAGlB,OAAG,IAAI,KAAK,QAAQ;AAGpB,QAAI;AACF,YAAM,gBAAgB,KAAK,KAAK,KAAK,UAAU,YAAY;AAC3D,YAAM,mBAAmB,MAAM,GAAG,SAAS,eAAe,OAAO;AACjE,SAAG,IAAI,gBAAgB;AAAA,IACzB,QAAQ;AAAA,IAER;AAEA,SAAK,eAAe;AACpB,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,KAAK,QAA+E;AACxF,QAAI;AACJ,QAAI;AACF,YAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AAAA,IACnD,SAAS,KAAK;AACZ,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACzE;AAAA,IACF;AACA,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAChD,QAAI,CAAC,KAAM,QAAO,EAAE,OAAO,CAAC,EAAE;AAC9B,UAAM,YAAY,QAAQ,KAAK,QAAQ,KAAK,QAAQ,IAAI,MAAM,eAAe,KAAK,UAAU,GAAG;AAC/F,UAAM,OAAO;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAAM;AAAA,MACN,GAAI,OAAO,OAAO,CAAC,UAAU,OAAO,IAAI,IAAI,CAAC;AAAA,MAC7C,GAAG,KAAK,SAAS,QAAQ,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC;AAAA,MAC/C,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AACA,UAAM,MAAM,MAAM,WAAW,MAAM,EAAE,KAAK,KAAK,SAAS,CAAC;AAGzD,QAAI,IAAI,aAAa,IAAI;AACvB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,aAKc,IAAI,QAAQ,GAAG,IAAI,SAAS;AAAA,WAAc,IAAI,MAAM,KAAK,EAAE;AAAA,MAClF;AAAA,IACF;AAGA,QAAI,IAAI,aAAa,KAAK,IAAI,aAAa,GAAG;AAC5C,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,8CAA8C,IAAI,QAAQ,GAAG,IAAI,SAAS,KAAK,IAAI,MAAM,KAAK,EAAE;AAAA,MACzG;AAAA,IACF;AAEA,UAAM,SAAS,IAAI,UAAU,IAC1B,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAC7B,QAAI,MAAM,SAAS,aAAa,kBAAkB;AAChD,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO,EAAE,MAAM;AAAA,EACjB;AAAA,EAEA,MAAM,KAAK,QAA6E;AACtF,QAAI;AACJ,QAAI;AACF,YAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AAAA,IACnD,SAAS,KAAK;AACZ,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACzE;AAAA,IACF;AACA,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAGhD,QAAI,CAAC,QAAQ,CAAC,KAAK,OAAO,GAAG;AAC3B,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,uCAAuC,OAAO,I
AAI;AAAA,MAE3D;AAAA,IACF;AAGA,QAAI,UAAU,GAAG,GAAG;AAClB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gCAAgC,OAAO,IAAI;AAAA,MAEpD;AAAA,IACF;AAGA,QAAI,CAAC,cAAc,GAAG,GAAG;AACvB,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,wCAAwC,OAAO,IAAI;AAAA,MAE5D;AAAA,IACF;AAEA,QAAI;AACJ,QAAI;AACF,cAAQ,MAAM,aAAa,GAAG;AAAA,IAChC,SAAS,KAAK;AACZ,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gCAAgC,OAAO,IAAI,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC1G;AAAA,IACF;AACA,UAAM,QAAQ,MAAM;AACpB,QAAI,IAAI,OAAO,SAAS;AACxB,QAAI,IAAI,KAAK,IAAI,OAAO,OAAO,OAAO,KAAK;AAC3C,QAAI,IAAI,SAAS,QAAQ,GAAG;AAE1B,UAAI;AACJ,UAAI;AAAA,IACN;AACA,UAAM,MAAgB,CAAC;AACvB,aAAS,IAAI,GAAG,KAAK,GAAG,KAAK,GAAG;AAC9B,YAAM,UAAU,MAAM,IAAI,CAAC,KAAK;AAChC,UAAI,KAAK,GAAG,CAAC,IAAI,OAAO,EAAE;AAAA,IAC5B;AACA,QAAI,IAAI,SAAS,aAAa,gBAAgB;AAC5C,YAAM,YAAY,IAAI,MAAM,GAAG,aAAa,cAAc;AAC1D,gBAAU,KAAK,2BAA2B,aAAa,cAAc,OAAO,IAAI,MAAM,SAAS;AAC/F,aAAO,EAAE,OAAO,UAAU;AAAA,IAC5B;AAEA,WAAO,EAAE,OAAO,IAAI;AAAA,EACtB;AAAA,EAEA,MAAM,cAAc,QAA0H;AAC5I,QAAI;AACJ,QAAI;AACF,YAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AAAA,IACnD,QAAQ;AAEN,aAAO,CAAC;AAAA,IACV;AACA,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAChD,QAAI,CAAC,QAAQ,CAAC,KAAK,YAAY,GAAG;AAChC,aAAO,CAAC;AAAA,IACV;AACA,UAAM,aAAa,OAAO,cAAc,aAAa;AACrD,UAAM,WAAW,OAAO,YAAY,aAAa;AACjD,UAAM,QAAQ,OAAO,UAAU,IAAI,OAAO,OAAO,OAAO,IAAI;AAG5D,UAAM,KAAK,MAAM,KAAK,gBAAgB;AAEtC,UAAM,UAAgC,CAAC;AACvC,QAAI,WAAW;AACf,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,WAAW,KAAK;AAEtB,mBAAe,KAAK,KAAa,OAAe;AAC9C,UAAI,KAAK,IAAI,IAAI,YAAY,aAAa,iBAAiB;AACzD,mBAAW;AACX;AAAA,MACF;AACA,UAAI,QAAQ,YAAY,QAAQ,UAAU,WAAY;AACtD,YAAM,UAAU,MAAM,GAAG,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC7D,iBAAW,SAAS,SAAS;AAC3B,YAAI,YAAY,QAAQ,UAAU,WAAY;AAC9C,cAAM,OAAO,KAAK,KAAK,KAAK,MAAM,IAAI;AAGtC,cAAM,YAAY,eAAe,UAAU,IAAI,EAAE,QAAQ,cAAc,EAAE;AAIzE,cAAM,QAAQ,MAAM,YAAY;AAChC,YAAI,GAAG,QAAQ,SAAS,KAAM,SAAS,GAAG,QAAQ,YAAY,GAAG,EAAI;AAErE,YAAI,SAAS,CAAC,MAAM,KAAK,MAAM,IAAI,EAAG;AACtC,gBAAQ,KAAK;AAAA,UACX,MAAM,MAAM;AAAA,UACZ,MAAM,eAAe,KAAK,QAAQ,EAAE,GAAG,IAAI;AAAA;AAAA,UAC3C,MAAM,QAAQ,QAAQ;AAAA,UACtB;AAAA,QACF,CAAC;AACD,YAAI,OAAO;AACT,gBAAM,KAAK,MAAM,QAAQ,CAAC;AAAA,QAC5B;AAAA,MACF;AAAA,IACF;AACA,UAAM,KAAK,KAAK,CAAC;AACjB,WAAO;AAAA,EACT;AACF;","names":[]}

package/dist/{chunk-GGYMQZXI.js → chunk-6RFT7K7F.js}
@@ -1,13 +1,31 @@
 import {
-
+  require_ignore
+} from "./chunk-S6KU22MU.js";
+import {
+  AGENT_CONFIG,
+  DEFAULT_EXCLUDES
 } from "./chunk-XT5ZO6ES.js";

 // tools/warp_grep/providers/remote.ts
+var ignore = require_ignore().default || require_ignore();
 var RemoteCommandsProvider = class {
   constructor(repoRoot, commands) {
     this.repoRoot = repoRoot;
     this.commands = commands;
   }
+  ignoreFilter = null;
+  /**
+   * Build an ignore filter from DEFAULT_EXCLUDES.
+   * Note: For remote providers, we can't read .gitignore from the remote sandbox,
+   * so we only use the built-in excludes.
+   */
+  getIgnoreFilter() {
+    if (this.ignoreFilter) return this.ignoreFilter;
+    const ig = ignore();
+    ig.add(DEFAULT_EXCLUDES);
+    this.ignoreFilter = ig;
+    return ig;
+  }
   /**
    * Run grep command and parse ripgrep output
    */
@@ -61,6 +79,7 @@ var RemoteCommandsProvider = class {
   async listDirectory(params) {
     const maxDepth = params.maxDepth ?? AGENT_CONFIG.MAX_LIST_DEPTH;
     const maxResults = params.maxResults ?? AGENT_CONFIG.MAX_OUTPUT_LINES;
+    const ig = this.getIgnoreFilter();
     try {
       const stdout = await this.commands.listDir(params.path, maxDepth);
       const paths = (stdout || "").trim().split(/\r?\n/).filter((p) => p.length > 0);
@@ -69,11 +88,12 @@ var RemoteCommandsProvider = class {
       for (const fullPath of paths) {
         if (fullPath === params.path || fullPath === this.repoRoot) continue;
         const name = fullPath.split("/").pop() || "";
-        if (regex && !regex.test(name)) continue;
         let relativePath = fullPath;
         if (fullPath.startsWith(this.repoRoot)) {
           relativePath = fullPath.slice(this.repoRoot.length).replace(/^\//, "");
         }
+        if (ig.ignores(relativePath)) continue;
+        if (regex && !regex.test(name)) continue;
         const depth = relativePath.split("/").filter(Boolean).length - 1;
         const hasExtension = name.includes(".") && !name.startsWith(".");
         const type = hasExtension ? "file" : "dir";
@@ -95,4 +115,4 @@ var RemoteCommandsProvider = class {
 export {
   RemoteCommandsProvider
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-6RFT7K7F.js.map
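RemoteCommandsProvider gains the same filtering, but builds its filter from DEFAULT_EXCLUDES only, since it cannot read a .gitignore inside the remote sandbox. Wiring it up looks roughly like the sketch below; the `run` helper and the subpath import are assumptions (substitute your sandbox runner and however your bundler resolves the dist path), while the three-command shape matches the calls visible in this chunk:

```typescript
import { RemoteCommandsProvider } from '@morphllm/morphsdk/tools/warp_grep/providers/remote';

// Hypothetical sandbox runner (E2B, Modal, SSH, ...).
declare function run(cmd: string): Promise<{ stdout: string }>;

const provider = new RemoteCommandsProvider('/home/repo', {
  grep: async (pattern, path) => (await run(`rg -n '${pattern}' '${path}'`)).stdout,
  read: async (path, start, end) => (await run(`sed -n '${start},${end}p' '${path}'`)).stdout,
  listDir: async (path, maxDepth) => (await run(`find '${path}' -maxdepth ${maxDepth}`)).stdout,
});

// listDirectory() now drops DEFAULT_EXCLUDES matches (node_modules, .git, ...)
// before applying the optional name pattern.
const entries = await provider.listDirectory({ path: '/home/repo', maxDepth: 2 });
```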

package/dist/chunk-6RFT7K7F.js.map
@@ -0,0 +1 @@
+
{"version":3,"sources":["../tools/warp_grep/providers/remote.ts"],"sourcesContent":["/**\n * RemoteCommandsProvider - wraps simple RemoteCommands into WarpGrepProvider\n * \n * Handles parsing of raw stdout from grep/read/listDir commands.\n * Users just return stdout, SDK handles all format conversion.\n */\n\nimport type { WarpGrepProvider, GrepResult, ReadResult, ListDirectoryEntry } from './types.js';\nimport type { RemoteCommands } from '../types.js';\nimport { AGENT_CONFIG, DEFAULT_EXCLUDES } from '../agent/config.js';\n\n// eslint-disable-next-line @typescript-eslint/no-require-imports\nconst ignore = require('ignore').default || require('ignore');\ntype Ignore = ReturnType<typeof ignore>;\n\n/**\n * Wraps simple RemoteCommands functions into a full WarpGrepProvider.\n * \n * This allows users to provide three simple functions that return raw stdout,\n * and the SDK handles all parsing internally.\n * \n * @example\n * ```typescript\n * const provider = new RemoteCommandsProvider('/home/repo', {\n * grep: async (pattern, path) => {\n * const r = await sandbox.run(`rg '${pattern}' '${path}'`);\n * return r.stdout;\n * },\n * read: async (path, start, end) => {\n * const r = await sandbox.run(`sed -n '${start},${end}p' '${path}'`);\n * return r.stdout;\n * },\n * listDir: async (path, maxDepth) => {\n * const r = await sandbox.run(`find '${path}' -maxdepth ${maxDepth}`);\n * return r.stdout;\n * },\n * });\n * ```\n */\nexport class RemoteCommandsProvider implements WarpGrepProvider {\n private ignoreFilter: Ignore | null = null;\n\n constructor(\n private readonly repoRoot: string,\n private readonly commands: RemoteCommands\n ) {}\n\n /**\n * Build an ignore filter from DEFAULT_EXCLUDES.\n * Note: For remote providers, we can't read .gitignore from the remote sandbox,\n * so we only use the built-in excludes.\n */\n private getIgnoreFilter(): Ignore {\n if (this.ignoreFilter) return this.ignoreFilter;\n const ig = ignore();\n ig.add(DEFAULT_EXCLUDES);\n this.ignoreFilter = ig;\n return ig;\n }\n\n /**\n * Run grep command and parse ripgrep output\n */\n async grep(params: { pattern: string; path: string; glob?: string }): Promise<GrepResult> {\n try {\n const stdout = await this.commands.grep(params.pattern, params.path, params.glob);\n \n // Parse ripgrep output: each line is \"path:line:content\" or \"path-line-content\" for context\n const lines = (stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((l) => l.length > 0);\n \n // Check if output is too large\n if (lines.length > AGENT_CONFIG.MAX_OUTPUT_LINES) {\n return {\n lines: [],\n error: 'Query not specific enough - too many results returned. Try a more specific pattern.',\n };\n }\n \n return { lines };\n } catch (error) {\n return {\n lines: [],\n error: `[GREP ERROR] ${error instanceof Error ? error.message : String(error)}`,\n };\n }\n }\n\n /**\n * Read file and add line numbers\n */\n async read(params: { path: string; start?: number; end?: number }): Promise<ReadResult> {\n const start = params.start ?? 1;\n const end = params.end ?? 
1_000_000;\n \n try {\n const stdout = await this.commands.read(params.path, start, end);\n \n // Split content into lines and add line numbers\n const contentLines = (stdout || '').split('\\n');\n \n // Remove trailing empty line if present (common with sed output)\n if (contentLines.length > 0 && contentLines[contentLines.length - 1] === '') {\n contentLines.pop();\n }\n \n // Format as \"lineNumber|content\"\n const lines = contentLines.map((content, idx) => `${start + idx}|${content}`);\n \n // Check if output is too large\n if (lines.length > AGENT_CONFIG.MAX_READ_LINES) {\n const truncated = lines.slice(0, AGENT_CONFIG.MAX_READ_LINES);\n truncated.push(`... [truncated: showing ${AGENT_CONFIG.MAX_READ_LINES} of ${lines.length} lines]`);\n return { lines: truncated };\n }\n \n return { lines };\n } catch (error) {\n return {\n lines: [],\n error: `[READ ERROR] ${error instanceof Error ? error.message : String(error)}`,\n };\n }\n }\n\n /**\n * List directory and parse find output\n */\n async listDirectory(params: { \n path: string; \n pattern?: string | null; \n maxResults?: number; \n maxDepth?: number;\n }): Promise<ListDirectoryEntry[]> {\n const maxDepth = params.maxDepth ?? AGENT_CONFIG.MAX_LIST_DEPTH;\n const maxResults = params.maxResults ?? AGENT_CONFIG.MAX_OUTPUT_LINES;\n \n // Get the ignore filter (DEFAULT_EXCLUDES)\n const ig = this.getIgnoreFilter();\n \n try {\n const stdout = await this.commands.listDir(params.path, maxDepth);\n \n // Parse find output: one path per line\n const paths = (stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((p) => p.length > 0);\n \n const regex = params.pattern ? new RegExp(params.pattern) : null;\n const entries: ListDirectoryEntry[] = [];\n \n for (const fullPath of paths) {\n // Skip the root path itself\n if (fullPath === params.path || fullPath === this.repoRoot) continue;\n \n const name = fullPath.split('/').pop() || '';\n \n // Determine relative path\n let relativePath = fullPath;\n if (fullPath.startsWith(this.repoRoot)) {\n relativePath = fullPath.slice(this.repoRoot.length).replace(/^\\//, '');\n }\n \n // Filter using ignore rules (DEFAULT_EXCLUDES)\n if (ig.ignores(relativePath)) continue;\n \n // Apply pattern filter if provided\n if (regex && !regex.test(name)) continue;\n \n // Calculate depth based on path separators\n const depth = relativePath.split('/').filter(Boolean).length - 1;\n \n // Determine if it's a directory (ends with / or infer from path structure)\n // Note: Most sandbox find commands don't indicate type, so we guess based on extension\n const hasExtension = name.includes('.') && !name.startsWith('.');\n const type: 'file' | 'dir' = hasExtension ? 
'file' : 'dir';\n \n entries.push({\n name,\n path: relativePath,\n type,\n depth: Math.max(0, depth),\n });\n \n if (entries.length >= maxResults) break;\n }\n \n return entries;\n } catch (error) {\n // Return empty array on error (consistent with LocalRipgrepProvider)\n return [];\n }\n }\n}\n\n\n\n\n"],"mappings":";;;;;;;;;AAYA,IAAM,SAAS,iBAAkB,WAAW;AA2BrC,IAAM,yBAAN,MAAyD;AAAA,EAG9D,YACmB,UACA,UACjB;AAFiB;AACA;AAAA,EAChB;AAAA,EALK,eAA8B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAY9B,kBAA0B;AAChC,QAAI,KAAK,aAAc,QAAO,KAAK;AACnC,UAAM,KAAK,OAAO;AAClB,OAAG,IAAI,gBAAgB;AACvB,SAAK,eAAe;AACpB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,QAA+E;AACxF,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,SAAS,KAAK,OAAO,SAAS,OAAO,MAAM,OAAO,IAAI;AAGhF,YAAM,SAAS,UAAU,IACtB,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAG7B,UAAI,MAAM,SAAS,aAAa,kBAAkB;AAChD,eAAO;AAAA,UACL,OAAO,CAAC;AAAA,UACR,OAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO,EAAE,MAAM;AAAA,IACjB,SAAS,OAAO;AACd,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC/E;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,QAA6E;AACtF,UAAM,QAAQ,OAAO,SAAS;AAC9B,UAAM,MAAM,OAAO,OAAO;AAE1B,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,SAAS,KAAK,OAAO,MAAM,OAAO,GAAG;AAG/D,YAAM,gBAAgB,UAAU,IAAI,MAAM,IAAI;AAG9C,UAAI,aAAa,SAAS,KAAK,aAAa,aAAa,SAAS,CAAC,MAAM,IAAI;AAC3E,qBAAa,IAAI;AAAA,MACnB;AAGA,YAAM,QAAQ,aAAa,IAAI,CAAC,SAAS,QAAQ,GAAG,QAAQ,GAAG,IAAI,OAAO,EAAE;AAG5E,UAAI,MAAM,SAAS,aAAa,gBAAgB;AAC9C,cAAM,YAAY,MAAM,MAAM,GAAG,aAAa,cAAc;AAC5D,kBAAU,KAAK,2BAA2B,aAAa,cAAc,OAAO,MAAM,MAAM,SAAS;AACjG,eAAO,EAAE,OAAO,UAAU;AAAA,MAC5B;AAEA,aAAO,EAAE,MAAM;AAAA,IACjB,SAAS,OAAO;AACd,aAAO;AAAA,QACL,OAAO,CAAC;AAAA,QACR,OAAO,gBAAgB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAC/E;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,QAKc;AAChC,UAAM,WAAW,OAAO,YAAY,aAAa;AACjD,UAAM,aAAa,OAAO,cAAc,aAAa;AAGrD,UAAM,KAAK,KAAK,gBAAgB;AAEhC,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,SAAS,QAAQ,OAAO,MAAM,QAAQ;AAGhE,YAAM,SAAS,UAAU,IACtB,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAE7B,YAAM,QAAQ,OAAO,UAAU,IAAI,OAAO,OAAO,OAAO,IAAI;AAC5D,YAAM,UAAgC,CAAC;AAEvC,iBAAW,YAAY,OAAO;AAE5B,YAAI,aAAa,OAAO,QAAQ,aAAa,KAAK,SAAU;AAE5D,cAAM,OAAO,SAAS,MAAM,GAAG,EAAE,IAAI,KAAK;AAG1C,YAAI,eAAe;AACnB,YAAI,SAAS,WAAW,KAAK,QAAQ,GAAG;AACtC,yBAAe,SAAS,MAAM,KAAK,SAAS,MAAM,EAAE,QAAQ,OAAO,EAAE;AAAA,QACvE;AAGA,YAAI,GAAG,QAAQ,YAAY,EAAG;AAG9B,YAAI,SAAS,CAAC,MAAM,KAAK,IAAI,EAAG;AAGhC,cAAM,QAAQ,aAAa,MAAM,GAAG,EAAE,OAAO,OAAO,EAAE,SAAS;AAI/D,cAAM,eAAe,KAAK,SAAS,GAAG,KAAK,CAAC,KAAK,WAAW,GAAG;AAC/D,cAAM,OAAuB,eAAe,SAAS;AAErD,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA,MAAM;AAAA,UACN;AAAA,UACA,OAAO,KAAK,IAAI,GAAG,KAAK;AAAA,QAC1B,CAAC;AAED,YAAI,QAAQ,UAAU,WAAY;AAAA,MACpC;AAEA,aAAO;AAAA,IACT,SAAS,OAAO;AAEd,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AACF;","names":[]}

package/dist/chunk-AAIUUX4Y.js
@@ -0,0 +1,80 @@
+import {
+  BROWSER_SYSTEM_PROMPT,
+  BROWSER_TOOL_DESCRIPTION
+} from "./chunk-EI4UKP24.js";
+import {
+  executeBrowserTask
+} from "./chunk-MFZP347Z.js";
+import {
+  __export
+} from "./chunk-HKJ2B2AA.js";
+
+// tools/browser/anthropic.ts
+var anthropic_exports = {};
+__export(anthropic_exports, {
+  browserTool: () => browserTool,
+  createBrowserTool: () => createBrowserTool
+});
+var browserTool = {
+  name: "browser_task",
+  description: BROWSER_TOOL_DESCRIPTION,
+  input_schema: {
+    type: "object",
+    properties: {
+      task: {
+        type: "string",
+        description: 'Natural language description of what to do (e.g., "Test checkout flow for buying a pineapple")'
+      },
+      url: {
+        type: "string",
+        description: "Starting URL (e.g., https://3000-xyz.e2b.dev). Required if navigating to a specific page."
+      },
+      max_steps: {
+        type: "number",
+        description: "Maximum number of browser actions to take (1-50). Default: 10. Use 15-30 for complex flows.",
+        default: 10
+      },
+      region: {
+        type: "string",
+        enum: ["sfo", "lon"],
+        description: "Browserless region: sfo (US West Coast) or lon (Europe). Default: sfo.",
+        default: "sfo"
+      }
+    },
+    required: ["task"]
+  }
+};
+function formatResult(result) {
+  if (result.success) {
+    const parts = [
+      "\u2705 Browser task completed successfully",
+      `Steps taken: ${result.steps_taken ?? 0}`,
+      result.execution_time_ms ? `Execution time: ${result.execution_time_ms}ms` : null,
+      "",
+      "Result:",
+      result.result || "Task completed"
+    ];
+    return parts.filter(Boolean).join("\n");
+  }
+  return `\u274C Browser task failed: ${result.error || "Unknown error"}`;
+}
+function createBrowserTool(config) {
+  return Object.assign({}, browserTool, {
+    execute: async (input) => {
+      return executeBrowserTask(input, config);
+    },
+    formatResult: (result) => {
+      return formatResult(result);
+    },
+    getSystemPrompt: () => {
+      return BROWSER_SYSTEM_PROMPT;
+    }
+  });
+}
+
+export {
+  browserTool,
+  createBrowserTool,
+  anthropic_exports
+};
+//# sourceMappingURL=chunk-AAIUUX4Y.js.map

package/dist/chunk-AAIUUX4Y.js.map
@@ -0,0 +1 @@
+
{"version":3,"sources":["../tools/browser/anthropic.ts"],"sourcesContent":["/**\n * Anthropic SDK adapter for browser automation tool\n */\n\nimport type { Tool } from '@anthropic-ai/sdk/resources/messages.mjs';\nimport { executeBrowserTask } from './core.js';\nimport type {\n BrowserConfig,\n BrowserTaskInput,\n BrowserTaskResult,\n} from './types.js';\nimport { BROWSER_TOOL_DESCRIPTION, BROWSER_SYSTEM_PROMPT } from './prompts.js';\n\n/**\n * Anthropic tool definition for browser automation\n */\nexport const browserTool: Tool = {\n name: 'browser_task',\n description: BROWSER_TOOL_DESCRIPTION,\n input_schema: {\n type: 'object',\n properties: {\n task: {\n type: 'string',\n description: 'Natural language description of what to do (e.g., \"Test checkout flow for buying a pineapple\")',\n },\n url: {\n type: 'string',\n description: 'Starting URL (e.g., https://3000-xyz.e2b.dev). Required if navigating to a specific page.',\n },\n max_steps: {\n type: 'number',\n description: 'Maximum number of browser actions to take (1-50). Default: 10. Use 15-30 for complex flows.',\n default: 10,\n },\n region: {\n type: 'string',\n enum: ['sfo', 'lon'],\n description: 'Browserless region: sfo (US West Coast) or lon (Europe). Default: sfo.',\n default: 'sfo',\n },\n },\n required: ['task'],\n },\n};\n\n/**\n * Format browser task result for Anthropic tool result\n * \n * Returns a concise summary suitable for agent context. The full result object\n * (with urls, errors, action_history, judgement, etc.) is available when calling\n * execute() directly, but this formatted string omits those details to save tokens.\n * \n * @param result - Browser task result with full history data\n * @returns Formatted string summary for tool result\n */\nfunction formatResult(result: BrowserTaskResult): string {\n if (result.success) {\n const parts = [\n '✅ Browser task completed successfully',\n `Steps taken: ${result.steps_taken ?? 0}`,\n result.execution_time_ms ? 
`Execution time: ${result.execution_time_ms}ms` : null,\n '',\n 'Result:',\n result.result || 'Task completed',\n ];\n return parts.filter(Boolean).join('\\n');\n }\n\n return `❌ Browser task failed: ${result.error || 'Unknown error'}`;\n}\n\n/**\n * Create a configured browser tool with execute and formatResult methods\n * \n * @param config - Browser worker configuration\n * @returns Tool definition with execute and formatResult methods\n * \n * @example\n * ```typescript\n * import Anthropic from '@anthropic-ai/sdk';\n * import { createBrowserTool } from 'morphsdk/tools/browser/anthropic';\n * \n * const tool = createBrowserTool({\n * apiKey: process.env.MORPH_API_KEY,\n * timeout: 180000\n * });\n * \n * const client = new Anthropic();\n * \n * const response = await client.messages.create({\n * model: 'claude-sonnet-4-5-20250929',\n * tools: [tool], // tool itself is the Tool definition\n * messages: [{\n * role: 'user',\n * content: 'Test the checkout flow at https://3000-abc.e2b.dev'\n * }]\n * });\n * \n * // Execute and format\n * const result = await tool.execute(toolUseBlock.input);\n * const formatted = tool.formatResult(result);\n * ```\n */\nexport function createBrowserTool(config?: BrowserConfig) {\n return Object.assign({}, browserTool, {\n execute: async (input: BrowserTaskInput): Promise<BrowserTaskResult> => {\n return executeBrowserTask(input, config);\n },\n formatResult: (result: BrowserTaskResult): string => {\n return formatResult(result);\n },\n getSystemPrompt: (): string => {\n return BROWSER_SYSTEM_PROMPT;\n },\n });\n}\n\n"],"mappings":";;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAgBO,IAAM,cAAoB;AAAA,EAC/B,MAAM;AAAA,EACN,aAAa;AAAA,EACb,cAAc;AAAA,IACZ,MAAM;AAAA,IACN,YAAY;AAAA,MACV,MAAM;AAAA,QACJ,MAAM;AAAA,QACN,aAAa;AAAA,MACf;AAAA,MACA,KAAK;AAAA,QACH,MAAM;AAAA,QACN,aAAa;AAAA,MACf;AAAA,MACA,WAAW;AAAA,QACT,MAAM;AAAA,QACN,aAAa;AAAA,QACb,SAAS;AAAA,MACX;AAAA,MACA,QAAQ;AAAA,QACN,MAAM;AAAA,QACN,MAAM,CAAC,OAAO,KAAK;AAAA,QACnB,aAAa;AAAA,QACb,SAAS;AAAA,MACX;AAAA,IACF;AAAA,IACA,UAAU,CAAC,MAAM;AAAA,EACnB;AACF;AAYA,SAAS,aAAa,QAAmC;AACvD,MAAI,OAAO,SAAS;AAClB,UAAM,QAAQ;AAAA,MACZ;AAAA,MACA,gBAAgB,OAAO,eAAe,CAAC;AAAA,MACvC,OAAO,oBAAoB,mBAAmB,OAAO,iBAAiB,OAAO;AAAA,MAC7E;AAAA,MACA;AAAA,MACA,OAAO,UAAU;AAAA,IACnB;AACA,WAAO,MAAM,OAAO,OAAO,EAAE,KAAK,IAAI;AAAA,EACxC;AAEA,SAAO,+BAA0B,OAAO,SAAS,eAAe;AAClE;AAkCO,SAAS,kBAAkB,QAAwB;AACxD,SAAO,OAAO,OAAO,CAAC,GAAG,aAAa;AAAA,IACpC,SAAS,OAAO,UAAwD;AACtE,aAAO,mBAAmB,OAAO,MAAM;AAAA,IACzC;AAAA,IACA,cAAc,CAAC,WAAsC;AACnD,aAAO,aAAa,MAAM;AAAA,IAC5B;AAAA,IACA,iBAAiB,MAAc;AAC7B,aAAO;AAAA,IACT;AAAA,EACF,CAAC;AACH;","names":[]}

package/dist/chunk-BYNVJ4ON.js
@@ -0,0 +1,57 @@
+import {
+  BROWSER_TOOL_DESCRIPTION
+} from "./chunk-EI4UKP24.js";
+import {
+  executeBrowserTask
+} from "./chunk-MFZP347Z.js";
+import {
+  __export
+} from "./chunk-HKJ2B2AA.js";
+
+// tools/browser/vercel.ts
+var vercel_exports = {};
+__export(vercel_exports, {
+  browserTool: () => browserTool,
+  createBrowserTool: () => createBrowserTool
+});
+import { tool as createTool } from "ai";
+import { z } from "zod";
+function createBrowserTool(config) {
+  const schema = z.object({
+    task: z.string().describe('Natural language description of what to do (e.g., "Test checkout flow for buying a pineapple")'),
+    url: z.string().optional().describe("Starting URL (e.g., https://3000-xyz.e2b.dev)"),
+    max_steps: z.number().min(1).max(50).default(10).describe("Maximum number of browser actions to take"),
+    region: z.enum(["sfo", "lon"]).default("sfo").describe("Browserless region: sfo (US West) or lon (Europe)")
+  });
+  return createTool({
+    description: BROWSER_TOOL_DESCRIPTION,
+    inputSchema: schema,
+    execute: async (params) => {
+      const { task, url, max_steps, region } = params;
+      const result = await executeBrowserTask(
+        { task, url, max_steps, region },
+        config
+      );
+      if (result.success) {
+        return {
+          success: true,
+          result: result.result,
+          steps_taken: result.steps_taken,
+          execution_time_ms: result.execution_time_ms
+        };
+      }
+      return {
+        success: false,
+        error: result.error
+      };
+    }
+  });
+}
+var browserTool = createBrowserTool();
+
+export {
+  createBrowserTool,
+  browserTool,
+  vercel_exports
+};
+//# sourceMappingURL=chunk-BYNVJ4ON.js.map
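chunk-BYNVJ4ON.js is the Vercel AI SDK flavor of the same browser tool, defined with `tool()` from `ai` and a zod input schema, so it plugs straight into generateText/streamText. A minimal usage sketch, assuming the `ai` and `@ai-sdk/anthropic` packages and the `morphsdk/tools/browser/vercel` subpath used in the package's own JSDoc:

```typescript
import { generateText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';
import { createBrowserTool } from 'morphsdk/tools/browser/vercel';

const browserTask = createBrowserTool({ apiKey: process.env.MORPH_API_KEY });

// The model decides when to call the tool; its execute() runs executeBrowserTask
// and returns the trimmed { success, result, steps_taken, execution_time_ms } summary.
const { text, toolResults } = await generateText({
  model: anthropic('claude-sonnet-4-5-20250929'),
  tools: { browser_task: browserTask },
  prompt: 'Test the checkout flow at https://3000-abc.e2b.dev',
});
console.log(text, toolResults);
```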

package/dist/chunk-BYNVJ4ON.js.map
@@ -0,0 +1 @@
+
{"version":3,"sources":["../tools/browser/vercel.ts"],"sourcesContent":["/**\n * Vercel AI SDK adapter for browser automation tool\n */\n\nimport { tool as createTool } from 'ai';\nimport { z } from 'zod';\nimport { executeBrowserTask } from './core.js';\nimport type { BrowserConfig } from './types.js';\nimport { BROWSER_TOOL_DESCRIPTION } from './prompts.js';\n\n/**\n * Create Vercel AI SDK tool for browser automation\n * \n * @param config - Optional browser worker configuration\n * @returns Vercel AI SDK tool\n * \n * @example\n * ```typescript\n * import { generateText } from 'ai';\n * import { anthropic } from '@ai-sdk/anthropic';\n * import { createBrowserTool } from 'morphsdk/tools/browser/vercel';\n * \n * const browserTool = createBrowserTool({\n * apiUrl: 'https://browser-worker.example.com'\n * });\n * \n * const result = await generateText({\n * model: anthropic('claude-sonnet-4-5-20250929'),\n * tools: { browserTask: browserTool },\n * prompt: 'Test the checkout flow at https://3000-abc.e2b.dev',\n * maxSteps: 5\n * });\n * ```\n */\nexport function createBrowserTool(config?: BrowserConfig) {\n const schema = z.object({\n task: z.string().describe('Natural language description of what to do (e.g., \"Test checkout flow for buying a pineapple\")'),\n url: z.string().optional().describe('Starting URL (e.g., https://3000-xyz.e2b.dev)'),\n max_steps: z.number().min(1).max(50).default(10).describe('Maximum number of browser actions to take'),\n region: z.enum(['sfo', 'lon']).default('sfo').describe('Browserless region: sfo (US West) or lon (Europe)'),\n });\n\n return createTool({\n description: BROWSER_TOOL_DESCRIPTION,\n inputSchema: schema,\n execute: async (params) => {\n const { task, url, max_steps, region } = params;\n const result = await executeBrowserTask(\n { task, url, max_steps, region },\n config\n );\n\n // Return minimal summary for agent context (to save tokens)\n // Full result with urls, errors, action_history, judgement, etc. is available\n // when calling executeBrowserTask() directly outside of agent tools\n if (result.success) {\n return {\n success: true,\n result: result.result,\n steps_taken: result.steps_taken,\n execution_time_ms: result.execution_time_ms,\n };\n }\n\n return {\n success: false,\n error: result.error,\n };\n },\n });\n}\n\n/**\n * Default browser tool for Vercel AI SDK\n */\nexport const browserTool = createBrowserTool();\n\n"],"mappings":";;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAIA,SAAS,QAAQ,kBAAkB;AACnC,SAAS,SAAS;AA6BX,SAAS,kBAAkB,QAAwB;AACxD,QAAM,SAAS,EAAE,OAAO;AAAA,IACtB,MAAM,EAAE,OAAO,EAAE,SAAS,gGAAgG;AAAA,IAC1H,KAAK,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS,+CAA+C;AAAA,IACnF,WAAW,EAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,EAAE,EAAE,QAAQ,EAAE,EAAE,SAAS,2CAA2C;AAAA,IACrG,QAAQ,EAAE,KAAK,CAAC,OAAO,KAAK,CAAC,EAAE,QAAQ,KAAK,EAAE,SAAS,mDAAmD;AAAA,EAC5G,CAAC;AAED,SAAO,WAAW;AAAA,IAChB,aAAa;AAAA,IACb,aAAa;AAAA,IACb,SAAS,OAAO,WAAW;AACzB,YAAM,EAAE,MAAM,KAAK,WAAW,OAAO,IAAI;AACzC,YAAM,SAAS,MAAM;AAAA,QACnB,EAAE,MAAM,KAAK,WAAW,OAAO;AAAA,QAC/B;AAAA,MACF;AAKA,UAAI,OAAO,SAAS;AAClB,eAAO;AAAA,UACL,SAAS;AAAA,UACT,QAAQ,OAAO;AAAA,UACf,aAAa,OAAO;AAAA,UACpB,mBAAmB,OAAO;AAAA,QAC5B;AAAA,MACF;AAEA,aAAO;AAAA,QACL,SAAS;AAAA,QACT,OAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAKO,IAAM,cAAc,kBAAkB;","names":[]}

package/dist/{chunk-ASO5YWQ4.js → chunk-FURFQDXF.js}
@@ -5,18 +5,19 @@ import {
 import {
   executeToolCall,
   formatResult
-} from "./chunk-
+} from "./chunk-VLZEBK7S.js";
 import {
   getSystemPrompt
 } from "./chunk-Q5AHGIQO.js";
 import {
   __export
-} from "./chunk-
+} from "./chunk-HKJ2B2AA.js";

 // tools/warp_grep/gemini.ts
 var gemini_exports = {};
 __export(gemini_exports, {
   createMorphWarpGrepTool: () => createMorphWarpGrepTool,
+  createWarpGrepTool: () => createWarpGrepTool,
   default: () => gemini_default,
   execute: () => execute,
   formatResult: () => formatResult,
@@ -42,7 +43,7 @@ var warpGrepFunctionDeclaration = {
 async function execute(input, config) {
   return executeToolCall(input, config);
 }
-function
+function createWarpGrepTool(config) {
   const declaration = {
     name: config.name ?? WARP_GREP_TOOL_NAME,
     description: config.description ?? WARP_GREP_DESCRIPTION,
@@ -60,13 +61,15 @@ function createMorphWarpGrepTool(config) {
     }
   });
 }
+var createMorphWarpGrepTool = createWarpGrepTool;
 var gemini_default = warpGrepFunctionDeclaration;

 export {
   warpGrepFunctionDeclaration,
   execute,
+  createWarpGrepTool,
   createMorphWarpGrepTool,
   gemini_default,
   gemini_exports
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-FURFQDXF.js.map
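The gemini adapter change is essentially a rename: createWarpGrepTool is now the primary factory and createMorphWarpGrepTool survives as a backwards-compatible alias, so existing callers keep working. A short sketch of the new name against @google/generative-ai, following the JSDoc example embedded in gemini.ts (the import subpath is the one that JSDoc uses):

```typescript
import { GoogleGenerativeAI } from '@google/generative-ai';
import { createWarpGrepTool } from '@morphllm/morphsdk/tools/warp-grep/gemini';

const tool = createWarpGrepTool({ repoRoot: '.' }); // createMorphWarpGrepTool still works

const genAI = new GoogleGenerativeAI(process.env.GOOGLE_API_KEY!);
const model = genAI.getGenerativeModel({
  model: 'gemini-2.0-flash',
  tools: [{ functionDeclarations: [tool] }],
});

const chat = model.startChat();
const result = await chat.sendMessage('Find authentication middleware');

const call = result.response.functionCalls()?.[0];
if (call && call.name === tool.name) {
  const searchResult = await tool.execute(call.args);
  console.log(tool.formatResult(searchResult));
}
```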

package/dist/{chunk-ASO5YWQ4.js.map → chunk-FURFQDXF.js.map}
@@ -1 +1 @@
-
{"version":3,"sources":["../tools/warp_grep/gemini.ts"],"sourcesContent":["/**\n * Google Gemini SDK adapter for morph-warp-grep tool\n */\n\nimport type { FunctionDeclaration, FunctionDeclarationSchema } from '@google/generative-ai';\nimport { SchemaType } from '@google/generative-ai';\nimport { executeToolCall, formatResult } from './client.js';\nimport { WARP_GREP_DESCRIPTION, WARP_GREP_TOOL_NAME, getSystemPrompt } from './prompts.js';\nimport type { WarpGrepToolConfig, WarpGrepResult } from './types.js';\n\n/**\n * Parameter schema for the warp grep tool (Gemini format)\n */\nconst TOOL_PARAMETERS: FunctionDeclarationSchema = {\n type: SchemaType.OBJECT,\n properties: {\n query: { \n type: SchemaType.STRING, \n description: 'Free-form repository question' \n },\n },\n required: ['query'],\n};\n\n/**\n * Gemini-native warp grep function declaration\n * \n * @example\n * ```typescript\n * import { GoogleGenerativeAI } from '@google/generative-ai';\n * import { warpGrepFunctionDeclaration, execute } from '@morphllm/morphsdk/tools/warp-grep/gemini';\n * \n * const genAI = new GoogleGenerativeAI(process.env.GOOGLE_API_KEY);\n * const model = genAI.getGenerativeModel({\n * model: 'gemini-2.0-flash',\n * tools: [{ functionDeclarations: [warpGrepFunctionDeclaration] }]\n * });\n * \n * const chat = model.startChat();\n * const result = await chat.sendMessage('Find authentication middleware');\n * \n * // Handle function call\n * const call = result.response.functionCalls()?.[0];\n * if (call) {\n * const searchResult = await execute(call.args, { repoRoot: '.' });\n * console.log(searchResult);\n * }\n * ```\n */\nexport const warpGrepFunctionDeclaration: FunctionDeclaration = {\n name: WARP_GREP_TOOL_NAME,\n description: WARP_GREP_DESCRIPTION,\n parameters: TOOL_PARAMETERS,\n};\n\n/**\n * Execute warp grep search\n * \n * @param input - Tool input with query\n * @param config - Configuration with repoRoot and optional provider\n * @returns Search results\n */\nexport async function execute(\n input: { query: string } | string,\n config: WarpGrepToolConfig\n): Promise<WarpGrepResult> {\n return executeToolCall(input, config);\n}\n\n// Re-export formatResult and getSystemPrompt for convenience\nexport { formatResult, getSystemPrompt };\n\n/**\n * Gemini tool with execute method attached\n */\nexport interface GeminiWarpGrepTool extends FunctionDeclaration {\n execute: (input: unknown) => Promise<WarpGrepResult>;\n formatResult: (result: WarpGrepResult) => string;\n getSystemPrompt: () => string;\n}\n\n/**\n * Create a custom warp grep tool with configuration and methods\n * \n * @param config - Configuration options\n * @returns Function declaration with execute and formatResult methods\n * \n * @example Local usage\n * ```typescript\n * import { GoogleGenerativeAI } from '@google/generative-ai';\n * import { createMorphWarpGrepTool } from '@morphllm/morphsdk/tools/warp-grep/gemini';\n * \n * const tool = createMorphWarpGrepTool({ repoRoot: '.' 
});\n * \n * const genAI = new GoogleGenerativeAI(process.env.GOOGLE_API_KEY);\n * const model = genAI.getGenerativeModel({\n * model: 'gemini-2.0-flash',\n * tools: [{ functionDeclarations: [tool] }]\n * });\n * \n * const chat = model.startChat();\n * const result = await chat.sendMessage('Find authentication middleware');\n * \n * // Handle function call\n * const call = result.response.functionCalls()?.[0];\n * if (call && call.name === tool.name) {\n * const searchResult = await tool.execute(call.args);\n * console.log(tool.formatResult(searchResult));\n * \n * // Send result back to model\n * await chat.sendMessage([{\n * functionResponse: {\n * name: call.name,\n * response: { result: tool.formatResult(searchResult) }\n * }\n * }]);\n * }\n * ```\n * \n * @example Remote sandbox (E2B, Modal, etc.)\n * ```typescript\n * const tool = createMorphWarpGrepTool({\n * repoRoot: '/home/repo',\n * remoteCommands: {\n * grep: async (pattern, path) => (await sandbox.run(`rg '${pattern}' '${path}'`)).stdout,\n * read: async (path, start, end) => (await sandbox.run(`sed -n '${start},${end}p' '${path}'`)).stdout,\n * listDir: async (path, maxDepth) => (await sandbox.run(`find '${path}' -maxdepth ${maxDepth}`)).stdout,\n * },\n * });\n * ```\n */\nexport function
+
{"version":3,"sources":["../tools/warp_grep/gemini.ts"],"sourcesContent":["/**\n * Google Gemini SDK adapter for morph-warp-grep tool\n */\n\nimport type { FunctionDeclaration, FunctionDeclarationSchema } from '@google/generative-ai';\nimport { SchemaType } from '@google/generative-ai';\nimport { executeToolCall, formatResult } from './client.js';\nimport { WARP_GREP_DESCRIPTION, WARP_GREP_TOOL_NAME, getSystemPrompt } from './prompts.js';\nimport type { WarpGrepToolConfig, WarpGrepResult } from './types.js';\n\n/**\n * Parameter schema for the warp grep tool (Gemini format)\n */\nconst TOOL_PARAMETERS: FunctionDeclarationSchema = {\n type: SchemaType.OBJECT,\n properties: {\n query: { \n type: SchemaType.STRING, \n description: 'Free-form repository question' \n },\n },\n required: ['query'],\n};\n\n/**\n * Gemini-native warp grep function declaration\n * \n * @example\n * ```typescript\n * import { GoogleGenerativeAI } from '@google/generative-ai';\n * import { warpGrepFunctionDeclaration, execute } from '@morphllm/morphsdk/tools/warp-grep/gemini';\n * \n * const genAI = new GoogleGenerativeAI(process.env.GOOGLE_API_KEY);\n * const model = genAI.getGenerativeModel({\n * model: 'gemini-2.0-flash',\n * tools: [{ functionDeclarations: [warpGrepFunctionDeclaration] }]\n * });\n * \n * const chat = model.startChat();\n * const result = await chat.sendMessage('Find authentication middleware');\n * \n * // Handle function call\n * const call = result.response.functionCalls()?.[0];\n * if (call) {\n * const searchResult = await execute(call.args, { repoRoot: '.' });\n * console.log(searchResult);\n * }\n * ```\n */\nexport const warpGrepFunctionDeclaration: FunctionDeclaration = {\n name: WARP_GREP_TOOL_NAME,\n description: WARP_GREP_DESCRIPTION,\n parameters: TOOL_PARAMETERS,\n};\n\n/**\n * Execute warp grep search\n * \n * @param input - Tool input with query\n * @param config - Configuration with repoRoot and optional provider\n * @returns Search results\n */\nexport async function execute(\n input: { query: string } | string,\n config: WarpGrepToolConfig\n): Promise<WarpGrepResult> {\n return executeToolCall(input, config);\n}\n\n// Re-export formatResult and getSystemPrompt for convenience\nexport { formatResult, getSystemPrompt };\n\n/**\n * Gemini tool with execute method attached\n */\nexport interface GeminiWarpGrepTool extends FunctionDeclaration {\n execute: (input: unknown) => Promise<WarpGrepResult>;\n formatResult: (result: WarpGrepResult) => string;\n getSystemPrompt: () => string;\n}\n\n/**\n * Create a custom warp grep tool with configuration and methods\n * \n * @param config - Configuration options\n * @returns Function declaration with execute and formatResult methods\n * \n * @example Local usage\n * ```typescript\n * import { GoogleGenerativeAI } from '@google/generative-ai';\n * import { createMorphWarpGrepTool } from '@morphllm/morphsdk/tools/warp-grep/gemini';\n * \n * const tool = createMorphWarpGrepTool({ repoRoot: '.' 
});\n * \n * const genAI = new GoogleGenerativeAI(process.env.GOOGLE_API_KEY);\n * const model = genAI.getGenerativeModel({\n * model: 'gemini-2.0-flash',\n * tools: [{ functionDeclarations: [tool] }]\n * });\n * \n * const chat = model.startChat();\n * const result = await chat.sendMessage('Find authentication middleware');\n * \n * // Handle function call\n * const call = result.response.functionCalls()?.[0];\n * if (call && call.name === tool.name) {\n * const searchResult = await tool.execute(call.args);\n * console.log(tool.formatResult(searchResult));\n * \n * // Send result back to model\n * await chat.sendMessage([{\n * functionResponse: {\n * name: call.name,\n * response: { result: tool.formatResult(searchResult) }\n * }\n * }]);\n * }\n * ```\n * \n * @example Remote sandbox (E2B, Modal, etc.)\n * ```typescript\n * const tool = createMorphWarpGrepTool({\n * repoRoot: '/home/repo',\n * remoteCommands: {\n * grep: async (pattern, path) => (await sandbox.run(`rg '${pattern}' '${path}'`)).stdout,\n * read: async (path, start, end) => (await sandbox.run(`sed -n '${start},${end}p' '${path}'`)).stdout,\n * listDir: async (path, maxDepth) => (await sandbox.run(`find '${path}' -maxdepth ${maxDepth}`)).stdout,\n * },\n * });\n * ```\n */\nexport function createWarpGrepTool(config: WarpGrepToolConfig): GeminiWarpGrepTool {\n const declaration: FunctionDeclaration = {\n name: config.name ?? WARP_GREP_TOOL_NAME,\n description: config.description ?? WARP_GREP_DESCRIPTION,\n parameters: TOOL_PARAMETERS,\n };\n\n return Object.assign(declaration, {\n execute: async (input: unknown): Promise<WarpGrepResult> => {\n return executeToolCall(input as { query: string } | string, config);\n },\n formatResult: (result: WarpGrepResult): string => {\n return formatResult(result);\n },\n getSystemPrompt: (): string => {\n return getSystemPrompt();\n },\n });\n}\n\n// Legacy alias for backwards compatibility\nexport const createMorphWarpGrepTool = createWarpGrepTool;\n\nexport default warpGrepFunctionDeclaration;\n\n"],"mappings":";;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,SAAS,kBAAkB;AAQ3B,IAAM,kBAA6C;AAAA,EACjD,MAAM,WAAW;AAAA,EACjB,YAAY;AAAA,IACV,OAAO;AAAA,MACL,MAAM,WAAW;AAAA,MACjB,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,UAAU,CAAC,OAAO;AACpB;AA2BO,IAAM,8BAAmD;AAAA,EAC9D,MAAM;AAAA,EACN,aAAa;AAAA,EACb,YAAY;AACd;AASA,eAAsB,QACpB,OACA,QACyB;AACzB,SAAO,gBAAgB,OAAO,MAAM;AACtC;AAgEO,SAAS,mBAAmB,QAAgD;AACjF,QAAM,cAAmC;AAAA,IACvC,MAAM,OAAO,QAAQ;AAAA,IACrB,aAAa,OAAO,eAAe;AAAA,IACnC,YAAY;AAAA,EACd;AAEA,SAAO,OAAO,OAAO,aAAa;AAAA,IAChC,SAAS,OAAO,UAA4C;AAC1D,aAAO,gBAAgB,OAAqC,MAAM;AAAA,IACpE;AAAA,IACA,cAAc,CAAC,WAAmC;AAChD,aAAO,aAAa,MAAM;AAAA,IAC5B;AAAA,IACA,iBAAiB,MAAc;AAC7B,aAAO,gBAAgB;AAAA,IACzB;AAAA,EACF,CAAC;AACH;AAGO,IAAM,0BAA0B;AAEvC,IAAO,iBAAQ;","names":[]}

package/dist/{chunk-QZNGKOCZ.js → chunk-HBWJLKNM.js}
@@ -7,7 +7,7 @@ import {
 } from "./chunk-63WE2C5R.js";
 import {
   __export
-} from "./chunk-
+} from "./chunk-HKJ2B2AA.js";

 // tools/fastapply/vercel.ts
 var vercel_exports = {};
@@ -86,4 +86,4 @@ export {
   vercel_default,
   vercel_exports
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-HBWJLKNM.js.map

package/dist/chunk-HKJ2B2AA.js
@@ -0,0 +1,15 @@
+var __defProp = Object.defineProperty;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __commonJS = (cb, mod) => function __require() {
+  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
+};
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+
+export {
+  __commonJS,
+  __export
+};
+//# sourceMappingURL=chunk-HKJ2B2AA.js.map
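chunk-HKJ2B2AA.js replaces the old helper chunk (chunk-PZ5AY32C.js in the file list, which appears to have carried only __export) and adds __commonJS, the esbuild-style wrapper that lets these ESM chunks lazily evaluate the vendored CommonJS `ignore` package. The shape of the generated wrapper is roughly the following; the module body is illustrative, not the actual bundled source in chunk-S6KU22MU.js:

```typescript
import { __commonJS } from './chunk-HKJ2B2AA.js';

// The factory runs once, on the first require_ignore() call, and its exports
// are cached in `mod` by __commonJS.
var require_ignore = __commonJS({
  'node_modules/ignore/index.js'(exports: any, module: { exports: any }) {
    module.exports = function ignore() { /* ...bundled package body... */ };
  }
});

// Consumers then unwrap the CJS/ESM interop default, exactly as the providers do:
var ignore = require_ignore().default || require_ignore();
```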

package/dist/{chunk-XLSODV6H.js → chunk-JPGX6WEV.js}
@@ -5,13 +5,13 @@ import {
 import {
   executeToolCall,
   formatResult
-} from "./chunk-
+} from "./chunk-VLZEBK7S.js";
 import {
   getSystemPrompt
 } from "./chunk-Q5AHGIQO.js";
 import {
   __export
-} from "./chunk-
+} from "./chunk-HKJ2B2AA.js";

 // tools/warp_grep/openai.ts
 var openai_exports = {};
@@ -71,4 +71,4 @@ export {
   openai_default,
   openai_exports
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-JPGX6WEV.js.map

package/dist/{chunk-AQD2JCLI.js → chunk-K3EHH3C4.js}
@@ -5,13 +5,13 @@ import {
 import {
   executeToolCall,
   formatResult
-} from "./chunk-
+} from "./chunk-VLZEBK7S.js";
 import {
   getSystemPrompt
 } from "./chunk-Q5AHGIQO.js";
 import {
   __export
-} from "./chunk-
+} from "./chunk-HKJ2B2AA.js";

 // tools/warp_grep/anthropic.ts
 var anthropic_exports = {};
@@ -62,4 +62,4 @@ export {
   createWarpGrepTool,
   anthropic_exports
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-K3EHH3C4.js.map

package/dist/{chunk-IZISG7DL.js → chunk-MFZP347Z.js}
@@ -245,14 +245,17 @@ async function executeWithRecording(input, config = {}) {
       recording
     };
   } catch (error) {
+    const errorRecording = {
+      id: taskResult.recording_id,
+      status: "ERROR",
+      error: error instanceof Error ? error.message : String(error),
+      created_at: (/* @__PURE__ */ new Date()).toISOString(),
+      getWebp: (options) => getWebp(taskResult.recording_id, config, options),
+      getErrors: () => getErrors(taskResult.recording_id, config)
+    };
     return {
       ...taskResult,
-      recording:
-        id: taskResult.recording_id,
-        status: "ERROR",
-        error: error instanceof Error ? error.message : String(error),
-        created_at: (/* @__PURE__ */ new Date()).toISOString()
-      }
+      recording: errorRecording
     };
   }
 }
@@ -499,4 +502,4 @@ export {
   getWebp,
   checkHealth
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-MFZP347Z.js.map
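The net effect of the chunk-MFZP347Z.js change is that the recording attached to an ERROR result now carries the same getWebp/getErrors helpers as a successful one, so callers no longer need to special-case failures. A consuming sketch; the import path and exact result typing are assumptions, but the fields and helpers match what is visible in the hunk above:

```typescript
import { executeWithRecording } from '@morphllm/morphsdk/tools/browser';

const result = await executeWithRecording(
  { task: 'Test the checkout flow', url: 'https://3000-abc.e2b.dev' },
  { apiKey: process.env.MORPH_API_KEY }
);

// Works on both the success and the error path now: the ERROR recording also
// exposes getWebp()/getErrors() bound to the same recording_id and config.
if (result.recording) {
  const webp = await result.recording.getWebp();
  const errors = await result.recording.getErrors();
  console.log(result.recording.status, { webp, errors });
}
```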