@morphllm/morphsdk 0.2.21 → 0.2.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-BILUTNBC.js → chunk-4A7UBGLS.js} +2 -2
- package/dist/chunk-73RQWOQC.js +16 -0
- package/dist/chunk-73RQWOQC.js.map +1 -0
- package/dist/chunk-AFEPUNAO.js +15 -0
- package/dist/chunk-AFEPUNAO.js.map +1 -0
- package/dist/{chunk-VJK4PH5V.js → chunk-AG3ICTC5.js} +5 -2
- package/dist/chunk-AG3ICTC5.js.map +1 -0
- package/dist/chunk-EAA7D24N.js +201 -0
- package/dist/chunk-EAA7D24N.js.map +1 -0
- package/dist/chunk-EK7OQPWD.js +44 -0
- package/dist/chunk-EK7OQPWD.js.map +1 -0
- package/dist/chunk-G2RSY56Q.js +11 -0
- package/dist/chunk-G2RSY56Q.js.map +1 -0
- package/dist/chunk-GTOXMAF2.js +140 -0
- package/dist/chunk-GTOXMAF2.js.map +1 -0
- package/dist/chunk-HKZB23U7.js +85 -0
- package/dist/chunk-HKZB23U7.js.map +1 -0
- package/dist/{chunk-34F3D6JD.js → chunk-HWJN5AMA.js} +8 -8
- package/dist/chunk-LN4CTQZG.js +56 -0
- package/dist/chunk-LN4CTQZG.js.map +1 -0
- package/dist/chunk-NDZO5IPV.js +121 -0
- package/dist/chunk-NDZO5IPV.js.map +1 -0
- package/dist/chunk-RSLIOCOE.js +26 -0
- package/dist/chunk-RSLIOCOE.js.map +1 -0
- package/dist/chunk-S3GI7HNR.js +44 -0
- package/dist/chunk-S3GI7HNR.js.map +1 -0
- package/dist/chunk-SMGZ6A64.js +53 -0
- package/dist/chunk-SMGZ6A64.js.map +1 -0
- package/dist/chunk-TICMYDII.js +81 -0
- package/dist/chunk-TICMYDII.js.map +1 -0
- package/dist/chunk-UYBIKZPM.js +135 -0
- package/dist/chunk-UYBIKZPM.js.map +1 -0
- package/dist/chunk-VBBJGWHY.js +73 -0
- package/dist/chunk-VBBJGWHY.js.map +1 -0
- package/dist/chunk-W5CHJ6OX.js +53 -0
- package/dist/chunk-W5CHJ6OX.js.map +1 -0
- package/dist/{chunk-WXBUVKYL.js → chunk-WHZQDTM6.js} +2 -2
- package/dist/chunk-XYPMN4A3.js +1 -0
- package/dist/chunk-XYPMN4A3.js.map +1 -0
- package/dist/{chunk-YVGRWE7D.js → chunk-Y2IY7NYY.js} +2 -2
- package/dist/chunk-Z2FBMSNE.js +10 -0
- package/dist/chunk-Z2FBMSNE.js.map +1 -0
- package/dist/client.cjs +4 -1
- package/dist/client.cjs.map +1 -1
- package/dist/client.js +5 -5
- package/dist/index.cjs +4 -1
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +9 -9
- package/dist/tools/codebase_search/anthropic.cjs +4 -1
- package/dist/tools/codebase_search/anthropic.cjs.map +1 -1
- package/dist/tools/codebase_search/anthropic.js +2 -2
- package/dist/tools/codebase_search/core.cjs +4 -1
- package/dist/tools/codebase_search/core.cjs.map +1 -1
- package/dist/tools/codebase_search/core.js +1 -1
- package/dist/tools/codebase_search/index.cjs +4 -1
- package/dist/tools/codebase_search/index.cjs.map +1 -1
- package/dist/tools/codebase_search/index.js +6 -6
- package/dist/tools/codebase_search/openai.cjs +4 -1
- package/dist/tools/codebase_search/openai.cjs.map +1 -1
- package/dist/tools/codebase_search/openai.js +2 -2
- package/dist/tools/codebase_search/types.cjs.map +1 -1
- package/dist/tools/codebase_search/vercel.cjs +4 -1
- package/dist/tools/codebase_search/vercel.cjs.map +1 -1
- package/dist/tools/codebase_search/vercel.js +2 -2
- package/dist/tools/warp_grep/agent/config.cjs +41 -0
- package/dist/tools/warp_grep/agent/config.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/config.js +12 -0
- package/dist/tools/warp_grep/agent/config.js.map +1 -0
- package/dist/tools/warp_grep/agent/formatter.cjs +106 -0
- package/dist/tools/warp_grep/agent/formatter.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/formatter.js +10 -0
- package/dist/tools/warp_grep/agent/formatter.js.map +1 -0
- package/dist/tools/warp_grep/agent/grep_helpers.cjs +148 -0
- package/dist/tools/warp_grep/agent/grep_helpers.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/grep_helpers.js +14 -0
- package/dist/tools/warp_grep/agent/grep_helpers.js.map +1 -0
- package/dist/tools/warp_grep/agent/parser.cjs +165 -0
- package/dist/tools/warp_grep/agent/parser.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/parser.js +10 -0
- package/dist/tools/warp_grep/agent/parser.js.map +1 -0
- package/dist/tools/warp_grep/agent/prompt.cjs +110 -0
- package/dist/tools/warp_grep/agent/prompt.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/prompt.js +10 -0
- package/dist/tools/warp_grep/agent/prompt.js.map +1 -0
- package/dist/tools/warp_grep/agent/runner.cjs +744 -0
- package/dist/tools/warp_grep/agent/runner.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/runner.js +17 -0
- package/dist/tools/warp_grep/agent/runner.js.map +1 -0
- package/dist/tools/warp_grep/agent/types.cjs +19 -0
- package/dist/tools/warp_grep/agent/types.cjs.map +1 -0
- package/dist/tools/warp_grep/agent/types.js +2 -0
- package/dist/tools/warp_grep/agent/types.js.map +1 -0
- package/dist/tools/warp_grep/anthropic.cjs +977 -0
- package/dist/tools/warp_grep/anthropic.cjs.map +1 -0
- package/dist/tools/warp_grep/anthropic.js +22 -0
- package/dist/tools/warp_grep/anthropic.js.map +1 -0
- package/dist/tools/warp_grep/index.cjs +1136 -0
- package/dist/tools/warp_grep/index.cjs.map +1 -0
- package/dist/tools/warp_grep/index.js +48 -0
- package/dist/tools/warp_grep/index.js.map +1 -0
- package/dist/tools/warp_grep/openai.cjs +980 -0
- package/dist/tools/warp_grep/openai.cjs.map +1 -0
- package/dist/tools/warp_grep/openai.js +22 -0
- package/dist/tools/warp_grep/openai.js.map +1 -0
- package/dist/tools/warp_grep/providers/command.cjs +98 -0
- package/dist/tools/warp_grep/providers/command.cjs.map +1 -0
- package/dist/tools/warp_grep/providers/command.js +9 -0
- package/dist/tools/warp_grep/providers/command.js.map +1 -0
- package/dist/tools/warp_grep/providers/local.cjs +232 -0
- package/dist/tools/warp_grep/providers/local.cjs.map +1 -0
- package/dist/tools/warp_grep/providers/local.js +12 -0
- package/dist/tools/warp_grep/providers/local.js.map +1 -0
- package/dist/tools/warp_grep/providers/types.cjs +19 -0
- package/dist/tools/warp_grep/providers/types.cjs.map +1 -0
- package/dist/tools/warp_grep/providers/types.js +1 -0
- package/dist/tools/warp_grep/providers/types.js.map +1 -0
- package/dist/tools/warp_grep/tools/analyse.cjs +40 -0
- package/dist/tools/warp_grep/tools/analyse.cjs.map +1 -0
- package/dist/tools/warp_grep/tools/analyse.js +8 -0
- package/dist/tools/warp_grep/tools/analyse.js.map +1 -0
- package/dist/tools/warp_grep/tools/finish.cjs +69 -0
- package/dist/tools/warp_grep/tools/finish.cjs.map +1 -0
- package/dist/tools/warp_grep/tools/finish.js +10 -0
- package/dist/tools/warp_grep/tools/finish.js.map +1 -0
- package/dist/tools/warp_grep/tools/grep.cjs +35 -0
- package/dist/tools/warp_grep/tools/grep.cjs.map +1 -0
- package/dist/tools/warp_grep/tools/grep.js +12 -0
- package/dist/tools/warp_grep/tools/grep.js.map +1 -0
- package/dist/tools/warp_grep/tools/read.cjs +34 -0
- package/dist/tools/warp_grep/tools/read.cjs.map +1 -0
- package/dist/tools/warp_grep/tools/read.js +8 -0
- package/dist/tools/warp_grep/tools/read.js.map +1 -0
- package/dist/tools/warp_grep/utils/files.cjs +45 -0
- package/dist/tools/warp_grep/utils/files.cjs.map +1 -0
- package/dist/tools/warp_grep/utils/files.js +8 -0
- package/dist/tools/warp_grep/utils/files.js.map +1 -0
- package/dist/tools/warp_grep/utils/format.cjs +42 -0
- package/dist/tools/warp_grep/utils/format.cjs.map +1 -0
- package/dist/tools/warp_grep/utils/format.js +18 -0
- package/dist/tools/warp_grep/utils/format.js.map +1 -0
- package/dist/tools/warp_grep/utils/paths.cjs +91 -0
- package/dist/tools/warp_grep/utils/paths.cjs.map +1 -0
- package/dist/tools/warp_grep/utils/paths.js +16 -0
- package/dist/tools/warp_grep/utils/paths.js.map +1 -0
- package/dist/tools/warp_grep/utils/ripgrep.cjs +50 -0
- package/dist/tools/warp_grep/utils/ripgrep.cjs.map +1 -0
- package/dist/tools/warp_grep/utils/ripgrep.js +8 -0
- package/dist/tools/warp_grep/utils/ripgrep.js.map +1 -0
- package/dist/tools/warp_grep/vercel.cjs +968 -0
- package/dist/tools/warp_grep/vercel.cjs.map +1 -0
- package/dist/tools/warp_grep/vercel.js +22 -0
- package/dist/tools/warp_grep/vercel.js.map +1 -0
- package/package.json +23 -3
- package/dist/anthropic-CknfcMoO.d.ts +0 -64
- package/dist/chunk-VJK4PH5V.js.map +0 -1
- package/dist/client.d.ts +0 -114
- package/dist/git/client.d.ts +0 -255
- package/dist/git/config.d.ts +0 -11
- package/dist/git/index.d.ts +0 -5
- package/dist/git/types.d.ts +0 -102
- package/dist/index.d.ts +0 -14
- package/dist/modelrouter/core.d.ts +0 -56
- package/dist/modelrouter/index.d.ts +0 -2
- package/dist/modelrouter/types.d.ts +0 -35
- package/dist/openai-BkKsS30n.d.ts +0 -111
- package/dist/tools/browser/anthropic.d.ts +0 -51
- package/dist/tools/browser/core.d.ts +0 -196
- package/dist/tools/browser/index.d.ts +0 -72
- package/dist/tools/browser/openai.d.ts +0 -69
- package/dist/tools/browser/prompts.d.ts +0 -7
- package/dist/tools/browser/types.d.ts +0 -227
- package/dist/tools/browser/vercel.d.ts +0 -69
- package/dist/tools/codebase_search/anthropic.d.ts +0 -40
- package/dist/tools/codebase_search/core.d.ts +0 -40
- package/dist/tools/codebase_search/index.d.ts +0 -10
- package/dist/tools/codebase_search/openai.d.ts +0 -87
- package/dist/tools/codebase_search/prompts.d.ts +0 -7
- package/dist/tools/codebase_search/types.d.ts +0 -46
- package/dist/tools/codebase_search/vercel.d.ts +0 -65
- package/dist/tools/fastapply/anthropic.d.ts +0 -4
- package/dist/tools/fastapply/core.d.ts +0 -41
- package/dist/tools/fastapply/index.d.ts +0 -10
- package/dist/tools/fastapply/openai.d.ts +0 -4
- package/dist/tools/fastapply/prompts.d.ts +0 -7
- package/dist/tools/fastapply/types.d.ts +0 -77
- package/dist/tools/fastapply/vercel.d.ts +0 -4
- package/dist/tools/index.d.ts +0 -10
- package/dist/tools/utils/resilience.d.ts +0 -58
- package/dist/vercel-B1GZ_g9N.d.ts +0 -69
- /package/dist/{chunk-BILUTNBC.js.map → chunk-4A7UBGLS.js.map} +0 -0
- /package/dist/{chunk-34F3D6JD.js.map → chunk-HWJN5AMA.js.map} +0 -0
- /package/dist/{chunk-WXBUVKYL.js.map → chunk-WHZQDTM6.js.map} +0 -0
- /package/dist/{chunk-YVGRWE7D.js.map → chunk-Y2IY7NYY.js.map} +0 -0

@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/agent/prompt.ts"],"sourcesContent":["export const SYSTEM_PROMPT = `You are a code search agent. Your task is to find relevant code snippets based on a search query.\n\n<workflow>\nYou operate in exactly 3 rounds of tool exploration, followed by a final answer:\n\n1. In each round, you can make MULTIPLE tool calls (up to 8) to search in parallel. All tool results will be returned together after each round.\n2. After your third round of tool calls, your next turn MUST be a single call to the \\`finish\\` tool with all the context you have found.\n</workflow>\n\n<tool_calling>\nYou have tools at your disposal to solve the coding task. Follow these rules regarding tool calls:\n\n### 1. \\`analyse\\` - Explore Directories\nExplore directory structure in a tree-like format.\n**Syntax:** \\`analyse <path> [pattern]\\`\n- \\`<path>\\`: Directory path to analyze (defaults to \\`.\\`)\n- \\`[pattern]\\`: Optional regex pattern to filter names\n\nFor example:\n\\`\\`\\`\nanalyse src/api\nanalyse . \"test\"\n\\`\\`\\`\n\n### 2. \\`read\\` - Read File Contents\nRead entire files or specific line ranges.\n**Syntax:** \\`read <path>[:start-end]\\`\n- \\`<path>\\`: File path to read\n- \\`[:start-end]\\`: Optional 1-based, inclusive line range\n\nFor example:\n\\`\\`\\`\nread src/main.py\nread src/database/connection.py:10-50\n\\`\\`\\`\n\n### 3. \\`grep\\` - Search with Regex\nSearch for regex patterns across files using ripgrep.\n**Syntax:** \\`grep '<pattern>' <path>\\`\n- \\`'<pattern>'\\`: Regex pattern (always wrap in single quotes)\n- \\`<path>\\`: Directory or file to search (use \\`.\\` for the repo root)\n\nFor example:\n\\`\\`\\`\ngrep 'create_user' .\ngrep 'import.*requests' src/api\ngrep 'class\\\\\\\\s+AuthService' controllers/auth.py\n\\`\\`\\`\n\n### 4. \\`finish\\` - Submit Final Answer\nSubmit your findings when complete.\n**Syntax:** \\`finish <file1:range1,range2...> [file2:range3...]\\`\n- Provide file paths with colon-separated, comma-separated line ranges\n\nFor example:\n\\`\\`\\`\nfinish src/api/auth.py:25-50,75-80 src/models/user.py:10-15\n\\`\\`\\`\n</tool_calling>\n\n<strategy>\n- Use the \\`analyse\\`, \\`grep\\`, and \\`read\\` tools to gather information about the codebase.\n- Leverage the tools smartly to make full use of their potential\n- Make parallel tool calls within each round to investigate multiple paths or files efficiently\n- Be systematic and thorough within your 3-round limit\n</strategy>\n\n<output_format>\n- Only output tool calls themselves\n- Do not include explanatory text, reasoning, or commentary\n- Each tool call should be on its own line\n- After 3 rounds of exploration, call \\`finish\\` with all relevant code snippets you found\n</output_format>\n\nBegin your exploration now to find code relevant to the query.`;\n\nexport function getSystemPrompt(): string {\n\treturn SYSTEM_PROMPT;\n}\n\n\n"],"mappings":";AAAO,IAAM,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA4EtB,SAAS,kBAA0B;AACzC,SAAO;AACR;","names":[]}

@@ -1,6 +1,12 @@
+import {
+  BrowserClient
+} from "./chunk-GPFHYUKV.js";
+import {
+  FastApplyClient
+} from "./chunk-Q7USYY6R.js";
 import {
   CodebaseSearchClient
-} from "./chunk-
+} from "./chunk-AG3ICTC5.js";
 import {
   AnthropicRouter,
   GeminiRouter,
@@ -9,12 +15,6 @@ import {
 import {
   MorphGit
 } from "./chunk-DF2ZOO7R.js";
-import {
-  FastApplyClient
-} from "./chunk-Q7USYY6R.js";
-import {
-  BrowserClient
-} from "./chunk-GPFHYUKV.js";
 
 // client.ts
 var MorphClient = class {
@@ -94,4 +94,4 @@ var MorphClient = class {
 export {
   MorphClient
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-HWJN5AMA.js.map

@@ -0,0 +1,56 @@
+import {
+  runWarpGrep
+} from "./chunk-EAA7D24N.js";
+import {
+  LocalRipgrepProvider
+} from "./chunk-UYBIKZPM.js";
+
+// tools/warp_grep/openai.ts
+import { z } from "zod";
+var INPUT_SCHEMA = z.object({
+  query: z.string().describe("Free-form repository question")
+});
+function createMorphWarpGrepTool(config) {
+  const tool = {
+    type: "function",
+    function: {
+      name: "morph-warp-grep",
+      description: config.description ?? "A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.",
+      parameters: {
+        type: "object",
+        properties: {
+          query: { type: "string", description: "Free-form repository question" }
+        },
+        required: ["query"]
+      }
+    }
+  };
+  return Object.assign(tool, {
+    execute: async (input) => {
+      const parsed = INPUT_SCHEMA.parse(typeof input === "string" ? JSON.parse(input) : input);
+      const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
+      const result = await runWarpGrep({
+        query: parsed.query,
+        repoRoot: config.repoRoot,
+        provider,
+        excludes: config.excludes,
+        includes: config.includes,
+        debug: config.debug ?? false,
+        apiKey: config.apiKey
+      });
+      if (result.terminationReason !== "completed" || !result.finish?.metadata) {
+        return { success: false, error: "Search did not complete", messages: result.messages };
+      }
+      const contexts = (result.finish.resolved || []).map((r) => ({
+        file: r.path,
+        content: r.content
+      }));
+      return { success: true, contexts, summary: result.finish.payload };
+    }
+  });
+}
+
+export {
+  createMorphWarpGrepTool
+};
+//# sourceMappingURL=chunk-LN4CTQZG.js.map

@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/openai.ts"],"sourcesContent":["import type { ChatCompletionTool } from 'openai/resources/chat/completions';\nimport { z } from 'zod';\nimport { runWarpGrep } from './agent/runner.js';\nimport type { WarpGrepProvider } from './providers/types.js';\nimport { LocalRipgrepProvider } from './providers/local.js';\nimport { AGENT_CONFIG } from './agent/config.js';\n\nexport type WarpGrepConfig = {\n repoRoot: string;\n provider?: WarpGrepProvider;\n excludes?: string[];\n includes?: string[];\n debug?: boolean;\n apiKey?: string; // Morph API key (defaults to env)\n description?: string;\n};\n\nconst INPUT_SCHEMA = z.object({\n query: z.string().describe('Free-form repository question'),\n});\n\nexport function createMorphWarpGrepTool(config: WarpGrepConfig) {\n const tool: ChatCompletionTool = {\n type: 'function',\n function: {\n name: 'morph-warp-grep',\n description: config.description ?? 'A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.',\n parameters: {\n type: 'object',\n properties: {\n query: { type: 'string', description: 'Free-form repository question' },\n },\n required: ['query'],\n },\n },\n };\n\n return Object.assign(tool, {\n execute: async (input: unknown) => {\n const parsed = INPUT_SCHEMA.parse(typeof input === 'string' ? JSON.parse(input) : input);\n const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);\n const result = await runWarpGrep({\n query: parsed.query,\n repoRoot: config.repoRoot,\n provider,\n excludes: config.excludes,\n includes: config.includes,\n debug: config.debug ?? false,\n apiKey: config.apiKey,\n });\n if (result.terminationReason !== 'completed' || !result.finish?.metadata) {\n return { success: false, error: 'Search did not complete', messages: result.messages };\n }\n const contexts = (result.finish.resolved || []).map((r: any) => ({\n file: r.path,\n content: r.content,\n }));\n return { success: true, contexts, summary: result.finish.payload };\n },\n });\n}\n\n\n"],"mappings":";;;;;;;;AACA,SAAS,SAAS;AAgBlB,IAAM,eAAe,EAAE,OAAO;AAAA,EAC5B,OAAO,EAAE,OAAO,EAAE,SAAS,+BAA+B;AAC5D,CAAC;AAEM,SAAS,wBAAwB,QAAwB;AAC9D,QAAM,OAA2B;AAAA,IAC/B,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM;AAAA,MACN,aAAa,OAAO,eAAe;AAAA,MACnC,YAAY;AAAA,QACV,MAAM;AAAA,QACN,YAAY;AAAA,UACV,OAAO,EAAE,MAAM,UAAU,aAAa,gCAAgC;AAAA,QACxE;AAAA,QACA,UAAU,CAAC,OAAO;AAAA,MACpB;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,OAAO,MAAM;AAAA,IACzB,SAAS,OAAO,UAAmB;AACjC,YAAM,SAAS,aAAa,MAAM,OAAO,UAAU,WAAW,KAAK,MAAM,KAAK,IAAI,KAAK;AACvF,YAAM,WAAW,OAAO,YAAY,IAAI,qBAAqB,OAAO,UAAU,OAAO,QAAQ;AAC7F,YAAM,SAAS,MAAM,YAAY;AAAA,QAC/B,OAAO,OAAO;AAAA,QACd,UAAU,OAAO;AAAA,QACjB;AAAA,QACA,UAAU,OAAO;AAAA,QACjB,UAAU,OAAO;AAAA,QACjB,OAAO,OAAO,SAAS;AAAA,QACvB,QAAQ,OAAO;AAAA,MACjB,CAAC;AACD,UAAI,OAAO,sBAAsB,eAAe,CAAC,OAAO,QAAQ,UAAU;AACxE,eAAO,EAAE,SAAS,OAAO,OAAO,2BAA2B,UAAU,OAAO,SAAS;AAAA,MACvF;AACA,YAAM,YAAY,OAAO,OAAO,YAAY,CAAC,GAAG,IAAI,CAAC,OAAY;AAAA,QAC/D,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,MACb,EAAE;AACF,aAAO,EAAE,SAAS,MAAM,UAAU,SAAS,OAAO,OAAO,QAAQ;AAAA,IACnE;AAAA,EACF,CAAC;AACH;","names":[]}

@@ -0,0 +1,121 @@
+// tools/warp_grep/agent/grep_helpers.ts
+var GrepState = class {
+  seenLines = /* @__PURE__ */ new Set();
+  isNew(path, lineNumber) {
+    const key = this.makeKey(path, lineNumber);
+    return !this.seenLines.has(key);
+  }
+  add(path, lineNumber) {
+    this.seenLines.add(this.makeKey(path, lineNumber));
+  }
+  makeKey(path, lineNumber) {
+    return `${path}:${lineNumber}`;
+  }
+};
+var MAX_GREP_OUTPUT_CHARS_PER_TURN = 6e4;
+function extractMatchFields(payload) {
+  const text = payload.replace(/\r?\n$/, "");
+  if (!text || text.startsWith("[error]")) {
+    return null;
+  }
+  const firstSep = text.indexOf(":");
+  if (firstSep === -1) {
+    return null;
+  }
+  let filePath = text.slice(0, firstSep).trim();
+  if (!filePath) {
+    return null;
+  }
+  if (filePath.startsWith("./") || filePath.startsWith(".\\")) {
+    filePath = filePath.slice(2);
+  }
+  const remainder = text.slice(firstSep + 1);
+  const secondSep = remainder.indexOf(":");
+  if (secondSep === -1) {
+    return null;
+  }
+  const linePart = remainder.slice(0, secondSep);
+  const lineNumber = Number.parseInt(linePart, 10);
+  if (!Number.isInteger(lineNumber) || lineNumber <= 0) {
+    return null;
+  }
+  let contentSegment = remainder.slice(secondSep + 1);
+  const columnSep = contentSegment.indexOf(":");
+  if (columnSep !== -1 && /^\d+$/.test(contentSegment.slice(0, columnSep))) {
+    contentSegment = contentSegment.slice(columnSep + 1);
+  }
+  const content = contentSegment.trim();
+  if (!content) {
+    return null;
+  }
+  return { path: filePath, lineNumber, content };
+}
+function parseAndFilterGrepOutput(rawOutput, state) {
+  const matches = [];
+  if (typeof rawOutput !== "string" || !rawOutput.trim()) {
+    return matches;
+  }
+  for (const line of rawOutput.split(/\r?\n/)) {
+    const fields = extractMatchFields(line);
+    if (!fields) {
+      continue;
+    }
+    if (state.isNew(fields.path, fields.lineNumber)) {
+      matches.push(fields);
+      state.add(fields.path, fields.lineNumber);
+    }
+  }
+  return matches;
+}
+function truncateOutput(payload, maxChars) {
+  if (payload.length <= maxChars) {
+    return payload;
+  }
+  const note = "... (output truncated)";
+  const available = maxChars - note.length - 1;
+  if (available <= 0) {
+    return note;
+  }
+  if (payload.length <= available) {
+    return `${payload.slice(0, available).replace(/\n$/, "")}
+${note}`;
+  }
+  const core = payload.slice(0, Math.max(0, available - 1));
+  const trimmed = core.replace(/\n$/, "").replace(/\s+$/, "");
+  const snippet = trimmed ? `${trimmed}\u2026` : "\u2026";
+  return `${snippet}
+${note}`;
+}
+function formatTurnGrepOutput(matches, maxChars = MAX_GREP_OUTPUT_CHARS_PER_TURN) {
+  if (!matches || matches.length === 0) {
+    return "No new matches found.";
+  }
+  const matchesByFile = /* @__PURE__ */ new Map();
+  for (const match of matches) {
+    if (!matchesByFile.has(match.path)) {
+      matchesByFile.set(match.path, []);
+    }
+    matchesByFile.get(match.path).push(match);
+  }
+  const lines = [];
+  const sortedPaths = Array.from(matchesByFile.keys()).sort();
+  sortedPaths.forEach((filePath, index) => {
+    if (index > 0) {
+      lines.push("");
+    }
+    lines.push(filePath);
+    const sortedMatches = matchesByFile.get(filePath).slice().sort((a, b) => a.lineNumber - b.lineNumber);
+    for (const match of sortedMatches) {
+      lines.push(`${match.lineNumber}:${match.content}`);
+    }
+  });
+  return truncateOutput(lines.join("\n"), maxChars);
+}
+
+export {
+  GrepState,
+  MAX_GREP_OUTPUT_CHARS_PER_TURN,
+  parseAndFilterGrepOutput,
+  formatTurnGrepOutput
+};
+//# sourceMappingURL=chunk-NDZO5IPV.js.map

@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/agent/grep_helpers.ts"],"sourcesContent":["export interface GrepMatch {\n\tpath: string;\n\tlineNumber: number;\n\tcontent: string;\n}\n\nexport class GrepState {\n\tprivate readonly seenLines = new Set<string>();\n\n\tisNew(path: string, lineNumber: number): boolean {\n\t\tconst key = this.makeKey(path, lineNumber);\n\t\treturn !this.seenLines.has(key);\n\t}\n\n\tadd(path: string, lineNumber: number): void {\n\t\tthis.seenLines.add(this.makeKey(path, lineNumber));\n\t}\n\n\tprivate makeKey(path: string, lineNumber: number): string {\n\t\treturn `${path}:${lineNumber}`;\n\t}\n}\n\nexport const MAX_GREP_OUTPUT_CHARS_PER_TURN = 60_000;\n\nfunction extractMatchFields(payload: string): GrepMatch | null {\n\tconst text = payload.replace(/\\r?\\n$/, \"\");\n\tif (!text || text.startsWith(\"[error]\")) {\n\t\treturn null;\n\t}\n\n\tconst firstSep = text.indexOf(\":\");\n\tif (firstSep === -1) {\n\t\treturn null;\n\t}\n\tlet filePath = text.slice(0, firstSep).trim();\n\tif (!filePath) {\n\t\treturn null;\n\t}\n\tif (filePath.startsWith(\"./\") || filePath.startsWith(\".\\\\\")) {\n\t\tfilePath = filePath.slice(2);\n\t}\n\n\tconst remainder = text.slice(firstSep + 1);\n\tconst secondSep = remainder.indexOf(\":\");\n\tif (secondSep === -1) {\n\t\treturn null;\n\t}\n\n\tconst linePart = remainder.slice(0, secondSep);\n\tconst lineNumber = Number.parseInt(linePart, 10);\n\tif (!Number.isInteger(lineNumber) || lineNumber <= 0) {\n\t\treturn null;\n\t}\n\n\tlet contentSegment = remainder.slice(secondSep + 1);\n\tconst columnSep = contentSegment.indexOf(\":\");\n\tif (columnSep !== -1 && /^\\d+$/.test(contentSegment.slice(0, columnSep))) {\n\t\tcontentSegment = contentSegment.slice(columnSep + 1);\n\t}\n\n\tconst content = contentSegment.trim();\n\tif (!content) {\n\t\treturn null;\n\t}\n\n\treturn { path: filePath, lineNumber, content };\n}\n\nexport function parseAndFilterGrepOutput(rawOutput: string, state: GrepState): GrepMatch[] {\n\tconst matches: GrepMatch[] = [];\n\tif (typeof rawOutput !== \"string\" || !rawOutput.trim()) {\n\t\treturn matches;\n\t}\n\n\tfor (const line of rawOutput.split(/\\r?\\n/)) {\n\t\tconst fields = extractMatchFields(line);\n\t\tif (!fields) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (state.isNew(fields.path, fields.lineNumber)) {\n\t\t\tmatches.push(fields);\n\t\t\tstate.add(fields.path, fields.lineNumber);\n\t\t}\n\t}\n\n\treturn matches;\n}\n\nfunction truncateOutput(payload: string, maxChars: number): string {\n\tif (payload.length <= maxChars) {\n\t\treturn payload;\n\t}\n\n\tconst note = \"... (output truncated)\";\n\tconst available = maxChars - note.length - 1;\n\tif (available <= 0) {\n\t\treturn note;\n\t}\n\n\tif (payload.length <= available) {\n\t\treturn `${payload.slice(0, available).replace(/\\n$/, \"\")}\\n${note}`;\n\t}\n\n\tconst core = payload.slice(0, Math.max(0, available - 1));\n\tconst trimmed = core.replace(/\\n$/, \"\").replace(/\\s+$/, \"\");\n\tconst snippet = trimmed ? 
`${trimmed}…` : \"…\";\n\treturn `${snippet}\\n${note}`;\n}\n\nexport function formatTurnGrepOutput(\n\tmatches: GrepMatch[],\n\tmaxChars: number = MAX_GREP_OUTPUT_CHARS_PER_TURN\n): string {\n\tif (!matches || matches.length === 0) {\n\t\treturn \"No new matches found.\";\n\t}\n\n\tconst matchesByFile = new Map<string, GrepMatch[]>();\n\tfor (const match of matches) {\n\t\tif (!matchesByFile.has(match.path)) {\n\t\t\tmatchesByFile.set(match.path, []);\n\t\t}\n\t\tmatchesByFile.get(match.path)!.push(match);\n\t}\n\n\tconst lines: string[] = [];\n\tconst sortedPaths = Array.from(matchesByFile.keys()).sort();\n\tsortedPaths.forEach((filePath, index) => {\n\t\tif (index > 0) {\n\t\t\tlines.push(\"\");\n\t\t}\n\t\tlines.push(filePath);\n\t\tconst sortedMatches = matchesByFile\n\t\t\t.get(filePath)!\n\t\t\t.slice()\n\t\t\t.sort((a, b) => a.lineNumber - b.lineNumber);\n\t\tfor (const match of sortedMatches) {\n\t\t\tlines.push(`${match.lineNumber}:${match.content}`);\n\t\t}\n\t});\n\n\treturn truncateOutput(lines.join(\"\\n\"), maxChars);\n}\n\n\n"],"mappings":";AAMO,IAAM,YAAN,MAAgB;AAAA,EACL,YAAY,oBAAI,IAAY;AAAA,EAE7C,MAAM,MAAc,YAA6B;AAChD,UAAM,MAAM,KAAK,QAAQ,MAAM,UAAU;AACzC,WAAO,CAAC,KAAK,UAAU,IAAI,GAAG;AAAA,EAC/B;AAAA,EAEA,IAAI,MAAc,YAA0B;AAC3C,SAAK,UAAU,IAAI,KAAK,QAAQ,MAAM,UAAU,CAAC;AAAA,EAClD;AAAA,EAEQ,QAAQ,MAAc,YAA4B;AACzD,WAAO,GAAG,IAAI,IAAI,UAAU;AAAA,EAC7B;AACD;AAEO,IAAM,iCAAiC;AAE9C,SAAS,mBAAmB,SAAmC;AAC9D,QAAM,OAAO,QAAQ,QAAQ,UAAU,EAAE;AACzC,MAAI,CAAC,QAAQ,KAAK,WAAW,SAAS,GAAG;AACxC,WAAO;AAAA,EACR;AAEA,QAAM,WAAW,KAAK,QAAQ,GAAG;AACjC,MAAI,aAAa,IAAI;AACpB,WAAO;AAAA,EACR;AACA,MAAI,WAAW,KAAK,MAAM,GAAG,QAAQ,EAAE,KAAK;AAC5C,MAAI,CAAC,UAAU;AACd,WAAO;AAAA,EACR;AACA,MAAI,SAAS,WAAW,IAAI,KAAK,SAAS,WAAW,KAAK,GAAG;AAC5D,eAAW,SAAS,MAAM,CAAC;AAAA,EAC5B;AAEA,QAAM,YAAY,KAAK,MAAM,WAAW,CAAC;AACzC,QAAM,YAAY,UAAU,QAAQ,GAAG;AACvC,MAAI,cAAc,IAAI;AACrB,WAAO;AAAA,EACR;AAEA,QAAM,WAAW,UAAU,MAAM,GAAG,SAAS;AAC7C,QAAM,aAAa,OAAO,SAAS,UAAU,EAAE;AAC/C,MAAI,CAAC,OAAO,UAAU,UAAU,KAAK,cAAc,GAAG;AACrD,WAAO;AAAA,EACR;AAEA,MAAI,iBAAiB,UAAU,MAAM,YAAY,CAAC;AAClD,QAAM,YAAY,eAAe,QAAQ,GAAG;AAC5C,MAAI,cAAc,MAAM,QAAQ,KAAK,eAAe,MAAM,GAAG,SAAS,CAAC,GAAG;AACzE,qBAAiB,eAAe,MAAM,YAAY,CAAC;AAAA,EACpD;AAEA,QAAM,UAAU,eAAe,KAAK;AACpC,MAAI,CAAC,SAAS;AACb,WAAO;AAAA,EACR;AAEA,SAAO,EAAE,MAAM,UAAU,YAAY,QAAQ;AAC9C;AAEO,SAAS,yBAAyB,WAAmB,OAA+B;AAC1F,QAAM,UAAuB,CAAC;AAC9B,MAAI,OAAO,cAAc,YAAY,CAAC,UAAU,KAAK,GAAG;AACvD,WAAO;AAAA,EACR;AAEA,aAAW,QAAQ,UAAU,MAAM,OAAO,GAAG;AAC5C,UAAM,SAAS,mBAAmB,IAAI;AACtC,QAAI,CAAC,QAAQ;AACZ;AAAA,IACD;AACA,QAAI,MAAM,MAAM,OAAO,MAAM,OAAO,UAAU,GAAG;AAChD,cAAQ,KAAK,MAAM;AACnB,YAAM,IAAI,OAAO,MAAM,OAAO,UAAU;AAAA,IACzC;AAAA,EACD;AAEA,SAAO;AACR;AAEA,SAAS,eAAe,SAAiB,UAA0B;AAClE,MAAI,QAAQ,UAAU,UAAU;AAC/B,WAAO;AAAA,EACR;AAEA,QAAM,OAAO;AACb,QAAM,YAAY,WAAW,KAAK,SAAS;AAC3C,MAAI,aAAa,GAAG;AACnB,WAAO;AAAA,EACR;AAEA,MAAI,QAAQ,UAAU,WAAW;AAChC,WAAO,GAAG,QAAQ,MAAM,GAAG,SAAS,EAAE,QAAQ,OAAO,EAAE,CAAC;AAAA,EAAK,IAAI;AAAA,EAClE;AAEA,QAAM,OAAO,QAAQ,MAAM,GAAG,KAAK,IAAI,GAAG,YAAY,CAAC,CAAC;AACxD,QAAM,UAAU,KAAK,QAAQ,OAAO,EAAE,EAAE,QAAQ,QAAQ,EAAE;AAC1D,QAAM,UAAU,UAAU,GAAG,OAAO,WAAM;AAC1C,SAAO,GAAG,OAAO;AAAA,EAAK,IAAI;AAC3B;AAEO,SAAS,qBACf,SACA,WAAmB,gCACV;AACT,MAAI,CAAC,WAAW,QAAQ,WAAW,GAAG;AACrC,WAAO;AAAA,EACR;AAEA,QAAM,gBAAgB,oBAAI,IAAyB;AACnD,aAAW,SAAS,SAAS;AAC5B,QAAI,CAAC,cAAc,IAAI,MAAM,IAAI,GAAG;AACnC,oBAAc,IAAI,MAAM,MAAM,CAAC,CAAC;AAAA,IACjC;AACA,kBAAc,IAAI,MAAM,IAAI,EAAG,KAAK,KAAK;AAAA,EAC1C;AAEA,QAAM,QAAkB,CAAC;AACzB,QAAM,cAAc,MAAM,KAAK,cAAc,KAAK,CAAC,EAAE,KAAK;AAC1D,cAAY,QAAQ,CAAC,UAAU,UAAU;AACxC,QAAI,QAAQ,GAAG;AACd,YAAM,KAAK,EAAE;AAAA,IACd;AAC
A,UAAM,KAAK,QAAQ;AACnB,UAAM,gBAAgB,cACpB,IAAI,QAAQ,EACZ,MAAM,EACN,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU;AAC5C,eAAW,SAAS,eAAe;AAClC,YAAM,KAAK,GAAG,MAAM,UAAU,IAAI,MAAM,OAAO,EAAE;AAAA,IAClD;AAAA,EACD,CAAC;AAED,SAAO,eAAe,MAAM,KAAK,IAAI,GAAG,QAAQ;AACjD;","names":[]}
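
This chunk carries the grep bookkeeping used by the agent: `GrepState` deduplicates file:line hits across rounds, and `formatTurnGrepOutput` groups new hits per file and truncates long output. A small sketch of that flow; these helpers are internal build artifacts, so the import path here is only illustrative:

```ts
// Illustrative import of the internal helpers shown in the chunk above.
import { GrepState, parseAndFilterGrepOutput, formatTurnGrepOutput } from "./chunk-NDZO5IPV.js";

const state = new GrepState();

// Raw "path:line:content" lines, e.g. from `rg --with-filename --line-number`.
const round1 = "src/auth.ts:12:function login(user) {\nsrc/auth.ts:40:export function logout() {";
const round2 = "src/auth.ts:12:function login(user) {\nsrc/session.ts:7:const TTL = 3600;";

const first = parseAndFilterGrepOutput(round1, state);  // both lines are new
const second = parseAndFilterGrepOutput(round2, state); // auth.ts:12 is dropped as already seen

console.log(formatTurnGrepOutput(first));
// src/auth.ts
// 12:function login(user) {
// 40:export function logout() {
console.log(formatTurnGrepOutput(second));
// src/session.ts
// 7:const TTL = 3600;
```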

@@ -0,0 +1,26 @@
+// tools/warp_grep/utils/ripgrep.ts
+import { spawn } from "child_process";
+function runRipgrep(args, opts) {
+  return new Promise((resolve) => {
+    const child = spawn("rg", args, {
+      cwd: opts?.cwd,
+      env: { ...process.env, ...opts?.env || {} },
+      stdio: ["ignore", "pipe", "pipe"]
+    });
+    let stdout = "";
+    let stderr = "";
+    child.stdout.on("data", (d) => stdout += d.toString());
+    child.stderr.on("data", (d) => stderr += d.toString());
+    child.on("close", (code) => {
+      resolve({ stdout, stderr, exitCode: typeof code === "number" ? code : -1 });
+    });
+    child.on("error", () => {
+      resolve({ stdout: "", stderr: "Failed to spawn ripgrep (rg). Ensure it is installed.", exitCode: -1 });
+    });
+  });
+}
+
+export {
+  runRipgrep
+};
+//# sourceMappingURL=chunk-RSLIOCOE.js.map

@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/utils/ripgrep.ts"],"sourcesContent":["import { spawn } from 'child_process';\n\nexport type ExecResult = { stdout: string; stderr: string; exitCode: number };\n\nexport function runRipgrep(args: string[], opts?: { cwd?: string; env?: NodeJS.ProcessEnv }): Promise<ExecResult> {\n return new Promise((resolve) => {\n const child = spawn('rg', args, {\n cwd: opts?.cwd,\n env: { ...process.env, ...(opts?.env || {}) },\n stdio: ['ignore', 'pipe', 'pipe'],\n });\n let stdout = '';\n let stderr = '';\n child.stdout.on('data', (d) => (stdout += d.toString()));\n child.stderr.on('data', (d) => (stderr += d.toString()));\n child.on('close', (code) => {\n resolve({ stdout, stderr, exitCode: typeof code === 'number' ? code : -1 });\n });\n child.on('error', () => {\n resolve({ stdout: '', stderr: 'Failed to spawn ripgrep (rg). Ensure it is installed.', exitCode: -1 });\n });\n });\n}\n\n\n"],"mappings":";AAAA,SAAS,aAAa;AAIf,SAAS,WAAW,MAAgB,MAAuE;AAChH,SAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,UAAM,QAAQ,MAAM,MAAM,MAAM;AAAA,MAC9B,KAAK,MAAM;AAAA,MACX,KAAK,EAAE,GAAG,QAAQ,KAAK,GAAI,MAAM,OAAO,CAAC,EAAG;AAAA,MAC5C,OAAO,CAAC,UAAU,QAAQ,MAAM;AAAA,IAClC,CAAC;AACD,QAAI,SAAS;AACb,QAAI,SAAS;AACb,UAAM,OAAO,GAAG,QAAQ,CAAC,MAAO,UAAU,EAAE,SAAS,CAAE;AACvD,UAAM,OAAO,GAAG,QAAQ,CAAC,MAAO,UAAU,EAAE,SAAS,CAAE;AACvD,UAAM,GAAG,SAAS,CAAC,SAAS;AAC1B,cAAQ,EAAE,QAAQ,QAAQ,UAAU,OAAO,SAAS,WAAW,OAAO,GAAG,CAAC;AAAA,IAC5E,CAAC;AACD,UAAM,GAAG,SAAS,MAAM;AACtB,cAAQ,EAAE,QAAQ,IAAI,QAAQ,yDAAyD,UAAU,GAAG,CAAC;AAAA,IACvG,CAAC;AAAA,EACH,CAAC;AACH;","names":[]}

@@ -0,0 +1,44 @@
+import {
+  runWarpGrep
+} from "./chunk-EAA7D24N.js";
+import {
+  LocalRipgrepProvider
+} from "./chunk-UYBIKZPM.js";
+
+// tools/warp_grep/vercel.ts
+import { tool } from "ai";
+import { z } from "zod";
+function createMorphWarpGrepTool(config) {
+  const schema = z.object({
+    query: z.string().describe("Free-form repository question")
+  });
+  return tool({
+    description: config.description ?? "A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.",
+    inputSchema: schema,
+    execute: async (params) => {
+      const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);
+      const result = await runWarpGrep({
+        query: params.query,
+        repoRoot: config.repoRoot,
+        provider,
+        excludes: config.excludes,
+        includes: config.includes,
+        debug: config.debug ?? false,
+        apiKey: config.apiKey
+      });
+      if (result.terminationReason !== "completed" || !result.finish?.metadata) {
+        return { success: false, error: "Search did not complete", messages: result.messages };
+      }
+      const contexts = (result.finish.resolved || []).map((r) => ({
+        file: r.path,
+        content: r.content
+      }));
+      return { success: true, contexts, summary: result.finish.payload };
+    }
+  });
+}
+
+export {
+  createMorphWarpGrepTool
+};
+//# sourceMappingURL=chunk-S3GI7HNR.js.map

@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/vercel.ts"],"sourcesContent":["import { tool } from 'ai';\nimport { z } from 'zod';\nimport { runWarpGrep } from './agent/runner.js';\nimport type { WarpGrepProvider } from './providers/types.js';\nimport { LocalRipgrepProvider } from './providers/local.js';\nimport { AGENT_CONFIG } from './agent/config.js';\n\nexport type WarpGrepConfig = {\n repoRoot: string;\n provider?: WarpGrepProvider;\n excludes?: string[];\n includes?: string[];\n debug?: boolean;\n apiKey?: string;\n description?: string;\n};\n\nexport function createMorphWarpGrepTool(config: WarpGrepConfig) {\n const schema = z.object({\n query: z.string().describe('Free-form repository question'),\n });\n\n return tool({\n description: config.description ?? 'A fast and accurate tool that can search for all relevant context in a codebase. You must use this tool to save time and avoid context pollution.',\n inputSchema: schema,\n execute: async (params) => {\n const provider = config.provider ?? new LocalRipgrepProvider(config.repoRoot, config.excludes);\n const result = await runWarpGrep({\n query: params.query,\n repoRoot: config.repoRoot,\n provider,\n excludes: config.excludes,\n includes: config.includes,\n debug: config.debug ?? false,\n apiKey: config.apiKey,\n });\n if (result.terminationReason !== 'completed' || !result.finish?.metadata) {\n return { success: false, error: 'Search did not complete', messages: result.messages };\n }\n const contexts = (result.finish.resolved || []).map((r: any) => ({\n file: r.path,\n content: r.content,\n }));\n return { success: true, contexts, summary: result.finish.payload };\n },\n });\n}\n\n\n"],"mappings":";;;;;;;;AAAA,SAAS,YAAY;AACrB,SAAS,SAAS;AAgBX,SAAS,wBAAwB,QAAwB;AAC9D,QAAM,SAAS,EAAE,OAAO;AAAA,IACtB,OAAO,EAAE,OAAO,EAAE,SAAS,+BAA+B;AAAA,EAC5D,CAAC;AAED,SAAO,KAAK;AAAA,IACV,aAAa,OAAO,eAAe;AAAA,IACnC,aAAa;AAAA,IACb,SAAS,OAAO,WAAW;AACzB,YAAM,WAAW,OAAO,YAAY,IAAI,qBAAqB,OAAO,UAAU,OAAO,QAAQ;AAC7F,YAAM,SAAS,MAAM,YAAY;AAAA,QAC/B,OAAO,OAAO;AAAA,QACd,UAAU,OAAO;AAAA,QACjB;AAAA,QACA,UAAU,OAAO;AAAA,QACjB,UAAU,OAAO;AAAA,QACjB,OAAO,OAAO,SAAS;AAAA,QACvB,QAAQ,OAAO;AAAA,MACjB,CAAC;AACD,UAAI,OAAO,sBAAsB,eAAe,CAAC,OAAO,QAAQ,UAAU;AACxE,eAAO,EAAE,SAAS,OAAO,OAAO,2BAA2B,UAAU,OAAO,SAAS;AAAA,MACvF;AACA,YAAM,YAAY,OAAO,OAAO,YAAY,CAAC,GAAG,IAAI,CAAC,OAAY;AAAA,QAC/D,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,MACb,EAAE;AACF,aAAO,EAAE,SAAS,MAAM,UAAU,SAAS,OAAO,OAAO,QAAQ;AAAA,IACnE;AAAA,EACF,CAAC;AACH;","names":[]}

@@ -0,0 +1,53 @@
+// tools/warp_grep/utils/paths.ts
+import fs from "fs";
+import path from "path";
+function resolveUnderRepo(repoRoot, targetPath) {
+  const absRoot = path.resolve(repoRoot);
+  const resolved = path.resolve(absRoot, targetPath);
+  ensureWithinRepo(absRoot, resolved);
+  return resolved;
+}
+function ensureWithinRepo(repoRoot, absTarget) {
+  const rel = path.relative(path.resolve(repoRoot), path.resolve(absTarget));
+  if (rel.startsWith("..") || path.isAbsolute(rel)) {
+    throw new Error(`Path outside repository root: ${absTarget}`);
+  }
+}
+function toRepoRelative(repoRoot, absPath) {
+  return path.relative(path.resolve(repoRoot), path.resolve(absPath));
+}
+function isSymlink(p) {
+  try {
+    const st = fs.lstatSync(p);
+    return st.isSymbolicLink();
+  } catch {
+    return false;
+  }
+}
+function isTextualFile(filePath, maxBytes = 2e6) {
+  try {
+    const st = fs.statSync(filePath);
+    if (!st.isFile()) return false;
+    if (st.size > maxBytes) return false;
+    const fd = fs.openSync(filePath, "r");
+    const buf = Buffer.alloc(512);
+    const read = fs.readSync(fd, buf, 0, buf.length, 0);
+    fs.closeSync(fd);
+    for (let i = 0; i < read; i++) {
+      const c = buf[i];
+      if (c === 0) return false;
+    }
+    return true;
+  } catch {
+    return false;
+  }
+}
+
+export {
+  resolveUnderRepo,
+  ensureWithinRepo,
+  toRepoRelative,
+  isSymlink,
+  isTextualFile
+};
+//# sourceMappingURL=chunk-SMGZ6A64.js.map

@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/utils/paths.ts"],"sourcesContent":["import fs from 'fs';\nimport path from 'path';\n\nexport function resolveUnderRepo(repoRoot: string, targetPath: string): string {\n const absRoot = path.resolve(repoRoot);\n const resolved = path.resolve(absRoot, targetPath);\n ensureWithinRepo(absRoot, resolved);\n return resolved;\n}\n\nexport function ensureWithinRepo(repoRoot: string, absTarget: string): void {\n const rel = path.relative(path.resolve(repoRoot), path.resolve(absTarget));\n if (rel.startsWith('..') || path.isAbsolute(rel)) {\n throw new Error(`Path outside repository root: ${absTarget}`);\n }\n}\n\nexport function toRepoRelative(repoRoot: string, absPath: string): string {\n return path.relative(path.resolve(repoRoot), path.resolve(absPath));\n}\n\nexport function isSymlink(p: string): boolean {\n try {\n const st = fs.lstatSync(p);\n return st.isSymbolicLink();\n } catch {\n return false;\n }\n}\n\nexport function isTextualFile(filePath: string, maxBytes = 2_000_000): boolean {\n try {\n const st = fs.statSync(filePath);\n if (!st.isFile()) return false;\n if (st.size > maxBytes) return false;\n const fd = fs.openSync(filePath, 'r');\n const buf = Buffer.alloc(512);\n const read = fs.readSync(fd, buf, 0, buf.length, 0);\n fs.closeSync(fd);\n for (let i = 0; i < read; i++) {\n const c = buf[i];\n if (c === 0) return false; // binary heuristic\n }\n return true;\n } catch {\n return false;\n }\n}\n\n\n"],"mappings":";AAAA,OAAO,QAAQ;AACf,OAAO,UAAU;AAEV,SAAS,iBAAiB,UAAkB,YAA4B;AAC7E,QAAM,UAAU,KAAK,QAAQ,QAAQ;AACrC,QAAM,WAAW,KAAK,QAAQ,SAAS,UAAU;AACjD,mBAAiB,SAAS,QAAQ;AAClC,SAAO;AACT;AAEO,SAAS,iBAAiB,UAAkB,WAAyB;AAC1E,QAAM,MAAM,KAAK,SAAS,KAAK,QAAQ,QAAQ,GAAG,KAAK,QAAQ,SAAS,CAAC;AACzE,MAAI,IAAI,WAAW,IAAI,KAAK,KAAK,WAAW,GAAG,GAAG;AAChD,UAAM,IAAI,MAAM,iCAAiC,SAAS,EAAE;AAAA,EAC9D;AACF;AAEO,SAAS,eAAe,UAAkB,SAAyB;AACxE,SAAO,KAAK,SAAS,KAAK,QAAQ,QAAQ,GAAG,KAAK,QAAQ,OAAO,CAAC;AACpE;AAEO,SAAS,UAAU,GAAoB;AAC5C,MAAI;AACF,UAAM,KAAK,GAAG,UAAU,CAAC;AACzB,WAAO,GAAG,eAAe;AAAA,EAC3B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEO,SAAS,cAAc,UAAkB,WAAW,KAAoB;AAC7E,MAAI;AACF,UAAM,KAAK,GAAG,SAAS,QAAQ;AAC/B,QAAI,CAAC,GAAG,OAAO,EAAG,QAAO;AACzB,QAAI,GAAG,OAAO,SAAU,QAAO;AAC/B,UAAM,KAAK,GAAG,SAAS,UAAU,GAAG;AACpC,UAAM,MAAM,OAAO,MAAM,GAAG;AAC5B,UAAM,OAAO,GAAG,SAAS,IAAI,KAAK,GAAG,IAAI,QAAQ,CAAC;AAClD,OAAG,UAAU,EAAE;AACf,aAAS,IAAI,GAAG,IAAI,MAAM,KAAK;AAC7B,YAAM,IAAI,IAAI,CAAC;AACf,UAAI,MAAM,EAAG,QAAO;AAAA,IACtB;AACA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;","names":[]}

@@ -0,0 +1,81 @@
+// tools/warp_grep/agent/formatter.ts
+var ToolOutputFormatter = class {
+  format(toolName, args, output, options = {}) {
+    const name = (toolName ?? "").trim();
+    if (!name) {
+      return "";
+    }
+    const payload = output?.toString?.()?.trim?.() ?? "";
+    const isError = Boolean(options.isError);
+    const safeArgs = args ?? {};
+    if (!payload && !isError) {
+      return "";
+    }
+    switch (name) {
+      case "read":
+        return this.formatRead(safeArgs, payload, isError);
+      case "analyse":
+        return this.formatAnalyse(safeArgs, payload, isError);
+      case "grep":
+        return this.formatGrep(safeArgs, payload, isError);
+      default:
+        return payload ? `<tool_output>
+${payload}
+</tool_output>` : "";
+    }
+  }
+  formatRead(args, payload, isError) {
+    if (isError) {
+      return payload;
+    }
+    const path = this.asString(args.path) || "...";
+    return `<file path="${path}">
+${payload}
+</file>`;
+  }
+  formatAnalyse(args, payload, isError) {
+    const path = this.asString(args.path) || ".";
+    if (isError) {
+      return `<analyse_results path="${path}" status="error">
+${payload}
+</analyse_results>`;
+    }
+    return `<analyse_results path="${path}">
+${payload}
+</analyse_results>`;
+  }
+  formatGrep(args, payload, isError) {
+    const pattern = this.asString(args.pattern);
+    const path = this.asString(args.path);
+    const attributes = [];
+    if (pattern !== void 0) {
+      attributes.push(`pattern="${pattern}"`);
+    }
+    if (path !== void 0) {
+      attributes.push(`path="${path}"`);
+    }
+    if (isError) {
+      attributes.push('status="error"');
+    }
+    const attrText = attributes.length ? ` ${attributes.join(" ")}` : "";
+    return `<grep_output${attrText}>
+${payload}
+</grep_output>`;
+  }
+  asString(value) {
+    if (value === null || value === void 0) {
+      return void 0;
+    }
+    return String(value);
+  }
+};
+var sharedFormatter = new ToolOutputFormatter();
+function formatAgentToolOutput(toolName, args, output, options = {}) {
+  return sharedFormatter.format(toolName, args, output, options);
+}
+
+export {
+  ToolOutputFormatter,
+  formatAgentToolOutput
+};
+//# sourceMappingURL=chunk-TICMYDII.js.map

@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/agent/formatter.ts"],"sourcesContent":["export class ToolOutputFormatter {\n\tformat(\n\t\ttoolName: string,\n\t\targs: Record<string, unknown> | null | undefined,\n\t\toutput: string,\n\t\toptions: { isError?: boolean } = {}\n\t): string {\n\t\tconst name = (toolName ?? \"\").trim();\n\t\tif (!name) {\n\t\t\treturn \"\";\n\t\t}\n\t\tconst payload = (output as any)?.toString?.()?.trim?.() ?? \"\";\n\t\tconst isError = Boolean(options.isError);\n\t\tconst safeArgs = args ?? {};\n\n\t\tif (!payload && !isError) {\n\t\t\treturn \"\";\n\t\t}\n\n\t\tswitch (name) {\n\t\t\tcase \"read\":\n\t\t\t\treturn this.formatRead(safeArgs, payload, isError);\n\t\t\tcase \"analyse\":\n\t\t\t\treturn this.formatAnalyse(safeArgs, payload, isError);\n\t\t\tcase \"grep\":\n\t\t\t\treturn this.formatGrep(safeArgs, payload, isError);\n\t\t\tdefault:\n\t\t\t\treturn payload ? `<tool_output>\\n${payload}\\n</tool_output>` : \"\";\n\t\t}\n\t}\n\n\tprivate formatRead(args: Record<string, unknown>, payload: string, isError: boolean): string {\n\t\tif (isError) {\n\t\t\treturn payload;\n\t\t}\n\t\tconst path = this.asString(args.path) || \"...\";\n\t\treturn `<file path=\"${path}\">\\n${payload}\\n</file>`;\n\t}\n\n\tprivate formatAnalyse(args: Record<string, unknown>, payload: string, isError: boolean): string {\n\t\tconst path = this.asString(args.path) || \".\";\n\t\tif (isError) {\n\t\t\treturn `<analyse_results path=\"${path}\" status=\"error\">\\n${payload}\\n</analyse_results>`;\n\t\t}\n\t\treturn `<analyse_results path=\"${path}\">\\n${payload}\\n</analyse_results>`;\n\t}\n\n\tprivate formatGrep(args: Record<string, unknown>, payload: string, isError: boolean): string {\n\t\tconst pattern = this.asString(args.pattern);\n\t\tconst path = this.asString(args.path);\n\t\tconst attributes: string[] = [];\n\t\tif (pattern !== undefined) {\n\t\t\tattributes.push(`pattern=\"${pattern}\"`);\n\t\t}\n\t\tif (path !== undefined) {\n\t\t\tattributes.push(`path=\"${path}\"`);\n\t\t}\n\t\tif (isError) {\n\t\t\tattributes.push('status=\"error\"');\n\t\t}\n\t\tconst attrText = attributes.length ? 
` ${attributes.join(\" \")}` : \"\";\n\t\treturn `<grep_output${attrText}>\\n${payload}\\n</grep_output>`;\n\t}\n\n\tprivate asString(value: unknown): string | undefined {\n\t\tif (value === null || value === undefined) {\n\t\t\treturn undefined;\n\t\t}\n\t\treturn String(value);\n\t}\n}\n\nconst sharedFormatter = new ToolOutputFormatter();\n\nexport function formatAgentToolOutput(\n\ttoolName: string,\n\targs: Record<string, unknown> | null | undefined,\n\toutput: string,\n\toptions: { isError?: boolean } = {}\n): string {\n\treturn sharedFormatter.format(toolName, args, output, options);\n}\n\n"],"mappings":";AAAO,IAAM,sBAAN,MAA0B;AAAA,EAChC,OACC,UACA,MACA,QACA,UAAiC,CAAC,GACzB;AACT,UAAM,QAAQ,YAAY,IAAI,KAAK;AACnC,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AACA,UAAM,UAAW,QAAgB,WAAW,GAAG,OAAO,KAAK;AAC3D,UAAM,UAAU,QAAQ,QAAQ,OAAO;AACvC,UAAM,WAAW,QAAQ,CAAC;AAE1B,QAAI,CAAC,WAAW,CAAC,SAAS;AACzB,aAAO;AAAA,IACR;AAEA,YAAQ,MAAM;AAAA,MACb,KAAK;AACJ,eAAO,KAAK,WAAW,UAAU,SAAS,OAAO;AAAA,MAClD,KAAK;AACJ,eAAO,KAAK,cAAc,UAAU,SAAS,OAAO;AAAA,MACrD,KAAK;AACJ,eAAO,KAAK,WAAW,UAAU,SAAS,OAAO;AAAA,MAClD;AACC,eAAO,UAAU;AAAA,EAAkB,OAAO;AAAA,kBAAqB;AAAA,IACjE;AAAA,EACD;AAAA,EAEQ,WAAW,MAA+B,SAAiB,SAA0B;AAC5F,QAAI,SAAS;AACZ,aAAO;AAAA,IACR;AACA,UAAM,OAAO,KAAK,SAAS,KAAK,IAAI,KAAK;AACzC,WAAO,eAAe,IAAI;AAAA,EAAO,OAAO;AAAA;AAAA,EACzC;AAAA,EAEQ,cAAc,MAA+B,SAAiB,SAA0B;AAC/F,UAAM,OAAO,KAAK,SAAS,KAAK,IAAI,KAAK;AACzC,QAAI,SAAS;AACZ,aAAO,0BAA0B,IAAI;AAAA,EAAsB,OAAO;AAAA;AAAA,IACnE;AACA,WAAO,0BAA0B,IAAI;AAAA,EAAO,OAAO;AAAA;AAAA,EACpD;AAAA,EAEQ,WAAW,MAA+B,SAAiB,SAA0B;AAC5F,UAAM,UAAU,KAAK,SAAS,KAAK,OAAO;AAC1C,UAAM,OAAO,KAAK,SAAS,KAAK,IAAI;AACpC,UAAM,aAAuB,CAAC;AAC9B,QAAI,YAAY,QAAW;AAC1B,iBAAW,KAAK,YAAY,OAAO,GAAG;AAAA,IACvC;AACA,QAAI,SAAS,QAAW;AACvB,iBAAW,KAAK,SAAS,IAAI,GAAG;AAAA,IACjC;AACA,QAAI,SAAS;AACZ,iBAAW,KAAK,gBAAgB;AAAA,IACjC;AACA,UAAM,WAAW,WAAW,SAAS,IAAI,WAAW,KAAK,GAAG,CAAC,KAAK;AAClE,WAAO,eAAe,QAAQ;AAAA,EAAM,OAAO;AAAA;AAAA,EAC5C;AAAA,EAEQ,SAAS,OAAoC;AACpD,QAAI,UAAU,QAAQ,UAAU,QAAW;AAC1C,aAAO;AAAA,IACR;AACA,WAAO,OAAO,KAAK;AAAA,EACpB;AACD;AAEA,IAAM,kBAAkB,IAAI,oBAAoB;AAEzC,SAAS,sBACf,UACA,MACA,QACA,UAAiC,CAAC,GACzB;AACT,SAAO,gBAAgB,OAAO,UAAU,MAAM,QAAQ,OAAO;AAC9D;","names":[]}
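
The formatter chunk normalizes tool output into XML-ish blocks before it is fed back to the model. A quick illustration of the wrapping, using only the `formatAgentToolOutput` helper defined above (the internal import is illustrative):

```ts
import { formatAgentToolOutput } from "./chunk-TICMYDII.js"; // illustrative internal import

const wrapped = formatAgentToolOutput(
  "grep",
  { pattern: "create_user", path: "src/api" },
  "src/api/users.ts\n42:export function create_user(input) {"
);
console.log(wrapped);
// <grep_output pattern="create_user" path="src/api">
// src/api/users.ts
// 42:export function create_user(input) {
// </grep_output>

// Errors keep the same shape but add status="error".
console.log(formatAgentToolOutput("analyse", { path: "src" }, "[error] not a directory", { isError: true }));
// <analyse_results path="src" status="error">
// [error] not a directory
// </analyse_results>
```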

@@ -0,0 +1,135 @@
+import {
+  readAllLines
+} from "./chunk-G2RSY56Q.js";
+import {
+  isSymlink,
+  isTextualFile,
+  resolveUnderRepo,
+  toRepoRelative
+} from "./chunk-SMGZ6A64.js";
+import {
+  runRipgrep
+} from "./chunk-RSLIOCOE.js";
+import {
+  DEFAULT_EXCLUDES
+} from "./chunk-AFEPUNAO.js";
+
+// tools/warp_grep/providers/local.ts
+import fs from "fs/promises";
+import path from "path";
+var LocalRipgrepProvider = class {
+  constructor(repoRoot, excludes = DEFAULT_EXCLUDES) {
+    this.repoRoot = repoRoot;
+    this.excludes = excludes;
+  }
+  async grep(params) {
+    const abs = resolveUnderRepo(this.repoRoot, params.path);
+    const stat = await fs.stat(abs).catch(() => null);
+    if (!stat) return { lines: [] };
+    const targetArg = abs === path.resolve(this.repoRoot) ? "." : toRepoRelative(this.repoRoot, abs);
+    const args = [
+      "--no-config",
+      "--no-heading",
+      "--with-filename",
+      "--line-number",
+      "--color=never",
+      "--trim",
+      "--max-columns=400",
+      ...this.excludes.flatMap((e) => ["-g", `!${e}`]),
+      params.pattern,
+      targetArg || "."
+    ];
+    const res = await runRipgrep(args, { cwd: this.repoRoot });
+    if (res.exitCode === -1) {
+      throw new Error(res.stderr || "ripgrep (rg) execution failed.");
+    }
+    if (res.exitCode !== 0 && res.exitCode !== 1) {
+      throw new Error(res.stderr || `ripgrep failed with code ${res.exitCode}`);
+    }
+    const lines = (res.stdout || "").trim().split(/\r?\n/).filter((l) => l.length > 0);
+    return { lines };
+  }
+  async glob(params) {
+    const abs = resolveUnderRepo(this.repoRoot, params.path);
+    const targetArg = abs === path.resolve(this.repoRoot) ? "." : toRepoRelative(this.repoRoot, abs);
+    const args = [
+      "--no-config",
+      "--files",
+      "-g",
+      params.pattern,
+      ...this.excludes.flatMap((e) => ["-g", `!${e}`]),
+      targetArg || "."
+    ];
+    const res = await runRipgrep(args, { cwd: this.repoRoot });
+    if (res.exitCode === -1) {
+      throw new Error(res.stderr || "ripgrep (rg) execution failed.");
+    }
+    const files = (res.stdout || "").trim().split(/\r?\n/).filter((l) => l.length > 0);
+    return { files };
+  }
+  async read(params) {
+    const abs = resolveUnderRepo(this.repoRoot, params.path);
+    const stat = await fs.stat(abs).catch(() => null);
+    if (!stat || !stat.isFile()) {
+      throw new Error(`Path is not a file: ${params.path}`);
+    }
+    if (isSymlink(abs)) {
+      throw new Error(`Refusing to read symlink: ${params.path}`);
+    }
+    if (!isTextualFile(abs)) {
+      throw new Error(`Non-text or too-large file: ${params.path}`);
+    }
+    const lines = await readAllLines(abs);
+    const total = lines.length;
+    const s = params.start ?? 1;
+    const e = Math.min(params.end ?? total, total);
+    if (s > total && total > 0) {
+      throw new Error(`start ${s} exceeds file length (${total})`);
+    }
+    const out = [];
+    for (let i = s; i <= e; i += 1) {
+      const content = lines[i - 1] ?? "";
+      out.push(`${i}|${content}`);
+    }
+    return { lines: out };
+  }
+  async analyse(params) {
+    const abs = resolveUnderRepo(this.repoRoot, params.path);
+    const stat = await fs.stat(abs).catch(() => null);
+    if (!stat || !stat.isDirectory()) {
+      return [];
+    }
+    const maxResults = params.maxResults ?? 100;
+    const maxDepth = params.maxDepth ?? 2;
+    const regex = params.pattern ? new RegExp(params.pattern) : null;
+    const results = [];
+    async function walk(dir, depth) {
+      if (depth > maxDepth || results.length >= maxResults) return;
+      const entries = await fs.readdir(dir, { withFileTypes: true });
+      for (const entry of entries) {
+        const full = path.join(dir, entry.name);
+        const rel = toRepoRelative(abs, full).replace(/^[.][/\\]?/, "");
+        if (DEFAULT_EXCLUDES.some((ex) => rel.split(path.sep).includes(ex))) continue;
+        if (regex && !regex.test(entry.name)) continue;
+        if (results.length >= maxResults) break;
+        results.push({
+          name: entry.name,
+          path: toRepoRelative(path.resolve(""), full),
+          // relative display
+          type: entry.isDirectory() ? "dir" : "file",
+          depth
+        });
+        if (entry.isDirectory()) {
+          await walk(full, depth + 1);
+        }
+      }
+    }
+    await walk(abs, 0);
+    return results;
+  }
+};
+
+export {
+  LocalRipgrepProvider
+};
+//# sourceMappingURL=chunk-UYBIKZPM.js.map

@@ -0,0 +1 @@
{"version":3,"sources":["../tools/warp_grep/providers/local.ts"],"sourcesContent":["import fs from 'fs/promises';\nimport fssync from 'fs';\nimport path from 'path';\nimport { runRipgrep } from '../utils/ripgrep.js';\nimport { ensureWithinRepo, resolveUnderRepo, toRepoRelative, isSymlink, isTextualFile } from '../utils/paths.js';\nimport type { WarpGrepProvider, GrepResult, ReadResult, AnalyseEntry } from './types.js';\nimport { readAllLines } from '../utils/files.js';\nimport { DEFAULT_EXCLUDES } from '../agent/config.js';\n\nexport class LocalRipgrepProvider implements WarpGrepProvider {\n constructor(private readonly repoRoot: string, private readonly excludes: string[] = DEFAULT_EXCLUDES) {}\n\n async grep(params: { pattern: string; path: string }): Promise<GrepResult> {\n const abs = resolveUnderRepo(this.repoRoot, params.path);\n const stat = await fs.stat(abs).catch(() => null);\n if (!stat) return { lines: [] };\n const targetArg = abs === path.resolve(this.repoRoot) ? '.' : toRepoRelative(this.repoRoot, abs);\n const args = [\n '--no-config',\n '--no-heading',\n '--with-filename',\n '--line-number',\n '--color=never',\n '--trim',\n '--max-columns=400',\n ...this.excludes.flatMap((e) => ['-g', `!${e}`]),\n params.pattern,\n targetArg || '.',\n ];\n const res = await runRipgrep(args, { cwd: this.repoRoot });\n if (res.exitCode === -1) {\n throw new Error(res.stderr || 'ripgrep (rg) execution failed.');\n }\n if (res.exitCode !== 0 && res.exitCode !== 1) {\n throw new Error(res.stderr || `ripgrep failed with code ${res.exitCode}`);\n }\n const lines = (res.stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((l) => l.length > 0);\n return { lines };\n }\n\n async glob(params: { pattern: string; path: string }): Promise<{ files: string[] }> {\n const abs = resolveUnderRepo(this.repoRoot, params.path);\n const targetArg = abs === path.resolve(this.repoRoot) ? '.' : toRepoRelative(this.repoRoot, abs);\n const args = [\n '--no-config',\n '--files',\n '-g',\n params.pattern,\n ...this.excludes.flatMap((e) => ['-g', `!${e}`]),\n targetArg || '.',\n ];\n const res = await runRipgrep(args, { cwd: this.repoRoot });\n if (res.exitCode === -1) {\n throw new Error(res.stderr || 'ripgrep (rg) execution failed.');\n }\n const files = (res.stdout || '')\n .trim()\n .split(/\\r?\\n/)\n .filter((l) => l.length > 0);\n return { files };\n }\n\n async read(params: { path: string; start?: number; end?: number }): Promise<ReadResult> {\n const abs = resolveUnderRepo(this.repoRoot, params.path);\n const stat = await fs.stat(abs).catch(() => null);\n if (!stat || !stat.isFile()) {\n throw new Error(`Path is not a file: ${params.path}`);\n }\n if (isSymlink(abs)) {\n throw new Error(`Refusing to read symlink: ${params.path}`);\n }\n if (!isTextualFile(abs)) {\n throw new Error(`Non-text or too-large file: ${params.path}`);\n }\n const lines = await readAllLines(abs);\n const total = lines.length;\n const s = params.start ?? 1;\n const e = Math.min(params.end ?? total, total);\n if (s > total && total > 0) {\n throw new Error(`start ${s} exceeds file length (${total})`);\n }\n const out: string[] = [];\n for (let i = s; i <= e; i += 1) {\n const content = lines[i - 1] ?? 
'';\n out.push(`${i}|${content}`);\n }\n return { lines: out };\n }\n\n async analyse(params: { path: string; pattern?: string | null; maxResults?: number; maxDepth?: number }): Promise<AnalyseEntry[]> {\n const abs = resolveUnderRepo(this.repoRoot, params.path);\n const stat = await fs.stat(abs).catch(() => null);\n if (!stat || !stat.isDirectory()) {\n return [];\n }\n const maxResults = params.maxResults ?? 100;\n const maxDepth = params.maxDepth ?? 2;\n const regex = params.pattern ? new RegExp(params.pattern) : null;\n\n const results: AnalyseEntry[] = [];\n async function walk(dir: string, depth: number) {\n if (depth > maxDepth || results.length >= maxResults) return;\n const entries = await fs.readdir(dir, { withFileTypes: true });\n for (const entry of entries) {\n const full = path.join(dir, entry.name);\n const rel = toRepoRelative(abs, full).replace(/^[.][/\\\\]?/, '');\n if (DEFAULT_EXCLUDES.some((ex) => rel.split(path.sep).includes(ex))) continue;\n if (regex && !regex.test(entry.name)) continue;\n if (results.length >= maxResults) break;\n results.push({\n name: entry.name,\n path: toRepoRelative(path.resolve(''), full), // relative display\n type: entry.isDirectory() ? 'dir' : 'file',\n depth,\n });\n if (entry.isDirectory()) {\n await walk(full, depth + 1);\n }\n }\n }\n await walk(abs, 0);\n return results;\n }\n}\n\n\n"],"mappings":";;;;;;;;;;;;;;;;;AAAA,OAAO,QAAQ;AAEf,OAAO,UAAU;AAOV,IAAM,uBAAN,MAAuD;AAAA,EAC5D,YAA6B,UAAmC,WAAqB,kBAAkB;AAA1E;AAAmC;AAAA,EAAwC;AAAA,EAExG,MAAM,KAAK,QAAgE;AACzE,UAAM,MAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AACvD,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAChD,QAAI,CAAC,KAAM,QAAO,EAAE,OAAO,CAAC,EAAE;AAC9B,UAAM,YAAY,QAAQ,KAAK,QAAQ,KAAK,QAAQ,IAAI,MAAM,eAAe,KAAK,UAAU,GAAG;AAC/F,UAAM,OAAO;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,GAAG,KAAK,SAAS,QAAQ,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC;AAAA,MAC/C,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AACA,UAAM,MAAM,MAAM,WAAW,MAAM,EAAE,KAAK,KAAK,SAAS,CAAC;AACzD,QAAI,IAAI,aAAa,IAAI;AACvB,YAAM,IAAI,MAAM,IAAI,UAAU,gCAAgC;AAAA,IAChE;AACA,QAAI,IAAI,aAAa,KAAK,IAAI,aAAa,GAAG;AAC5C,YAAM,IAAI,MAAM,IAAI,UAAU,4BAA4B,IAAI,QAAQ,EAAE;AAAA,IAC1E;AACA,UAAM,SAAS,IAAI,UAAU,IAC1B,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAC7B,WAAO,EAAE,MAAM;AAAA,EACjB;AAAA,EAEA,MAAM,KAAK,QAAyE;AAClF,UAAM,MAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AACvD,UAAM,YAAY,QAAQ,KAAK,QAAQ,KAAK,QAAQ,IAAI,MAAM,eAAe,KAAK,UAAU,GAAG;AAC/F,UAAM,OAAO;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA,OAAO;AAAA,MACP,GAAG,KAAK,SAAS,QAAQ,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,EAAE,CAAC;AAAA,MAC/C,aAAa;AAAA,IACf;AACA,UAAM,MAAM,MAAM,WAAW,MAAM,EAAE,KAAK,KAAK,SAAS,CAAC;AACzD,QAAI,IAAI,aAAa,IAAI;AACvB,YAAM,IAAI,MAAM,IAAI,UAAU,gCAAgC;AAAA,IAChE;AACA,UAAM,SAAS,IAAI,UAAU,IAC1B,KAAK,EACL,MAAM,OAAO,EACb,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AAC7B,WAAO,EAAE,MAAM;AAAA,EACjB;AAAA,EAEA,MAAM,KAAK,QAA6E;AACtF,UAAM,MAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AACvD,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAChD,QAAI,CAAC,QAAQ,CAAC,KAAK,OAAO,GAAG;AAC3B,YAAM,IAAI,MAAM,uBAAuB,OAAO,IAAI,EAAE;AAAA,IACtD;AACA,QAAI,UAAU,GAAG,GAAG;AAClB,YAAM,IAAI,MAAM,6BAA6B,OAAO,IAAI,EAAE;AAAA,IAC5D;AACA,QAAI,CAAC,cAAc,GAAG,GAAG;AACvB,YAAM,IAAI,MAAM,+BAA+B,OAAO,IAAI,EAAE;AAAA,IAC9D;AACA,UAAM,QAAQ,MAAM,aAAa,GAAG;AACpC,UAAM,QAAQ,MAAM;AACpB,UAAM,IAAI,OAAO,SAAS;AAC1B,UAAM,IAAI,KAAK,IAAI,OAAO,OAAO,OAAO,KAAK;AAC7C,QAAI,IAAI,SAAS,QAAQ,GAAG;AAC1B,YAAM,IAAI,MAAM,SAAS,CAAC,yBAAyB,KAAK,GAAG;AAAA,IAC7D;AACA,UAAM,MAAgB,CAAC;AACvB,aAAS,IAAI,GAAG,KAAK,GAAG,KAAK,GAAG;AAC9B,YAAM,UAAU,MAAM,IAAI,CAAC,KAAK;AAChC,UAAI,KAAK,GAAG,C
AAC,IAAI,OAAO,EAAE;AAAA,IAC5B;AACA,WAAO,EAAE,OAAO,IAAI;AAAA,EACtB;AAAA,EAEA,MAAM,QAAQ,QAAoH;AAChI,UAAM,MAAM,iBAAiB,KAAK,UAAU,OAAO,IAAI;AACvD,UAAM,OAAO,MAAM,GAAG,KAAK,GAAG,EAAE,MAAM,MAAM,IAAI;AAChD,QAAI,CAAC,QAAQ,CAAC,KAAK,YAAY,GAAG;AAChC,aAAO,CAAC;AAAA,IACV;AACA,UAAM,aAAa,OAAO,cAAc;AACxC,UAAM,WAAW,OAAO,YAAY;AACpC,UAAM,QAAQ,OAAO,UAAU,IAAI,OAAO,OAAO,OAAO,IAAI;AAE5D,UAAM,UAA0B,CAAC;AACjC,mBAAe,KAAK,KAAa,OAAe;AAC9C,UAAI,QAAQ,YAAY,QAAQ,UAAU,WAAY;AACtD,YAAM,UAAU,MAAM,GAAG,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC7D,iBAAW,SAAS,SAAS;AAC3B,cAAM,OAAO,KAAK,KAAK,KAAK,MAAM,IAAI;AACtC,cAAM,MAAM,eAAe,KAAK,IAAI,EAAE,QAAQ,cAAc,EAAE;AAC9D,YAAI,iBAAiB,KAAK,CAAC,OAAO,IAAI,MAAM,KAAK,GAAG,EAAE,SAAS,EAAE,CAAC,EAAG;AACrE,YAAI,SAAS,CAAC,MAAM,KAAK,MAAM,IAAI,EAAG;AACtC,YAAI,QAAQ,UAAU,WAAY;AAClC,gBAAQ,KAAK;AAAA,UACX,MAAM,MAAM;AAAA,UACZ,MAAM,eAAe,KAAK,QAAQ,EAAE,GAAG,IAAI;AAAA;AAAA,UAC3C,MAAM,MAAM,YAAY,IAAI,QAAQ;AAAA,UACpC;AAAA,QACF,CAAC;AACD,YAAI,MAAM,YAAY,GAAG;AACvB,gBAAM,KAAK,MAAM,QAAQ,CAAC;AAAA,QAC5B;AAAA,MACF;AAAA,IACF;AACA,UAAM,KAAK,KAAK,CAAC;AACjB,WAAO;AAAA,EACT;AACF;","names":[]}
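
This last chunk is the default `LocalRipgrepProvider` that both tool flavours fall back to when no custom provider is passed. A minimal sketch of driving it directly (the relative import is illustrative; `rg` must be installed):

```ts
import { LocalRipgrepProvider } from "./chunk-UYBIKZPM.js"; // illustrative internal import

const provider = new LocalRipgrepProvider("/path/to/repo"); // excludes default to DEFAULT_EXCLUDES

// grep: raw "path:line:content" lines, excludes applied as rg globs
const { lines } = await provider.grep({ pattern: "runWarpGrep", path: "." });

// read: 1-based inclusive range, returned as "lineNumber|content"
const { lines: snippet } = await provider.read({ path: "dist/index.js", start: 1, end: 20 });

// analyse: shallow directory walk (depth <= 2, <= 100 entries by default)
const entries = await provider.analyse({ path: "dist" });

console.log(lines.length, snippet[0], entries.map((e) => e.name));
```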