@howaboua/opencode-chat 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +116 -0
- package/dist/config.d.ts +13 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +65 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +44 -0
- package/dist/script/download-model.d.ts +2 -0
- package/dist/script/download-model.d.ts.map +1 -0
- package/dist/script/download-model.js +39 -0
- package/dist/script/semantic-index.d.ts +2 -0
- package/dist/script/semantic-index.d.ts.map +1 -0
- package/dist/script/semantic-index.js +63 -0
- package/dist/semantic/chunker.d.ts +8 -0
- package/dist/semantic/chunker.d.ts.map +1 -0
- package/dist/semantic/chunker.js +163 -0
- package/dist/semantic/embedder.d.ts +12 -0
- package/dist/semantic/embedder.d.ts.map +1 -0
- package/dist/semantic/embedder.js +54 -0
- package/dist/semantic/index.d.ts +41 -0
- package/dist/semantic/index.d.ts.map +1 -0
- package/dist/semantic/index.js +178 -0
- package/dist/system.d.ts +5 -0
- package/dist/system.d.ts.map +1 -0
- package/dist/system.js +93 -0
- package/dist/tools/bash.d.ts +22 -0
- package/dist/tools/bash.d.ts.map +1 -0
- package/dist/tools/bash.js +59 -0
- package/dist/tools/batch.d.ts +25 -0
- package/dist/tools/batch.d.ts.map +1 -0
- package/dist/tools/batch.js +49 -0
- package/dist/tools/edit.d.ts +25 -0
- package/dist/tools/edit.d.ts.map +1 -0
- package/dist/tools/edit.js +44 -0
- package/dist/tools/glob.d.ts +19 -0
- package/dist/tools/glob.d.ts.map +1 -0
- package/dist/tools/glob.js +54 -0
- package/dist/tools/grep.d.ts +22 -0
- package/dist/tools/grep.d.ts.map +1 -0
- package/dist/tools/grep.js +92 -0
- package/dist/tools/index.d.ts +12 -0
- package/dist/tools/index.d.ts.map +1 -0
- package/dist/tools/index.js +51 -0
- package/dist/tools/patch.d.ts +16 -0
- package/dist/tools/patch.d.ts.map +1 -0
- package/dist/tools/patch.js +87 -0
- package/dist/tools/read.d.ts +22 -0
- package/dist/tools/read.d.ts.map +1 -0
- package/dist/tools/read.js +70 -0
- package/dist/tools/remember.d.ts +16 -0
- package/dist/tools/remember.d.ts.map +1 -0
- package/dist/tools/remember.js +58 -0
- package/dist/tools/semantic-search.d.ts +19 -0
- package/dist/tools/semantic-search.d.ts.map +1 -0
- package/dist/tools/semantic-search.js +54 -0
- package/dist/tools/skill.d.ts +17 -0
- package/dist/tools/skill.d.ts.map +1 -0
- package/dist/tools/skill.js +106 -0
- package/dist/tools/todo.d.ts +62 -0
- package/dist/tools/todo.d.ts.map +1 -0
- package/dist/tools/todo.js +62 -0
- package/dist/tools/write.d.ts +19 -0
- package/dist/tools/write.d.ts.map +1 -0
- package/dist/tools/write.js +37 -0
- package/dist/util/constants.d.ts +16 -0
- package/dist/util/constants.d.ts.map +1 -0
- package/dist/util/constants.js +39 -0
- package/dist/util/patch.d.ts +32 -0
- package/dist/util/patch.d.ts.map +1 -0
- package/dist/util/patch.js +240 -0
- package/dist/util/paths.d.ts +6 -0
- package/dist/util/paths.d.ts.map +1 -0
- package/dist/util/paths.js +76 -0
- package/dist/util/text.d.ts +4 -0
- package/dist/util/text.d.ts.map +1 -0
- package/dist/util/text.js +37 -0
- package/dist/util/todo.d.ts +5 -0
- package/dist/util/todo.d.ts.map +1 -0
- package/dist/util/todo.js +48 -0
- package/dist/util/types.d.ts +22 -0
- package/dist/util/types.d.ts.map +1 -0
- package/dist/util/types.js +1 -0
- package/package.json +53 -0
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* chat_skill tool implementation.
|
|
3
|
+
* Load skills from the repo's .opencode/skill directory only.
|
|
4
|
+
*/
|
|
5
|
+
import * as path from "path";
|
|
6
|
+
import * as fs from "fs/promises";
|
|
7
|
+
import { tool } from "@opencode-ai/plugin";
|
|
8
|
+
// Minimal YAML frontmatter parser: pulls "key: value" pairs out of a
// leading "---" block and returns them alongside the remaining body text.
function parseFrontmatter(content) {
    const FRONTMATTER_RE = /^---\n([\s\S]*?)\n---\n([\s\S]*)$/;
    const match = FRONTMATTER_RE.exec(content);
    if (!match) {
        // No frontmatter block: the whole input is the body.
        return { data: {}, content };
    }
    const [, header, body] = match;
    const data = {};
    for (const rawLine of header.split("\n")) {
        const sep = rawLine.indexOf(":");
        if (sep <= 0)
            continue; // not a "key: value" line
        data[rawLine.slice(0, sep).trim()] = rawLine.slice(sep + 1).trim();
    }
    return { data, content: body };
}
|
|
26
|
+
/**
 * Build the chat_skill tool for a repo rooted at baseDir.
 * Skills are <baseDir>/.opencode/skill/<name>/SKILL.md files whose YAML
 * frontmatter declares both `name` and `description`.
 */
export function createChatSkill(baseDir) {
    const skillDir = path.join(baseDir, ".opencode", "skill");
    // Scan for skills in the repo's .opencode/skill directory.
    async function scanSkills() {
        const skills = [];
        try {
            const entries = await fs.readdir(skillDir, { withFileTypes: true });
            for (const entry of entries) {
                if (!entry.isDirectory())
                    continue;
                const skillPath = path.join(skillDir, entry.name, "SKILL.md");
                try {
                    const content = await fs.readFile(skillPath, "utf-8");
                    const { data } = parseFrontmatter(content);
                    if (data.name && data.description) {
                        skills.push({
                            name: data.name,
                            description: data.description,
                            location: skillPath,
                        });
                    }
                }
                catch {
                    // Skip if SKILL.md doesn't exist or can't be read
                }
            }
        }
        catch {
            // Skill directory doesn't exist
        }
        return skills;
    }
    // Get a specific skill by name
    async function getSkill(name) {
        const skills = await scanSkills();
        return skills.find((s) => s.name === name);
    }
    const run = async (args) => {
        // BUG FIX: the tool description promises "Call without args to see
        // available skills", but `name` was a required schema field, so a
        // no-arg call failed validation. `name` is now optional and a
        // missing/empty name lists the available skills instead.
        if (!args.name) {
            const skills = await scanSkills();
            if (skills.length === 0)
                return "No skills available.";
            return ["Available skills:", ...skills.map((s) => `- ${s.name}: ${s.description}`)].join("\n");
        }
        const skill = await getSkill(args.name);
        if (!skill) {
            const skills = await scanSkills();
            const available = skills.map((s) => s.name).join(", ");
            throw new Error(`Skill "${args.name}" not found. Available skills: ${available || "none"}`);
        }
        const content = await fs.readFile(skill.location, "utf-8");
        const parsed = parseFrontmatter(content);
        const dir = path.dirname(skill.location);
        return [`## Skill: ${skill.name}`, "", `**Base directory**: ${dir}`, "", parsed.content.trim()].join("\n");
    };
    // Build description with available skills
    const buildDescription = async () => {
        const skills = await scanSkills();
        const skillList = skills.flatMap((skill) => [
            `  <skill>`,
            `    <name>${skill.name}</name>`,
            `    <description>${skill.description}</description>`,
            `  </skill>`,
        ]);
        return [
            "Load a skill to get detailed instructions for a specific task.",
            "Skills provide specialized knowledge and step-by-step guidance.",
            "<available_skills>",
            ...skillList,
            "</available_skills>",
        ].join("\n");
    };
    return {
        id: "chat_skill",
        run,
        buildDescription,
        tool: tool({
            description: "Load a skill for step-by-step guidance. Call without args to see available skills.",
            args: {
                // Optional so that calling without a name lists the skills,
                // matching the description above.
                name: tool.schema.string().optional().describe("The skill name to load"),
            },
            async execute(args) {
                return await run(args);
            },
        }),
    };
}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import { TodoItem } from "../util/types";
// NOTE: compiler-generated declarations for dist/tools/todo.js — regenerate
// with tsc rather than editing by hand.
// Zod schema for a single todo item (content, status, priority, id).
export declare const todoSchema: import("zod").ZodObject<{
    content: import("zod").ZodString;
    status: import("zod").ZodEnum<{
        pending: "pending";
        in_progress: "in_progress";
        completed: "completed";
        cancelled: "cancelled";
    }>;
    priority: import("zod").ZodEnum<{
        high: "high";
        medium: "medium";
        low: "low";
    }>;
    id: import("zod").ZodString;
}, import("zod/v4/core").$strip>;
// Factory for the chat_todowrite / chat_todoread tool pair backed by the
// todo file at todoPath.
export declare function createChatTodo(todoPath: string): {
    write: {
        id: string;
        run: (args: {
            todos: TodoItem[];
        }) => Promise<string>;
        tool: {
            description: string;
            args: {
                todos: import("zod").ZodArray<import("zod").ZodObject<{
                    content: import("zod").ZodString;
                    status: import("zod").ZodEnum<{
                        pending: "pending";
                        in_progress: "in_progress";
                        completed: "completed";
                        cancelled: "cancelled";
                    }>;
                    priority: import("zod").ZodEnum<{
                        high: "high";
                        medium: "medium";
                        low: "low";
                    }>;
                    id: import("zod").ZodString;
                }, import("zod/v4/core").$strip>>;
            };
            execute(args: {
                todos: {
                    content: string;
                    status: "pending" | "in_progress" | "completed" | "cancelled";
                    priority: "high" | "medium" | "low";
                    id: string;
                }[];
            }, context: import("@opencode-ai/plugin").ToolContext): Promise<string>;
        };
    };
    read: {
        id: string;
        run: () => Promise<string>;
        tool: {
            description: string;
            args: {};
            execute(args: Record<string, never>, context: import("@opencode-ai/plugin").ToolContext): Promise<string>;
        };
    };
};
//# sourceMappingURL=todo.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"todo.d.ts","sourceRoot":"","sources":["../../tools/todo.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,QAAQ,EAAE,MAAM,eAAe,CAAA;AAGxC,eAAO,MAAM,UAAU;;;;;;;;;;;;;;gCAOrB,CAAA;AAEF,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM;;;oBAClB;YAAE,KAAK,EAAE,QAAQ,EAAE,CAAA;SAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA+CjD"}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* chat_todowrite and chat_todoread tool implementations.
|
|
3
|
+
* Stores todos in a repo-level todo.md file with embedded JSON.
|
|
4
|
+
* Deletes the file when all tasks are completed.
|
|
5
|
+
*/
|
|
6
|
+
import { tool } from "@opencode-ai/plugin";
|
|
7
|
+
import { readTodoFile, writeTodoFile } from "../util/todo";
|
|
8
|
+
// Zod schema for one todo item; reused as the element schema of the
// chat_todowrite `todos` argument below.
export const todoSchema = tool.schema.object({
    content: tool.schema.string().describe("Brief description of the task"),
    status: tool.schema
        .enum(["pending", "in_progress", "completed", "cancelled"])
        .describe("Current status of the task: pending, in_progress, completed, cancelled"),
    priority: tool.schema.enum(["high", "medium", "low"]).describe("Priority level of the task: high, medium, low"),
    id: tool.schema.string().describe("Unique identifier for the todo item"),
});
|
|
16
|
+
/**
 * Build the chat_todowrite / chat_todoread tool pair, both backed by the
 * todo file at todoPath (persistence handled by ../util/todo).
 */
export function createChatTodo(todoPath) {
    // Persist the full todo list, then echo the stored state back as JSON.
    const write = async (args) => {
        const message = await writeTodoFile(todoPath, args.todos);
        return `${message}\n${JSON.stringify(args.todos, null, 2)}`;
    };
    // Render the current todo list as pretty-printed JSON.
    const read = async () => JSON.stringify(await readTodoFile(todoPath), null, 2);
    const writeTool = tool({
        description: `Manage task list.

Usage:
- Use for multi-step tasks (3+ steps)
- Only one task in_progress at a time
- Mark tasks complete immediately after finishing
- Skip for simple, single-step requests`,
        args: {
            todos: tool.schema.array(todoSchema).describe("The updated todo list"),
        },
        async execute(args) {
            return await write(args);
        },
    });
    const readTool = tool({
        description: `Read task list.

Usage:
- Check at start of conversations
- Use before starting new tasks
- Review after completing work`,
        args: {},
        async execute() {
            return await read();
        },
    });
    return {
        write: { id: "chat_todowrite", run: write, tool: writeTool },
        read: { id: "chat_todoread", run: read, tool: readTool },
    };
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
// NOTE: compiler-generated declarations for dist/tools/write.js — regenerate
// with tsc rather than editing by hand.
// Factory for the chat_write tool; paths resolve against baseDir and
// results are reported relative to repoRoot.
export declare function createChatWrite(baseDir: string, repoRoot: string): {
    id: string;
    run: (args: {
        content: string;
        filePath: string;
    }) => Promise<string>;
    tool: {
        description: string;
        args: {
            content: import("zod").ZodString;
            filePath: import("zod").ZodString;
        };
        execute(args: {
            content: string;
            filePath: string;
        }, context: import("@opencode-ai/plugin").ToolContext): Promise<string>;
    };
};
//# sourceMappingURL=write.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"write.d.ts","sourceRoot":"","sources":["../../tools/write.ts"],"names":[],"mappings":"AAUA,wBAAgB,eAAe,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM;;gBACtC;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAA;KAAE;;;;;;;;;;;;EA2B/D"}
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* chat_write tool implementation.
|
|
3
|
+
* Writes or overwrites files within the working directory.
|
|
4
|
+
* Creates parent directories when needed.
|
|
5
|
+
*/
|
|
6
|
+
import * as fs from "fs/promises";
|
|
7
|
+
import path from "path";
|
|
8
|
+
import { tool } from "@opencode-ai/plugin";
|
|
9
|
+
import { resolvePath } from "../util/paths";
|
|
10
|
+
/**
 * Build the chat_write tool: write (or overwrite) a file under baseDir,
 * creating parent directories as needed, and report the path written
 * relative to repoRoot.
 */
export function createChatWrite(baseDir, repoRoot) {
    const run = async (args) => {
        const target = resolvePath(baseDir, args.filePath);
        const parentDir = path.dirname(target);
        await fs.mkdir(parentDir, { recursive: true });
        await fs.writeFile(target, args.content, "utf-8");
        return `Wrote ${path.relative(repoRoot, target)}`;
    };
    const writeTool = tool({
        description: `Write file contents.

Usage:
- Overwrites existing files completely
- Creates parent directories if needed
- Use Edit tool for partial modifications`,
        args: {
            content: tool.schema.string().describe("The content to write to the file"),
            filePath: tool.schema.string().describe("The absolute path to the file to write"),
        },
        async execute(args) {
            return await run(args);
        },
    });
    return { id: "chat_write", run, tool: writeTool };
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
 * Shared constants for the chatifier plugin.
 * Centralizes limits and defaults so tool modules stay consistent.
 * Keep these values in sync with tool behavior as it evolves.
 */
// Per-read line-count and line-length caps (presumably consumed by the
// read tool — confirm in dist/tools/read.js).
export declare const DEFAULT_READ_LIMIT = 2000;
export declare const MAX_LINE_LENGTH = 2000;
// Result-count caps for listings and grep matches.
export declare const LIST_LIMIT = 100;
export declare const MAX_GREP_MATCHES = 100;
// Tool-output and response size ceilings.
export declare const MAX_OUTPUT_LENGTH = 30000;
export declare const MAX_RESPONSE_SIZE: number;
// Web-fetch timeouts in milliseconds (default 30 s, hard cap 2 min).
export declare const DEFAULT_WEBFETCH_TIMEOUT_MS = 30000;
export declare const MAX_WEBFETCH_TIMEOUT_MS = 120000;
// Name of the repo-level todo file managed by the todo tools.
export declare const TODO_FILENAME = "todo.md";
// Directory names skipped during repo traversal; full list in constants.js.
export declare const IGNORE_DIRS: Set<string>;
//# sourceMappingURL=constants.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../util/constants.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH,eAAO,MAAM,kBAAkB,OAAO,CAAA;AACtC,eAAO,MAAM,eAAe,OAAO,CAAA;AACnC,eAAO,MAAM,UAAU,MAAM,CAAA;AAC7B,eAAO,MAAM,gBAAgB,MAAM,CAAA;AACnC,eAAO,MAAM,iBAAiB,QAAS,CAAA;AACvC,eAAO,MAAM,iBAAiB,QAAkB,CAAA;AAChD,eAAO,MAAM,2BAA2B,QAAS,CAAA;AACjD,eAAO,MAAM,uBAAuB,SAAU,CAAA;AAC9C,eAAO,MAAM,aAAa,YAAY,CAAA;AAEtC,eAAO,MAAM,WAAW,aAwBtB,CAAA"}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
/**
 * Shared constants for the chatifier plugin.
 * Centralizes limits and defaults so tool modules stay consistent.
 * Keep these values in sync with tool behavior as it evolves.
 */
export const DEFAULT_READ_LIMIT = 2000;
export const MAX_LINE_LENGTH = 2000;
export const LIST_LIMIT = 100;
export const MAX_GREP_MATCHES = 100;
export const MAX_OUTPUT_LENGTH = 30000;
export const MAX_RESPONSE_SIZE = 5 * 1024 * 1024; // 5 MiB
export const DEFAULT_WEBFETCH_TIMEOUT_MS = 30000; // 30 s
export const MAX_WEBFETCH_TIMEOUT_MS = 120000; // 2 min
export const TODO_FILENAME = "todo.md";
// Directory names skipped during repo traversal: dependency trees, VCS
// metadata, build output, editor state, caches, scratch dirs, virtualenvs.
export const IGNORE_DIRS = new Set([
    "node_modules", "__pycache__", ".git",
    "dist", "build", "target", "vendor", "bin", "obj",
    ".idea", ".vscode",
    ".zig-cache", "zig-out",
    ".coverage", "coverage",
    "tmp", "temp", ".cache", "cache", "logs",
    ".venv", "venv", "env",
]);
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
// Hunk types produced by parsePatch and consumed by applyHunksToFiles.

// Create a new file at `path` with the given contents.
export type AddHunk = {
    type: "add";
    path: string;
    contents: string;
};
// Remove the file at `path`.
export type DeleteHunk = {
    type: "delete";
    path: string;
};
// Rewrite the file at `path` (optionally moving it to `move_path`) by
// applying `chunks` in order.
export type UpdateHunk = {
    type: "update";
    path: string;
    move_path?: string;
    chunks: UpdateChunk[];
};
export type Hunk = AddHunk | DeleteHunk | UpdateHunk;
// One "@@"-delimited section of an update: the lines to locate
// (old_lines), their replacement (new_lines), an optional anchoring
// context line, and whether "*** End of File" terminated the chunk.
export type UpdateChunk = {
    old_lines: string[];
    new_lines: string[];
    change_context?: string;
    is_end_of_file?: boolean;
};
// Paths touched by applyHunksToFiles, grouped by operation.
export type AffectedPaths = {
    added: string[];
    modified: string[];
    deleted: string[];
};
export declare function parsePatch(patchText: string): {
    hunks: Hunk[];
};
export declare function applyHunksToFiles(hunks: Hunk[]): Promise<AffectedPaths>;
//# sourceMappingURL=patch.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"patch.d.ts","sourceRoot":"","sources":["../../util/patch.ts"],"names":[],"mappings":"AAQA,MAAM,MAAM,OAAO,GAAG;IAAE,IAAI,EAAE,KAAK,CAAC;IAAC,IAAI,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAA;CAAE,CAAA;AACrE,MAAM,MAAM,UAAU,GAAG;IAAE,IAAI,EAAE,QAAQ,CAAC;IAAC,IAAI,EAAE,MAAM,CAAA;CAAE,CAAA;AACzD,MAAM,MAAM,UAAU,GAAG;IAAE,IAAI,EAAE,QAAQ,CAAC;IAAC,IAAI,EAAE,MAAM,CAAC;IAAC,SAAS,CAAC,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,WAAW,EAAE,CAAA;CAAE,CAAA;AACpG,MAAM,MAAM,IAAI,GAAG,OAAO,GAAG,UAAU,GAAG,UAAU,CAAA;AAEpD,MAAM,MAAM,WAAW,GAAG;IACxB,SAAS,EAAE,MAAM,EAAE,CAAA;IACnB,SAAS,EAAE,MAAM,EAAE,CAAA;IACnB,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,cAAc,CAAC,EAAE,OAAO,CAAA;CACzB,CAAA;AAED,MAAM,MAAM,aAAa,GAAG;IAC1B,KAAK,EAAE,MAAM,EAAE,CAAA;IACf,QAAQ,EAAE,MAAM,EAAE,CAAA;IAClB,OAAO,EAAE,MAAM,EAAE,CAAA;CAClB,CAAA;AA+FD,wBAAgB,UAAU,CAAC,SAAS,EAAE,MAAM;;EAqC3C;AAkGD,wBAAsB,iBAAiB,CAAC,KAAK,EAAE,IAAI,EAAE,GAAG,OAAO,CAAC,aAAa,CAAC,CAqC7E"}
|
|
@@ -0,0 +1,240 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Patch parsing and application utilities.
|
|
3
|
+
* Handles the *** Begin/End Patch format for file operations.
|
|
4
|
+
*/
|
|
5
|
+
import * as path from "path";
|
|
6
|
+
import * as fs from "fs/promises";
|
|
7
|
+
import * as fsSync from "fs";
|
|
8
|
+
// Parse a "*** Add File:" / "*** Delete File:" / "*** Update File:" header
// at lines[idx]. Returns the file path (plus an optional move target for
// updates) and the index of the first line after the header, or null when
// lines[idx] is not a recognized header or names no path.
function parsePatchHeader(lines, idx) {
    const line = lines[idx];
    // BUG FIX: the original used `line.split(":", 2)[1]`, but in JavaScript
    // the split limit TRUNCATES the remainder instead of keeping it, so any
    // path containing ":" was silently cut short. Slice off the known
    // prefix instead.
    const pathAfter = (text, prefix) => text.slice(prefix.length).trim();
    if (line.startsWith("*** Add File:")) {
        const filePath = pathAfter(line, "*** Add File:");
        return filePath ? { filePath, nextIdx: idx + 1 } : null;
    }
    if (line.startsWith("*** Delete File:")) {
        const filePath = pathAfter(line, "*** Delete File:");
        return filePath ? { filePath, nextIdx: idx + 1 } : null;
    }
    if (line.startsWith("*** Update File:")) {
        const filePath = pathAfter(line, "*** Update File:");
        let movePath;
        let nextIdx = idx + 1;
        // An optional "*** Move to:" line directly after the header renames
        // the file as part of the update.
        if (nextIdx < lines.length && lines[nextIdx].startsWith("*** Move to:")) {
            movePath = pathAfter(lines[nextIdx], "*** Move to:");
            nextIdx++;
        }
        return filePath ? { filePath, movePath, nextIdx } : null;
    }
    return null;
}
|
30
|
+
// Parse the "@@ context" chunk sections of an Update File block starting
// at startIdx. Stops at the next "***" header line. Returns the parsed
// chunks and the index of the first unconsumed line.
function parseUpdateChunks(lines, startIdx) {
    const chunks = [];
    let i = startIdx;
    while (i < lines.length && !lines[i].startsWith("***")) {
        if (lines[i].startsWith("@@")) {
            const contextLine = lines[i].substring(2).trim();
            i++;
            const oldLines = [];
            const newLines = [];
            let isEndOfFile = false;
            while (i < lines.length && !lines[i].startsWith("@@")) {
                const changeLine = lines[i];
                // BUG FIX: "*** End of File" starts with "***", so the old
                // inner-loop condition (`!startsWith("***")`) bailed out before
                // this equality check could ever run — is_end_of_file was never
                // set and any chunks after the marker were dropped. Test the
                // marker first, then stop on other "***" lines.
                if (changeLine === "*** End of File") {
                    isEndOfFile = true;
                    i++;
                    break;
                }
                if (changeLine.startsWith("***"))
                    break;
                if (changeLine.startsWith(" ")) {
                    // Context line: present in both the old and new text.
                    const content = changeLine.substring(1);
                    oldLines.push(content);
                    newLines.push(content);
                }
                else if (changeLine.startsWith("-")) {
                    oldLines.push(changeLine.substring(1));
                }
                else if (changeLine.startsWith("+")) {
                    newLines.push(changeLine.substring(1));
                }
                i++;
            }
            chunks.push({
                old_lines: oldLines,
                new_lines: newLines,
                change_context: contextLine || undefined,
                is_end_of_file: isEndOfFile || undefined,
            });
        }
        else {
            // Not part of any chunk; skip.
            i++;
        }
    }
    return { chunks, nextIdx: i };
}
|
73
|
+
// Collect the "+"-prefixed body lines of an Add File block starting at
// startIdx; parsing stops at the next "***" header line. Returns the new
// file's contents and the index of the first unconsumed line.
function parseAddContent(lines, startIdx) {
    const bodyLines = [];
    let i = startIdx;
    for (; i < lines.length && !lines[i].startsWith("***"); i++) {
        if (lines[i].startsWith("+")) {
            bodyLines.push(lines[i].substring(1));
        }
    }
    return { content: bodyLines.join("\n"), nextIdx: i };
}
|
|
86
|
+
/**
 * Parse a "*** Begin Patch" / "*** End Patch" document into structured
 * hunks (add / delete / update). Lines between hunks that are not valid
 * headers are skipped. Throws when the Begin/End markers are missing or
 * out of order.
 */
export function parsePatch(patchText) {
    const lines = patchText.split("\n");
    const beginIdx = lines.findIndex((line) => line.trim() === "*** Begin Patch");
    const endIdx = lines.findIndex((line) => line.trim() === "*** End Patch");
    if (beginIdx === -1 || endIdx === -1 || beginIdx >= endIdx) {
        throw new Error("Invalid patch format: missing Begin/End markers");
    }
    const hunks = [];
    let cursor = beginIdx + 1;
    while (cursor < endIdx) {
        const header = parsePatchHeader(lines, cursor);
        if (!header) {
            cursor++;
            continue;
        }
        const headerLine = lines[cursor];
        if (headerLine.startsWith("*** Add File:")) {
            const { content, nextIdx } = parseAddContent(lines, header.nextIdx);
            hunks.push({ type: "add", path: header.filePath, contents: content });
            cursor = nextIdx;
        }
        else if (headerLine.startsWith("*** Delete File:")) {
            hunks.push({ type: "delete", path: header.filePath });
            cursor = header.nextIdx;
        }
        else if (headerLine.startsWith("*** Update File:")) {
            const { chunks, nextIdx } = parseUpdateChunks(lines, header.nextIdx);
            hunks.push({ type: "update", path: header.filePath, move_path: header.movePath, chunks });
            cursor = nextIdx;
        }
        else {
            cursor++;
        }
    }
    return { hunks };
}
|
|
121
|
+
// Find the first index at or after startIndex where `pattern` occurs in
// `lines` as a contiguous run. Returns -1 for an empty pattern or when no
// match exists.
function seekSequence(lines, pattern, startIndex) {
    if (pattern.length === 0)
        return -1;
    const lastStart = lines.length - pattern.length;
    for (let start = startIndex; start <= lastStart; start++) {
        const hit = pattern.every((want, offset) => lines[start + offset] === want);
        if (hit)
            return start;
    }
    return -1;
}
|
|
137
|
+
// Translate update chunks into [startIdx, oldLen, newLines] replacement
// triples against originalLines. Matching is stateful: each chunk searches
// forward from where the previous one matched, so chunks must appear in
// file order. Throws when a context line or a chunk's old lines cannot be
// found.
function computeReplacements(originalLines, filePath, chunks) {
    const replacements = [];
    let lineIndex = 0; // forward-only search cursor
    for (const chunk of chunks) {
        let contextIdx = -1;
        if (chunk.change_context) {
            // The "@@ context" line anchors the search; it must exist.
            contextIdx = seekSequence(originalLines, [chunk.change_context], lineIndex);
            if (contextIdx === -1) {
                throw new Error(`Failed to find context '${chunk.change_context}' in ${filePath}`);
            }
            lineIndex = contextIdx;
        }
        if (chunk.old_lines.length === 0) {
            // No old lines to match: this chunk only inserts.
            if (chunk.change_context && contextIdx !== -1) {
                // NOTE(review): this replaces the matched context line itself
                // (a span of 1 at contextIdx) with new_lines rather than
                // inserting after it — confirm that is the intended semantics.
                replacements.push([contextIdx, 1, chunk.new_lines]);
            }
            else {
                // No anchor at all: append at end of file.
                replacements.push([originalLines.length, 0, chunk.new_lines]);
            }
            continue;
        }
        let pattern = chunk.old_lines;
        let newSlice = chunk.new_lines;
        let found = seekSequence(originalLines, pattern, lineIndex);
        if (found === -1 && pattern.length > 0 && pattern[pattern.length - 1] === "") {
            // Retry without a trailing blank line (tolerates EOF-newline
            // disagreement between the patch and the file); drop the matching
            // trailing blank from the replacement side too.
            pattern = pattern.slice(0, -1);
            if (newSlice.length > 0 && newSlice[newSlice.length - 1] === "") {
                newSlice = newSlice.slice(0, -1);
            }
            found = seekSequence(originalLines, pattern, lineIndex);
        }
        if (found !== -1) {
            replacements.push([found, pattern.length, newSlice]);
            lineIndex = found + pattern.length;
        }
        else {
            throw new Error(`Failed to find expected lines in ${filePath}:\n${chunk.old_lines.join("\n")}`);
        }
    }
    // applyReplacements expects ascending start order (it applies in reverse).
    replacements.sort((a, b) => a[0] - b[0]);
    return replacements;
}
|
|
179
|
+
/**
 * Apply [startIdx, oldLen, newSegment] replacement triples to a copy of
 * `lines`. Replacements must be sorted ascending by start index
 * (computeReplacements guarantees this); applying them back-to-front keeps
 * earlier indices valid. Returns a new array; `lines` is not mutated.
 */
function applyReplacements(lines, replacements) {
    const result = [...lines];
    for (let i = replacements.length - 1; i >= 0; i--) {
        const [startIdx, oldLen, newSegment] = replacements[i];
        // One splice swaps the old span for the new lines in a single step;
        // the original removed the span and then re-inserted the new lines
        // one element at a time.
        result.splice(startIdx, oldLen, ...newSegment);
    }
    return result;
}
|
|
190
|
+
// Read filePath, apply the update chunks, and return the new file text.
// Trailing-newline handling mirrors how the file was split: the final
// empty segment from split("\n") is dropped before matching and a single
// trailing newline is restored before joining.
function deriveNewContents(filePath, chunks) {
    const rawLines = fsSync.readFileSync(filePath, "utf-8").split("\n");
    const hasTrailingBlank = rawLines.length > 0 && rawLines[rawLines.length - 1] === "";
    const workingLines = hasTrailingBlank ? rawLines.slice(0, -1) : rawLines;
    const edits = computeReplacements(workingLines, filePath, chunks);
    const updated = applyReplacements(workingLines, edits);
    if (updated.length === 0 || updated[updated.length - 1] !== "") {
        updated.push("");
    }
    return updated.join("\n");
}
|
|
203
|
+
/**
 * Apply parsed hunks to the filesystem: create added files (making parent
 * directories first), delete removed files, and rewrite or move updated
 * files. Returns the affected paths grouped by operation. Throws when
 * given no hunks or when an update's expected lines cannot be found.
 */
export async function applyHunksToFiles(hunks) {
    if (hunks.length === 0)
        throw new Error("No files were modified.");
    // Create the parent directory of `target` if it has one.
    const ensureParentDir = async (target) => {
        const dir = path.dirname(target);
        if (dir !== "." && dir !== "/") {
            await fs.mkdir(dir, { recursive: true });
        }
    };
    const added = [];
    const modified = [];
    const deleted = [];
    for (const hunk of hunks) {
        switch (hunk.type) {
            case "add": {
                await ensureParentDir(hunk.path);
                await fs.writeFile(hunk.path, hunk.contents, "utf-8");
                added.push(hunk.path);
                break;
            }
            case "delete": {
                await fs.unlink(hunk.path);
                deleted.push(hunk.path);
                break;
            }
            case "update": {
                const content = deriveNewContents(hunk.path, hunk.chunks);
                if (hunk.move_path) {
                    // Moving: write the new location first, then remove the old.
                    await ensureParentDir(hunk.move_path);
                    await fs.writeFile(hunk.move_path, content, "utf-8");
                    await fs.unlink(hunk.path);
                    modified.push(hunk.move_path);
                }
                else {
                    await fs.writeFile(hunk.path, content, "utf-8");
                    modified.push(hunk.path);
                }
                break;
            }
        }
    }
    return { added, modified, deleted };
}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
// NOTE: compiler-generated declarations for dist/util/paths.js — the
// implementations are not visible here, so behavioral notes below are
// hedged and should be confirmed against util/paths.ts.

// Resolve inputPath against baseDir (exact absolute/relative handling is in paths.ts).
export declare function resolvePath(baseDir: string, inputPath: string): string;
// Presumably guards against reading env/secret files — confirm the rules in paths.ts.
export declare function isBlockedEnvPath(filePath: string): boolean;
// Judging by the name, extension-based binary detection (no file read).
export declare function isBinaryExtension(filePath: string): boolean;
// Judging by the name, extension-based image detection.
export declare function isImageExtension(filePath: string): boolean;
// Async — presumably samples file contents to detect binary data; confirm in paths.ts.
export declare function isBinaryFile(filePath: string): Promise<boolean>;
//# sourceMappingURL=paths.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"paths.d.ts","sourceRoot":"","sources":["../../util/paths.ts"],"names":[],"mappings":"AAOA,wBAAgB,WAAW,CAAC,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,UAO7D;AAED,wBAAgB,gBAAgB,CAAC,QAAQ,EAAE,MAAM,WAGhD;AAED,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,WAgCjD;AAED,wBAAgB,gBAAgB,CAAC,QAAQ,EAAE,MAAM,WAGhD;AAED,wBAAsB,YAAY,CAAC,QAAQ,EAAE,MAAM,oBAelD"}
|