@movemama/opencode-legacy 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +53 -0
- package/index.js +19 -0
- package/legacy-rules.json +19 -0
- package/package.json +36 -0
- package/plugin-meta.js +14 -0
- package/tools/edit.js +56 -0
- package/tools/edit.ts +64 -0
- package/tools/grep.js +210 -0
- package/tools/legacy-codec.js +13 -0
- package/tools/legacy-edit-core.mjs +134 -0
- package/tools/legacy-router.mjs +149 -0
- package/tools/legacy-search-core.mjs +84 -0
- package/tools/legacy.js +78 -0
- package/tools/legacy.ts +230 -0
- package/tools/opencode-paths.mjs +41 -0
- package/tools/read.js +148 -0
- package/tools/read.ts +213 -0
- package/tools/script-edit-core.mjs +126 -0
- package/tools/script-edit.js +59 -0
- package/tools/script-edit.ts +59 -0
- package/tools/txt-gb2312-tool.mjs +392 -0
- package/tools/write.js +53 -0
- package/tools/write.ts +67 -0
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
import path from 'node:path';
|
|
2
|
+
|
|
3
|
+
// Convert a path to forward slashes so globs behave the same on Windows and POSIX.
function normalizePath(filePath) {
  return filePath.replace(/\\/g, '/');
}

/**
 * Compile a simple glob pattern into an anchored, case-insensitive RegExp.
 *
 * Supported syntax:
 *   `**`    matches across path separators; a `**` followed by `/` matches
 *           zero or more whole directories, so `**` + `/*.txt` also matches
 *           root-level files such as `c.txt` (previously at least one
 *           directory was required — a bug for files at the worktree root)
 *   `*`     matches any run of characters within one path segment
 *   `?`     matches exactly one character
 *   `{a,b}` matches any one of the comma-separated literal alternatives
 *
 * @param {string} glob - glob pattern (backslashes are normalized to `/`)
 * @returns {RegExp} full-path matcher
 */
function globToRegExp(glob) {
  const normalized = normalizePath(glob);
  let pattern = '^';

  for (let i = 0; i < normalized.length; i += 1) {
    const ch = normalized[i];

    if (ch === '*') {
      if (normalized[i + 1] === '*') {
        if (normalized[i + 2] === '/') {
          // `**/` may match zero directories so `**/*.txt` matches `c.txt`.
          pattern += '(?:.*/)?';
          i += 2;
        } else {
          pattern += '.*';
          i += 1;
        }
      } else {
        pattern += '[^/]*';
      }
      continue;
    }

    if (ch === '?') {
      pattern += '.';
      continue;
    }

    if (ch === '{') {
      const closeIndex = normalized.indexOf('}', i);
      if (closeIndex > i) {
        // Escape each alternative so group members match literally.
        const group = normalized
          .slice(i + 1, closeIndex)
          .split(',')
          .map((part) => part.trim().replace(/[.*+?^${}()|[\]\\]/g, '\\$&'))
          .join('|');
        pattern += `(${group})`;
        i = closeIndex;
        continue;
      }
    }

    // Escape remaining regex metacharacters so they match literally.
    if ('\\.[]{}()+-^$|'.includes(ch)) {
      pattern += `\\${ch}`;
      continue;
    }

    pattern += ch;
  }

  pattern += '$';
  return new RegExp(pattern, 'i');
}
|
|
55
|
+
|
|
56
|
+
/**
 * Built-in fallback rules used when no legacy-rules.json overrides them.
 * @returns {Array<{glob: string, encoding: string, strict: boolean}>}
 */
export function createDefaultLegacyRules() {
  const txtRule = { glob: '**/*.txt', encoding: 'gb2312', strict: true };
  const configRule = { glob: '**/*.{ini,cfg,dat}', encoding: 'gbk', strict: false };
  return [txtRule, configRule];
}
|
|
62
|
+
|
|
63
|
+
/**
 * Find the legacy rule that applies to a path.
 *
 * Every rule whose glob matches is considered; ties are broken by higher
 * numeric `priority` first, then by the longer (more specific) glob. On a
 * full tie the earliest rule wins, matching the original stable sort.
 *
 * @param {string} filePath - path to classify (any separator style)
 * @param {Array<object>} [rules] - candidate rules; defaults to the built-ins
 * @returns {object|null} the winning rule, or null when nothing matches
 */
export function matchLegacyRule(filePath, rules = createDefaultLegacyRules()) {
  const normalized = normalizePath(filePath);
  const candidates = rules.filter((rule) => globToRegExp(rule.glob).test(normalized));

  if (candidates.length === 0) {
    return null;
  }

  const precedence = (rule) => Number(rule.priority || 0);

  let best = candidates[0];
  for (const candidate of candidates.slice(1)) {
    const byPriority = precedence(candidate) - precedence(best);
    if (byPriority > 0 || (byPriority === 0 && candidate.glob.length > best.glob.length)) {
      best = candidate;
    }
  }

  return best;
}
|
|
88
|
+
|
|
89
|
+
/**
 * True when `filePath` is covered by any legacy encoding rule.
 * @param {string} filePath - path to classify
 * @param {Array<object>} [rules] - rules to consult; defaults to built-ins
 * @returns {boolean}
 */
export function shouldUseLegacyTools(filePath, rules = createDefaultLegacyRules()) {
  return matchLegacyRule(filePath, rules) !== null;
}
|
|
92
|
+
|
|
93
|
+
/**
 * Translate a builtin tool invocation into its legacy-encoding equivalent.
 *
 * @param {{tool: string, args: object, rules?: Array<object>}} request
 * @returns {{tool: string, args: object}|null} rewritten call, or null when
 *   no file path is present, no rule matches, or the tool has no legacy
 *   counterpart
 */
export function buildLegacyToolCall({ tool, args, rules = createDefaultLegacyRules() }) {
  // The builtin tools are not consistent about the path argument name.
  const filePath = args.filePath ?? args.targetPath ?? args.path;
  if (!filePath) {
    return null;
  }

  const matched = matchLegacyRule(filePath, rules);
  if (!matched) {
    return null;
  }

  switch (tool) {
    case 'read':
      return {
        tool: 'legacy_read',
        args: {
          filePath,
          encoding: matched.encoding,
          strict: matched.strict,
        },
      };
    case 'write':
      return {
        tool: 'legacy_write',
        args: {
          filePath,
          content: args.content,
          encoding: matched.encoding,
          strict: matched.strict,
        },
      };
    case 'edit':
    case 'apply_patch':
      return {
        tool: 'legacy_edit',
        args: {
          filePath,
          oldString: args.oldString,
          newString: args.newString,
          replaceAll: Boolean(args.replaceAll),
          encoding: matched.encoding,
          strict: matched.strict,
        },
      };
    default:
      return null;
  }
}
|
|
143
|
+
|
|
144
|
+
/**
 * Compute where legacy rule configuration may live.
 * @param {string} projectRoot - project worktree root
 * @param {string} globalConfigRoot - global opencode config directory
 * @returns {{projectConfigPath: string, globalConfigPath: string}}
 */
export function resolveLegacyConfigPaths(projectRoot, globalConfigRoot) {
  const projectConfigPath = path.join(projectRoot, '.opencode', 'legacy-rules.json');
  const globalConfigPath = path.join(globalConfigRoot, 'legacy-rules.json');
  return { projectConfigPath, globalConfigPath };
}
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
// Escape every regex metacharacter so `text` matches itself literally.
function escapeRegex(text) {
  return text.replace(/[.*+?^${}()|[\]\\]/g, String.raw`\$&`)
}
|
|
4
|
+
|
|
5
|
+
/**
 * Build a per-line predicate for `pattern`.
 *
 * @param {string} pattern - regex source, or a literal string when
 *   `options.fixedStrings` is set
 * @param {{caseSensitive?: boolean, fixedStrings?: boolean, wholeWord?: boolean}} [options]
 * @returns {(line: string) => boolean} true when the line contains a match
 */
export function createLineMatcher(pattern, options = {}) {
  // No 'g' flag here: a global regex keeps `lastIndex` between `.test()`
  // calls, which made repeated tests of matching lines alternate between
  // true and false (false negatives on every other matching line).
  const flags = options.caseSensitive ? '' : 'i'

  let source
  if (options.fixedStrings) {
    const escaped = escapeRegex(pattern)
    source = options.wholeWord ? `\\b${escaped}\\b` : escaped
  } else {
    // Wrap in a non-capturing group so \b applies to the whole alternation.
    source = options.wholeWord ? `\\b(?:${pattern})\\b` : pattern
  }

  const regex = new RegExp(source, flags)
  return (line) => regex.test(line)
}
|
|
19
|
+
|
|
20
|
+
/**
 * Scan `content` line by line and report every line matching `pattern`.
 *
 * @param {string} content - text to scan; split on `\n` or `\r\n`
 * @param {string} pattern - pattern forwarded to `createLineMatcher`
 * @param {object} [options] - matcher options
 * @returns {Array<{line: number, text: string}>} 1-based matching lines
 */
export function searchContentLines(content, pattern, options = {}) {
  const matcher = createLineMatcher(pattern, options)

  return content.split(/\r?\n/).reduce((matches, text, index) => {
    if (matcher(text)) {
      matches.push({ line: index + 1, text })
    }
    return matches
  }, [])
}
|
|
36
|
+
|
|
37
|
+
/**
 * Like `searchContentLines`, but processes `content` in fixed-size slices so
 * very large strings are not split into one huge line array at once.
 *
 * @param {string} content - full text to scan (already decoded)
 * @param {string} pattern - pattern forwarded to `createLineMatcher`
 * @param {object} [options] - matcher options plus optional `chunkSize`
 *   (characters per slice; defaults to 256 * 1024 UTF-16 units)
 * @returns {Array<{line: number, text: string}>} 1-based matching lines
 */
export function searchContentLinesChunked(content, pattern, options = {}) {
  // NOTE: `||` means an explicit chunkSize of 0 falls back to the default.
  const chunkSize = options.chunkSize || 1024 * 256
  if (content.length <= chunkSize) {
    // Small inputs take the simple single-pass path.
    return searchContentLines(content, pattern, options)
  }

  const matcher = createLineMatcher(pattern, options)
  const matches = []
  let offset = 0
  let lineNumber = 1
  // Partial line cut off at a chunk boundary; re-prepended to the next chunk
  // so no line is ever matched in two pieces (also keeps `\r\n` pairs whole).
  let carry = ''

  while (offset < content.length) {
    const nextChunk = content.slice(offset, offset + chunkSize)
    offset += chunkSize

    const merged = carry + nextChunk
    const hasMore = offset < content.length
    const lines = merged.split(/\r?\n/)

    if (hasMore) {
      // The final split element may be an incomplete line; defer it.
      carry = lines.pop() ?? ''
    } else {
      carry = ''
    }

    for (const line of lines) {
      if (matcher(line)) {
        matches.push({
          line: lineNumber,
          text: line,
        })
      }
      lineNumber += 1
    }
  }

  // NOTE(review): after the loop `carry` is always '' — the last iteration
  // runs with hasMore === false — so this branch looks unreachable; confirm
  // before relying on it.
  if (carry) {
    if (createLineMatcher(pattern, options)(carry)) {
      matches.push({
        line: lineNumber,
        text: carry,
      })
    }
  }

  return matches
}
|
package/tools/legacy.js
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import { tool } from '@opencode-ai/plugin'
|
|
2
|
+
import { readFileSync, existsSync } from 'node:fs'
|
|
3
|
+
import { writeFile } from 'node:fs/promises'
|
|
4
|
+
import path from 'node:path'
|
|
5
|
+
import { applyLegacyEdit } from './legacy-edit-core.mjs'
|
|
6
|
+
import { decodeLegacyBuffer, encodeLegacyText } from './legacy-codec.js'
|
|
7
|
+
|
|
8
|
+
// Read a file's raw bytes and decode them using the given legacy encoding.
function readLegacyFile(filePath, encoding) {
  const raw = readFileSync(filePath)
  return decodeLegacyBuffer(raw, encoding)
}
|
|
12
|
+
|
|
13
|
+
/**
 * opencode tool: read a legacy-encoded file and return its UTF-8 text.
 */
export const read = tool({
  description: '按 legacy 编码读取文件',
  args: {
    filePath: tool.schema.string().describe('文件路径'),
    encoding: tool.schema.string().describe('legacy 编码'),
    strict: tool.schema.boolean().describe('是否严格模式'),
  },
  async execute(args, context) {
    // Relative paths are resolved against the opencode worktree.
    const absolutePath = path.isAbsolute(args.filePath)
      ? args.filePath
      : path.join(context.worktree, args.filePath)
    return readLegacyFile(absolutePath, args.encoding)
  },
})
|
|
28
|
+
|
|
29
|
+
/**
 * opencode tool: encode UTF-8 content into a legacy encoding and write it out.
 */
export const write = tool({
  description: '按 legacy 编码写入文件',
  args: {
    filePath: tool.schema.string().describe('文件路径'),
    content: tool.schema.string().describe('UTF-8 内容'),
    encoding: tool.schema.string().describe('legacy 编码'),
    strict: tool.schema.boolean().describe('是否严格模式'),
  },
  async execute(args, context) {
    // Relative paths are resolved against the opencode worktree.
    const targetPath = path.isAbsolute(args.filePath)
      ? args.filePath
      : path.join(context.worktree, args.filePath)
    const encoded = encodeLegacyText(args.content, args.encoding)
    await writeFile(targetPath, encoded)
    return `已按 ${args.encoding} 编码写入 ${targetPath}`
  },
})
|
|
47
|
+
|
|
48
|
+
/**
 * opencode tool: perform a string replacement inside a legacy-encoded file.
 * The file is decoded to UTF-8, edited in memory, then re-encoded and
 * written back in place.
 */
export const edit = tool({
  description: '按 legacy 编码编辑文件',
  args: {
    filePath: tool.schema.string().describe('文件路径'),
    oldString: tool.schema.string().describe('要替换的旧文本'),
    newString: tool.schema.string().describe('要写入的新文本'),
    replaceAll: tool.schema.boolean().describe('是否全部替换'),
    encoding: tool.schema.string().describe('legacy 编码'),
    // NOTE(review): `strict` is declared but never read in execute — confirm
    // whether decode/encode was meant to honor it.
    strict: tool.schema.boolean().describe('是否严格模式'),
  },
  async execute(args, context) {
    // Relative paths are resolved against the opencode worktree.
    const filePath = path.isAbsolute(args.filePath)
      ? args.filePath
      : path.join(context.worktree, args.filePath)

    if (!existsSync(filePath)) {
      throw new Error(`文件不存在: ${filePath}`)
    }

    const content = readLegacyFile(filePath, args.encoding)
    const result = applyLegacyEdit(content, args.oldString, args.newString, Boolean(args.replaceAll))

    // applyLegacyEdit reports failure via { changed: false, error } rather
    // than throwing — presumably when oldString is absent; see
    // legacy-edit-core.mjs for the exact conditions.
    if (!result.changed) {
      throw new Error(result.error)
    }

    const buffer = encodeLegacyText(result.content, args.encoding)
    await writeFile(filePath, buffer)
    return `已按 ${args.encoding} 编码编辑 ${filePath}`
  },
})
|
package/tools/legacy.ts
ADDED
|
@@ -0,0 +1,230 @@
|
|
|
1
|
+
import { tool } from '@opencode-ai/plugin'
|
|
2
|
+
import { mkdtemp, rm, writeFile } from 'node:fs/promises'
|
|
3
|
+
import { existsSync } from 'node:fs'
|
|
4
|
+
import path from 'node:path'
|
|
5
|
+
import { tmpdir } from 'node:os'
|
|
6
|
+
import { spawnSync } from 'node:child_process'
|
|
7
|
+
import { applyLegacyEdit } from './legacy-edit-core.mjs'
|
|
8
|
+
import { getGlobalToolPath, getKnownIconvCandidates } from './opencode-paths.mjs'
|
|
9
|
+
|
|
10
|
+
// Memoized results of the spawnSync probes performed further down.
let cachedNodeCommand = null
let cachedIconvPath = null

// Convert Windows backslash separators to forward slashes.
function normalizeWindowsPath(filePath) {
  return filePath.split('\\').join('/')
}
|
|
16
|
+
|
|
17
|
+
/**
 * Derive iconv.exe locations from Git-for-Windows entries on PATH:
 * a `<git>/cmd` entry usually implies `<git>/usr/bin/iconv.exe` exists.
 * PATH is split on ';' because the entries handled here are Windows paths.
 * @returns {string[]} candidate iconv paths (forward-slash form)
 */
function getDerivedIconvCandidates() {
  const rawPath = process.env.Path || process.env.PATH || ''
  const entries = rawPath
    .split(';')
    .map((entry) => entry.trim())
    .filter(Boolean)

  return entries
    .map((entry) => normalizeWindowsPath(entry))
    .filter((entry) => entry.toLowerCase().endsWith('/git/cmd'))
    .map((entry) => entry.replace(/\/cmd$/i, '/usr/bin/iconv.exe'))
}
|
|
31
|
+
|
|
32
|
+
// Prefer a project-local copy of the converter script under tools/;
// fall back to the globally installed one.
function resolveTxtToolPath(context) {
  const localPath = path.join(context.worktree, 'tools', 'txt-gb2312-tool.mjs')
  return existsSync(localPath) ? localPath : getGlobalToolPath('txt-gb2312-tool.mjs')
}
|
|
40
|
+
|
|
41
|
+
/**
 * Find a runnable Node.js executable, caching the first success in
 * `cachedNodeCommand`.
 *
 * Candidates, in order: the OPENCODE_NODE_PATH override, the current
 * executable, and plain `node` from PATH. Each candidate is verified by
 * running a one-liner that prints "node-ok" only when
 * `process.release?.name === "node"` (so a non-Node host runtime embedding
 * this plugin is rejected).
 *
 * @returns {string} command usable with spawnSync
 * @throws {Error} when no candidate passes the probe
 */
function resolveNodeCommand() {
  if (cachedNodeCommand) {
    return cachedNodeCommand
  }

  const candidates = [process.env.OPENCODE_NODE_PATH, process.execPath, 'node'].filter(Boolean)

  for (const candidate of candidates) {
    const probe = spawnSync(candidate, ['-e', 'process.stdout.write(process.release?.name === "node" ? "node-ok" : "not-node")'], {
      encoding: 'utf8',
    })
    // Accept only a clean exit whose combined stdout+stderr is exactly
    // "node-ok" — extra output means warnings or a different runtime.
    const output = `${probe.stdout || ''}${probe.stderr || ''}`.trim()
    if (probe.status === 0 && output === 'node-ok') {
      cachedNodeCommand = candidate
      return cachedNodeCommand
    }
  }

  throw new Error('未找到可用的 Node.js 命令,无法执行 txt-gb2312-tool.mjs')
}
|
|
61
|
+
|
|
62
|
+
/**
 * Locate a working iconv executable, caching the first success in
 * `cachedIconvPath`.
 *
 * Probes, in order: plain `iconv` on PATH, the OPENCODE_ICONV_PATH and
 * ICONV_PATH overrides, well-known install locations, and paths derived
 * from Git entries on PATH. A candidate counts as working when
 * `iconv --version` exits with status 0.
 *
 * @returns {string} path or command name for iconv
 * @throws {Error} when no candidate responds to `--version`
 */
function resolveIconvPath() {
  if (cachedIconvPath) {
    return cachedIconvPath
  }

  const candidates = [
    'iconv',
    process.env.OPENCODE_ICONV_PATH,
    process.env.ICONV_PATH,
    ...getKnownIconvCandidates(),
    ...getDerivedIconvCandidates(),
  ].filter(Boolean)

  // `find` stops at the first candidate whose probe succeeds, preserving
  // the original probe order and short-circuit behavior.
  const working = candidates.find(
    (candidate) => spawnSync(candidate, ['--version'], { encoding: 'utf8' }).status === 0,
  )

  if (!working) {
    throw new Error('iconv 不可用,无法处理 legacy 文件')
  }

  cachedIconvPath = working
  return cachedIconvPath
}
|
|
85
|
+
|
|
86
|
+
/**
 * Run a Node.js script synchronously and return its stdout.
 *
 * @param {string[]} scriptArgs - script path followed by its arguments
 * @param {string} cwd - working directory for the child process
 * @param {object} [extraEnv] - extra environment variables merged over process.env
 * @returns {string} captured stdout (utf8)
 * @throws {Error} on non-zero exit, with command, cwd, status, stderr and
 *   stdout folded into a single `|`-separated diagnostic message
 */
function runNode(scriptArgs, cwd, extraEnv = {}) {
  const nodeCommand = resolveNodeCommand()
  const result = spawnSync(nodeCommand, scriptArgs, {
    cwd,
    encoding: 'utf8',
    env: { ...process.env, ...extraEnv },
  })

  if (result.status !== 0) {
    const stderr = (result.stderr || '').trim()
    const stdout = (result.stdout || '').trim()
    // Include everything we know in one line so the failure is diagnosable
    // from the tool output alone.
    throw new Error(
      [
        'legacy 工具执行失败',
        `nodeCommand=${nodeCommand}`,
        `cwd=${cwd}`,
        `status=${result.status}`,
        `stderr=${stderr || '<empty>'}`,
        `stdout=${stdout || '<empty>'}`,
      ].join(' | '),
    )
  }

  return result.stdout
}
|
|
111
|
+
|
|
112
|
+
// Run iconv with raw (buffer) stdio and return its stdout bytes;
// throw with iconv's stderr text on failure.
function runIconv(iconvPath, args) {
  const result = spawnSync(iconvPath, args, { encoding: 'buffer' })
  if (result.status === 0) {
    return result.stdout
  }
  const message = result.stderr?.toString('utf8')
  throw new Error(message || 'iconv 执行失败')
}
|
|
119
|
+
|
|
120
|
+
// Decode a GB2312 file into a UTF-8 string via the external iconv binary.
function readGb2312File(filePath) {
  const converted = runIconv(resolveIconvPath(), ['-f', 'GB2312', '-t', 'UTF-8', filePath])
  return converted.toString('utf8')
}
|
|
125
|
+
|
|
126
|
+
/**
 * opencode tool: decode a legacy-encoded file to UTF-8 text via iconv.
 * Relative paths are resolved against the opencode worktree.
 */
export const read = tool({
  description: '按 legacy 编码读取文件',
  args: {
    filePath: tool.schema.string().describe('文件路径'),
    encoding: tool.schema.string().describe('legacy 编码'),
    // NOTE(review): `strict` is declared but never read in execute — confirm
    // whether strict decoding was intended here.
    strict: tool.schema.boolean().describe('是否严格模式'),
  },
  async execute(args, context) {
    const filePath = path.isAbsolute(args.filePath)
      ? args.filePath
      : path.join(context.worktree, args.filePath)

    // GB2312 has a dedicated helper; every other encoding is handed to
    // iconv directly with `-f <encoding> -t UTF-8`.
    if (args.encoding.toLowerCase() === 'gb2312') {
      return readGb2312File(filePath)
    }

    const iconvPath = resolveIconvPath()
    const output = runIconv(iconvPath, ['-f', args.encoding, '-t', 'UTF-8', filePath])
    return output.toString('utf8')
  },
})
|
|
147
|
+
|
|
148
|
+
/**
 * opencode tool: encode UTF-8 text into a legacy encoding and write it out.
 *
 * The content is first staged as UTF-8 in a temp file; GB2312 output is
 * delegated to the bundled txt-gb2312-tool.mjs script, other encodings go
 * through iconv directly. The temp directory is removed in all cases.
 */
export const write = tool({
  description: '按 legacy 编码写入文件',
  args: {
    filePath: tool.schema.string().describe('文件路径'),
    content: tool.schema.string().describe('UTF-8 内容'),
    encoding: tool.schema.string().describe('legacy 编码'),
    // NOTE(review): `strict` is declared but never read in execute.
    strict: tool.schema.boolean().describe('是否严格模式'),
  },
  async execute(args, context) {
    // Relative paths are resolved against the opencode worktree.
    const filePath = path.isAbsolute(args.filePath)
      ? args.filePath
      : path.join(context.worktree, args.filePath)

    const tempDir = await mkdtemp(path.join(tmpdir(), 'legacy-write-'))
    const tempPath = path.join(tempDir, 'content.utf8')

    try {
      await writeFile(tempPath, args.content, 'utf8')

      if (args.encoding.toLowerCase() === 'gb2312') {
        // The helper script performs the conversion itself; tell it where
        // iconv lives via both env-var spellings.
        const iconvPath = resolveIconvPath()
        return runNode([resolveTxtToolPath(context), 'write', filePath, tempPath], context.worktree, {
          OPENCODE_ICONV_PATH: iconvPath,
          ICONV_PATH: iconvPath,
        })
      }

      const iconvPath = resolveIconvPath()
      const converted = runIconv(iconvPath, ['-f', 'UTF-8', '-t', args.encoding, tempPath])
      await writeFile(filePath, converted)
      return `已按 ${args.encoding} 编码写入 ${filePath}`
    } finally {
      // Always clean up the temp directory, even when conversion fails.
      await rm(tempDir, { recursive: true, force: true })
    }
  },
})
|
|
184
|
+
|
|
185
|
+
/**
 * opencode tool: edit a legacy-encoded file by delegating decode/encode to
 * the sibling `read` and `write` tools, applying the string replacement on
 * the decoded UTF-8 content in between.
 */
export const edit = tool({
  description: '按 legacy 编码编辑文件',
  args: {
    filePath: tool.schema.string().describe('文件路径'),
    oldString: tool.schema.string().describe('要替换的旧文本'),
    newString: tool.schema.string().describe('要写入的新文本'),
    replaceAll: tool.schema.boolean().describe('是否全部替换'),
    encoding: tool.schema.string().describe('legacy 编码'),
    strict: tool.schema.boolean().describe('是否严格模式'),
  },
  async execute(args, context) {
    // Relative paths are resolved against the opencode worktree.
    const filePath = path.isAbsolute(args.filePath)
      ? args.filePath
      : path.join(context.worktree, args.filePath)

    if (!existsSync(filePath)) {
      throw new Error(`文件不存在: ${filePath}`)
    }

    // Reuse the read tool to obtain the decoded UTF-8 content.
    // NOTE(review): `raw: true` is not an argument the read tool declares or
    // reads — confirm whether it is leftover from an earlier revision.
    const content = await read.execute(
      {
        filePath,
        encoding: args.encoding,
        strict: args.strict,
        raw: true,
      },
      context,
    )

    const result = applyLegacyEdit(content, args.oldString, args.newString, Boolean(args.replaceAll))

    // applyLegacyEdit signals "nothing replaced" via { changed: false, error }.
    if (!result.changed) {
      throw new Error(result.error)
    }

    // Reuse the write tool so encoding goes through the same iconv pipeline.
    return write.execute(
      {
        filePath,
        content: result.content,
        encoding: args.encoding,
        strict: args.strict,
      },
      context,
    )
  },
})
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import path from 'node:path';
import { fileURLToPath } from 'node:url';

// Directory containing this module, and the package root one level up.
const TOOLS_DIR = path.dirname(fileURLToPath(import.meta.url));
const PACKAGE_ROOT = path.join(TOOLS_DIR, '..');

/** Home directory, preferring the Windows variable, with a last-resort default. */
export function getUserHome() {
  const home = process.env.USERPROFILE || process.env.HOME;
  return home || path.join('C:', 'Users', 'Administrator');
}

/** Global opencode configuration directory (`<home>/.config/opencode`). */
export function getGlobalOpencodeRoot() {
  return path.join(getUserHome(), '.config', 'opencode');
}

/** Global legacy-rules.json location inside the opencode config directory. */
export function getGlobalLegacyRulesPath() {
  return path.join(getGlobalOpencodeRoot(), 'legacy-rules.json');
}

/** Path of a globally installed tool script by file name. */
export function getGlobalToolPath(fileName) {
  return path.join(getGlobalOpencodeRoot(), 'tools', fileName);
}

/** Root directory of this installed package. */
export function getBundledPackageRoot() {
  return PACKAGE_ROOT;
}

/** legacy-rules.json shipped inside this package. */
export function getBundledLegacyRulesPath() {
  return path.join(PACKAGE_ROOT, 'legacy-rules.json');
}

/** Path of a tool script shipped inside this package by file name. */
export function getBundledToolPath(fileName) {
  return path.join(TOOLS_DIR, fileName);
}

/** Well-known Windows install locations for iconv.exe under the user home. */
export function getKnownIconvCandidates() {
  const home = getUserHome();
  return [
    path.join(home, 'AppData', 'Local', 'Programs', 'gettext-iconv', 'bin', 'iconv.exe'),
    path.join(home, 'AppData', 'Local', 'Atlassian', 'SourceTree', 'git_local', 'usr', 'bin', 'iconv.exe'),
  ];
}
|