opencode-hashline 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +442 -0
- package/README.ru.md +417 -0
- package/dist/chunk-C2EVIAGV.js +177 -0
- package/dist/chunk-IVZSANZ4.js +411 -0
- package/dist/hashline-Civwirvf.d.cts +278 -0
- package/dist/hashline-Civwirvf.d.ts +278 -0
- package/dist/hashline-W2FT5QN4.js +44 -0
- package/dist/index.cjs +811 -0
- package/dist/index.d.cts +48 -0
- package/dist/index.d.ts +48 -0
- package/dist/index.js +197 -0
- package/dist/utils.cjs +637 -0
- package/dist/utils.d.cts +74 -0
- package/dist/utils.d.ts +74 -0
- package/dist/utils.js +54 -0
- package/package.json +56 -0
package/dist/index.cjs
ADDED
@@ -0,0 +1,811 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __esm = (fn, res) => function __init() {
+  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
+};
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/hashline.ts
+var hashline_exports = {};
+__export(hashline_exports, {
+  DEFAULT_CONFIG: () => DEFAULT_CONFIG,
+  DEFAULT_EXCLUDE_PATTERNS: () => DEFAULT_EXCLUDE_PATTERNS,
+  DEFAULT_PREFIX: () => DEFAULT_PREFIX,
+  HashlineCache: () => HashlineCache,
+  applyHashEdit: () => applyHashEdit,
+  buildHashMap: () => buildHashMap,
+  computeLineHash: () => computeLineHash,
+  createHashline: () => createHashline,
+  formatFileWithHashes: () => formatFileWithHashes,
+  getAdaptiveHashLength: () => getAdaptiveHashLength,
+  getByteLength: () => getByteLength,
+  matchesGlob: () => matchesGlob,
+  normalizeHashRef: () => normalizeHashRef,
+  parseHashRef: () => parseHashRef,
+  replaceRange: () => replaceRange,
+  resolveConfig: () => resolveConfig,
+  resolveRange: () => resolveRange,
+  shouldExclude: () => shouldExclude,
+  stripHashes: () => stripHashes,
+  verifyHash: () => verifyHash
+});
+function resolveConfig(config, pluginConfig) {
+  const merged = {
+    ...pluginConfig,
+    ...config
+  };
+  if (!merged || Object.keys(merged).length === 0) {
+    return { ...DEFAULT_CONFIG, exclude: [...DEFAULT_CONFIG.exclude] };
+  }
+  return {
+    exclude: merged.exclude ?? [...DEFAULT_CONFIG.exclude],
+    maxFileSize: merged.maxFileSize ?? DEFAULT_CONFIG.maxFileSize,
+    hashLength: merged.hashLength ?? DEFAULT_CONFIG.hashLength,
+    cacheSize: merged.cacheSize ?? DEFAULT_CONFIG.cacheSize,
+    prefix: merged.prefix !== void 0 ? merged.prefix : DEFAULT_CONFIG.prefix
+  };
+}
+function fnv1aHash(str) {
+  let hash = 2166136261;
+  for (let i = 0; i < str.length; i++) {
+    hash ^= str.charCodeAt(i);
+    hash = hash * 16777619 >>> 0;
+  }
+  return hash;
+}
+function getModulus(hashLen) {
+  let cached = modulusCache.get(hashLen);
+  if (cached === void 0) {
+    cached = Math.pow(16, hashLen);
+    modulusCache.set(hashLen, cached);
+  }
+  return cached;
+}
+function getAdaptiveHashLength(lineCount) {
+  if (lineCount <= 4096) return 3;
+  return 4;
+}
+function computeLineHash(idx, line, hashLen = 3) {
+  const trimmed = line.trimEnd();
+  const input = `${idx}:${trimmed}`;
+  const raw = fnv1aHash(input);
+  const modulus = getModulus(hashLen);
+  const hash = raw % modulus;
+  return hash.toString(16).padStart(hashLen, "0");
+}
+function formatFileWithHashes(content, hashLen, prefix) {
+  const lines = content.split("\n");
+  const effectiveLen = hashLen && hashLen >= 3 ? hashLen : getAdaptiveHashLength(lines.length);
+  const effectivePrefix = prefix === void 0 ? DEFAULT_PREFIX : prefix === false ? "" : prefix;
+  const hashes = new Array(lines.length);
+  const seen = /* @__PURE__ */ new Map();
+  for (let idx = 0; idx < lines.length; idx++) {
+    const hash = computeLineHash(idx, lines[idx], effectiveLen);
+    if (seen.has(hash)) {
+      const longerLen = Math.min(effectiveLen + 1, 8);
+      hashes[idx] = computeLineHash(idx, lines[idx], longerLen);
+    } else {
+      seen.set(hash, idx);
+      hashes[idx] = hash;
+    }
+  }
+  return lines.map((line, idx) => {
+    return `${effectivePrefix}${idx + 1}:${hashes[idx]}|${line}`;
+  }).join("\n");
+}
+function stripHashes(content, prefix) {
+  const effectivePrefix = prefix === void 0 ? DEFAULT_PREFIX : prefix === false ? "" : prefix;
+  const escapedPrefix = effectivePrefix.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+  let hashLinePattern = stripRegexCache.get(escapedPrefix);
+  if (!hashLinePattern) {
+    hashLinePattern = new RegExp(`^${escapedPrefix}\\d+:[0-9a-f]{2,8}\\|`);
+    stripRegexCache.set(escapedPrefix, hashLinePattern);
+  }
+  return content.split("\n").map((line) => {
+    const match = line.match(hashLinePattern);
+    if (match) {
+      return line.slice(match[0].length);
+    }
+    return line;
+  }).join("\n");
+}
+function parseHashRef(ref) {
+  const match = ref.match(/^(\d+):([0-9a-f]{2,8})$/);
+  if (!match) {
+    throw new Error(`Invalid hash reference: "${ref}". Expected format: "<line>:<2-8 char hex>"`);
+  }
+  return {
+    line: parseInt(match[1], 10),
+    hash: match[2]
+  };
+}
+function normalizeHashRef(ref) {
+  const trimmed = ref.trim();
+  const plain = trimmed.match(/^(\d+):([0-9a-f]{2,8})$/i);
+  if (plain) {
+    return `${parseInt(plain[1], 10)}:${plain[2].toLowerCase()}`;
+  }
+  const annotated = trimmed.match(/^(?:[#\w]*\s+)?(\d+):([0-9a-f]{2,8})\|.*$/i);
+  if (annotated) {
+    return `${parseInt(annotated[1], 10)}:${annotated[2].toLowerCase()}`;
+  }
+  throw new Error(
+    `Invalid hash reference: "${ref}". Expected "<line>:<hash>" or an annotated line like "#HL <line>:<hash>|..."`
+  );
+}
+function buildHashMap(content, hashLen) {
+  const lines = content.split("\n");
+  const effectiveLen = hashLen && hashLen >= 3 ? hashLen : getAdaptiveHashLength(lines.length);
+  const map = /* @__PURE__ */ new Map();
+  for (let idx = 0; idx < lines.length; idx++) {
+    const hash = computeLineHash(idx, lines[idx], effectiveLen);
+    const lineNum = idx + 1;
+    map.set(`${lineNum}:${hash}`, lineNum);
+  }
+  return map;
+}
+function verifyHash(lineNumber, hash, currentContent, hashLen, lines) {
+  const contentLines = lines ?? currentContent.split("\n");
+  const effectiveLen = hashLen && hashLen >= 2 ? hashLen : hash.length;
+  if (lineNumber < 1 || lineNumber > contentLines.length) {
+    return {
+      valid: false,
+      message: `Line ${lineNumber} is out of range (file has ${contentLines.length} lines)`
+    };
+  }
+  const idx = lineNumber - 1;
+  const actualHash = computeLineHash(idx, contentLines[idx], effectiveLen);
+  if (actualHash !== hash) {
+    return {
+      valid: false,
+      expected: hash,
+      actual: actualHash,
+      message: `Hash mismatch at line ${lineNumber}: expected "${hash}", got "${actualHash}". The file may have changed since it was read.`
+    };
+  }
+  return { valid: true };
+}
+function resolveRange(startRef, endRef, content, hashLen) {
+  const start = parseHashRef(startRef);
+  const end = parseHashRef(endRef);
+  if (start.line > end.line) {
+    throw new Error(
+      `Invalid range: start line ${start.line} is after end line ${end.line}`
+    );
+  }
+  const lines = content.split("\n");
+  const startVerify = verifyHash(start.line, start.hash, content, hashLen, lines);
+  if (!startVerify.valid) {
+    throw new Error(`Start reference invalid: ${startVerify.message}`);
+  }
+  const endVerify = verifyHash(end.line, end.hash, content, hashLen, lines);
+  if (!endVerify.valid) {
+    throw new Error(`End reference invalid: ${endVerify.message}`);
+  }
+  const rangeLines = lines.slice(start.line - 1, end.line);
+  return {
+    startLine: start.line,
+    endLine: end.line,
+    lines: rangeLines,
+    content: rangeLines.join("\n")
+  };
+}
+function replaceRange(startRef, endRef, content, replacement, hashLen) {
+  const range = resolveRange(startRef, endRef, content, hashLen);
+  const lines = content.split("\n");
+  const before = lines.slice(0, range.startLine - 1);
+  const after = lines.slice(range.endLine);
+  const replacementLines = replacement.split("\n");
+  return [...before, ...replacementLines, ...after].join("\n");
+}
+function applyHashEdit(input, content, hashLen) {
+  const normalizedStart = normalizeHashRef(input.startRef);
+  const start = parseHashRef(normalizedStart);
+  const lines = content.split("\n");
+  const startVerify = verifyHash(start.line, start.hash, content, hashLen, lines);
+  if (!startVerify.valid) {
+    throw new Error(`Start reference invalid: ${startVerify.message}`);
+  }
+  if (input.operation === "insert_before" || input.operation === "insert_after") {
+    if (input.replacement === void 0) {
+      throw new Error(`Operation "${input.operation}" requires "replacement" content`);
+    }
+    const insertionLines = input.replacement.split("\n");
+    const insertIndex = input.operation === "insert_before" ? start.line - 1 : start.line;
+    const next2 = [
+      ...lines.slice(0, insertIndex),
+      ...insertionLines,
+      ...lines.slice(insertIndex)
+    ].join("\n");
+    return {
+      operation: input.operation,
+      startLine: start.line,
+      endLine: start.line,
+      content: next2
+    };
+  }
+  const normalizedEnd = normalizeHashRef(input.endRef ?? input.startRef);
+  const end = parseHashRef(normalizedEnd);
+  if (start.line > end.line) {
+    throw new Error(
+      `Invalid range: start line ${start.line} is after end line ${end.line}`
+    );
+  }
+  const endVerify = verifyHash(end.line, end.hash, content, hashLen, lines);
+  if (!endVerify.valid) {
+    throw new Error(`End reference invalid: ${endVerify.message}`);
+  }
+  const replacement = input.operation === "delete" ? "" : input.replacement;
+  if (replacement === void 0) {
+    throw new Error(`Operation "${input.operation}" requires "replacement" content`);
+  }
+  const before = lines.slice(0, start.line - 1);
+  const after = lines.slice(end.line);
+  const replacementLines = input.operation === "delete" ? [] : replacement.split("\n");
+  const next = [...before, ...replacementLines, ...after].join("\n");
+  return {
+    operation: input.operation,
+    startLine: start.line,
+    endLine: end.line,
+    content: next
+  };
+}
+function matchesGlob(filePath, pattern) {
+  const normalizedPath = filePath.replace(/\\/g, "/");
+  const normalizedPattern = pattern.replace(/\\/g, "/");
+  const isMatch = (0, import_picomatch.default)(normalizedPattern, { dot: true });
+  return isMatch(normalizedPath);
+}
+function shouldExclude(filePath, patterns) {
+  return patterns.some((pattern) => matchesGlob(filePath, pattern));
+}
+function getByteLength(content) {
+  return textEncoder.encode(content).length;
+}
+function createHashline(config) {
+  const resolved = resolveConfig(config);
+  const cache = new HashlineCache(resolved.cacheSize);
+  const hl = resolved.hashLength || 0;
+  const pfx = resolved.prefix;
+  return {
+    config: resolved,
+    cache,
+    formatFileWithHashes(content, filePath) {
+      if (filePath) {
+        const cached = cache.get(filePath, content);
+        if (cached) return cached;
+      }
+      const result = formatFileWithHashes(content, hl, pfx);
+      if (filePath) {
+        cache.set(filePath, content, result);
+      }
+      return result;
+    },
+    stripHashes(content) {
+      return stripHashes(content, pfx);
+    },
+    computeLineHash(idx, line) {
+      return computeLineHash(idx, line, hl || 3);
+    },
+    buildHashMap(content) {
+      return buildHashMap(content, hl);
+    },
+    verifyHash(lineNumber, hash, currentContent) {
+      return verifyHash(lineNumber, hash, currentContent, hl);
+    },
+    resolveRange(startRef, endRef, content) {
+      return resolveRange(startRef, endRef, content, hl);
+    },
+    replaceRange(startRef, endRef, content, replacement) {
+      return replaceRange(startRef, endRef, content, replacement, hl);
+    },
+    applyHashEdit(input, content) {
+      return applyHashEdit(input, content, hl);
+    },
+    normalizeHashRef(ref) {
+      return normalizeHashRef(ref);
+    },
+    parseHashRef(ref) {
+      return parseHashRef(ref);
+    },
+    shouldExclude(filePath) {
+      return shouldExclude(filePath, resolved.exclude);
+    }
+  };
+}
+var import_picomatch, DEFAULT_EXCLUDE_PATTERNS, DEFAULT_PREFIX, DEFAULT_CONFIG, modulusCache, stripRegexCache, HashlineCache, textEncoder;
+var init_hashline = __esm({
+  "src/hashline.ts"() {
+    "use strict";
+    import_picomatch = __toESM(require("picomatch"), 1);
+    DEFAULT_EXCLUDE_PATTERNS = [
+      "**/node_modules/**",
+      "**/*.lock",
+      "**/package-lock.json",
+      "**/yarn.lock",
+      "**/pnpm-lock.yaml",
+      "**/*.min.js",
+      "**/*.min.css",
+      "**/*.bundle.js",
+      "**/*.map",
+      "**/*.wasm",
+      "**/*.png",
+      "**/*.jpg",
+      "**/*.jpeg",
+      "**/*.gif",
+      "**/*.ico",
+      "**/*.svg",
+      "**/*.woff",
+      "**/*.woff2",
+      "**/*.ttf",
+      "**/*.eot",
+      "**/*.pdf",
+      "**/*.zip",
+      "**/*.tar",
+      "**/*.gz",
+      "**/*.exe",
+      "**/*.dll",
+      "**/*.so",
+      "**/*.dylib"
+    ];
+    DEFAULT_PREFIX = "#HL ";
+    DEFAULT_CONFIG = {
+      exclude: DEFAULT_EXCLUDE_PATTERNS,
+      maxFileSize: 1048576,
+      // 1 MB
+      hashLength: 0,
+      // 0 = adaptive
+      cacheSize: 100,
+      prefix: DEFAULT_PREFIX
+    };
+    modulusCache = /* @__PURE__ */ new Map();
+    stripRegexCache = /* @__PURE__ */ new Map();
+    HashlineCache = class {
+      cache;
+      maxSize;
+      constructor(maxSize = 100) {
+        this.cache = /* @__PURE__ */ new Map();
+        this.maxSize = maxSize;
+      }
+      /**
+       * Get cached annotated content for a file, or null if not cached / stale.
+       */
+      get(filePath, content) {
+        const entry = this.cache.get(filePath);
+        if (!entry) return null;
+        const currentHash = fnv1aHash(content);
+        if (entry.contentHash !== currentHash) {
+          this.cache.delete(filePath);
+          return null;
+        }
+        this.cache.delete(filePath);
+        this.cache.set(filePath, entry);
+        return entry.annotated;
+      }
+      /**
+       * Store annotated content in the cache.
+       */
+      set(filePath, content, annotated) {
+        if (this.cache.has(filePath)) {
+          this.cache.delete(filePath);
+        }
+        if (this.cache.size >= this.maxSize) {
+          const oldest = this.cache.keys().next().value;
+          if (oldest !== void 0) {
+            this.cache.delete(oldest);
+          }
+        }
+        this.cache.set(filePath, {
+          contentHash: fnv1aHash(content),
+          annotated
+        });
+      }
+      /**
+       * Invalidate a specific file from the cache.
+       */
+      invalidate(filePath) {
+        this.cache.delete(filePath);
+      }
+      /**
+       * Clear the entire cache.
+       */
+      clear() {
+        this.cache.clear();
+      }
+      /**
+       * Get the current number of cached entries.
+       */
+      get size() {
+        return this.cache.size;
+      }
+    };
+    textEncoder = new TextEncoder();
+  }
+});
+
+// src/index.ts
+var index_exports = {};
+__export(index_exports, {
+  HashlinePlugin: () => HashlinePlugin,
+  createHashlinePlugin: () => createHashlinePlugin,
+  default: () => index_default
+});
+module.exports = __toCommonJS(index_exports);
+var import_fs3 = require("fs");
+var import_path3 = require("path");
+var import_os2 = require("os");
+var import_url = require("url");
+
+// src/hooks.ts
+var import_fs = require("fs");
+var import_path = require("path");
+var import_os = require("os");
+init_hashline();
+var DEBUG_LOG = (0, import_path.join)((0, import_os.homedir)(), ".config", "opencode", "hashline-debug.log");
+function debug(...args) {
+  const line = `[${(/* @__PURE__ */ new Date()).toISOString()}] ${args.map((a) => typeof a === "string" ? a : JSON.stringify(a)).join(" ")}
+`;
+  try {
+    (0, import_fs.appendFileSync)(DEBUG_LOG, line);
+  } catch {
+  }
+}
+var FILE_READ_TOOLS = ["read", "file_read", "read_file", "cat", "view"];
+var FILE_EDIT_TOOLS = ["write", "file_write", "file_edit", "edit", "edit_file", "patch", "apply_patch", "multiedit"];
+function isFileReadTool(toolName, args) {
+  const lower = toolName.toLowerCase();
+  const nameMatch = FILE_READ_TOOLS.some(
+    (name) => lower === name || lower.endsWith(`.${name}`)
+  );
+  if (nameMatch) return true;
+  if (args && typeof args === "object") {
+    if (typeof args.path === "string" || typeof args.filePath === "string" || typeof args.file === "string") {
+      const writeIndicators = ["write", "edit", "patch", "execute", "run", "command", "shell", "bash"];
+      const isWrite = writeIndicators.some((w) => lower.includes(w));
+      if (!isWrite) return true;
+    }
+  }
+  return false;
+}
+function createFileReadAfterHook(cache, config) {
+  const resolved = config ?? resolveConfig();
+  const hashLen = resolved.hashLength || 0;
+  const prefix = resolved.prefix;
+  return async (input, output) => {
+    debug("tool.execute.after:", input.tool, "args:", input.args);
+    if (!isFileReadTool(input.tool, input.args)) {
+      debug("skipped: not a file-read tool");
+      return;
+    }
+    if (!output.output || typeof output.output !== "string") {
+      debug("skipped: no string output, type:", typeof output.output, "keys:", Object.keys(output));
+      return;
+    }
+    const content = output.output;
+    if (resolved.maxFileSize > 0) {
+      const byteLength = getByteLength(content);
+      if (byteLength > resolved.maxFileSize) {
+        return;
+      }
+    }
+    const filePath = input.args?.path || input.args?.file || input.args?.filePath;
+    if (typeof filePath === "string" && shouldExclude(filePath, resolved.exclude)) {
+      return;
+    }
+    if (cache && typeof filePath === "string") {
+      const cached = cache.get(filePath, content);
+      if (cached) {
+        output.output = cached;
+        return;
+      }
+    }
+    const annotated = formatFileWithHashes(content, hashLen || void 0, prefix);
+    output.output = annotated;
+    debug("annotated", typeof filePath === "string" ? filePath : input.tool, "lines:", content.split("\n").length);
+    if (cache && typeof filePath === "string") {
+      cache.set(filePath, content, annotated);
+    }
+  };
+}
+function createFileEditBeforeHook(config) {
+  const resolved = config ?? resolveConfig();
+  const prefix = resolved.prefix;
+  return async (input, output) => {
+    const toolName = input.tool.toLowerCase();
+    const isFileEdit = FILE_EDIT_TOOLS.some(
+      (name) => toolName === name || toolName.endsWith(`.${name}`)
+    );
+    if (!isFileEdit) return;
+    if (!output.args || typeof output.args !== "object") return;
+    const contentFields = [
+      "content",
+      "new_content",
+      "old_content",
+      "old_string",
+      "new_string",
+      "replacement",
+      "text",
+      "diff",
+      "patch",
+      "patchText"
+    ];
+    for (const field of contentFields) {
+      if (typeof output.args[field] === "string") {
+        output.args[field] = stripHashes(output.args[field], prefix);
+      }
+    }
+  };
+}
+function createSystemPromptHook(config) {
+  const resolved = config ?? resolveConfig();
+  const prefix = resolved.prefix === false ? "" : resolved.prefix;
+  return async (_input, output) => {
+    output.system.push(
+      [
+        "## Hashline \u2014 Line Reference System",
+        "",
+        `File contents are annotated with hashline prefixes in the format \`${prefix}<line>:<hash>|<content>\`.`,
+        "The hash length adapts to file size: 3 chars for files \u22644096 lines, 4 chars for larger files.",
+        "",
+        "### Example (small file, 3-char hashes):",
+        "```",
+        `${prefix}1:a3f|function hello() {`,
+        `${prefix}2:f1c| return "world";`,
+        `${prefix}3:0e7|}`,
+        "```",
+        "",
+        "### Example (large file, 4-char hashes):",
+        "```",
+        `${prefix}1:a3f2|import { useState } from 'react';`,
+        `${prefix}2:f12c|`,
+        `${prefix}3:0e7a|export function App() {`,
+        "```",
+        "",
+        "### How to reference lines:",
+        "You can reference specific lines using their hash tags (e.g., `2:f1c` or `2:f12c`).",
+        "When editing files, you may include or omit the hash prefixes \u2014 they will be stripped automatically.",
+        "",
+        "### Edit operations using hash references:",
+        "",
+        "**Preferred tool-based edit (hash-aware):**",
+        '- Use the `hashline_edit` tool with refs like `startRef: "2:f1c"` and optional `endRef`.',
+        "- This avoids fragile old_string matching because edits are resolved by hash references.",
+        "",
+        "**Replace a single line:**",
+        '- "Replace line 2:f1c" \u2014 target a specific line unambiguously',
+        "",
+        "**Replace a block of lines:**",
+        '- "Replace block from 1:a3f to 3:0e7" \u2014 replace a range of lines',
+        "- Example: replace lines 1:a3f through 3:0e7 with new content",
+        "",
+        "**Insert content:**",
+        '- "Insert after 3:0e7" \u2014 insert new lines after a specific line',
+        '- "Insert before 1:a3f" \u2014 insert new lines before a specific line',
+        "",
+        "**Delete lines:**",
+        '- "Delete lines from 2:f1c to 3:0e7" \u2014 remove a range of lines',
+        "",
+        "### Hash verification rules:",
+        "- **Always verify** that the hash reference matches the current line content before editing.",
+        "- If a hash doesn't match, the file may have changed since you last read it \u2014 re-read the file first.",
+        '- Hash references include both the line number AND the content hash, so `2:f1c` means "line 2 with hash f1c".',
+        "- If you see a mismatch, do NOT proceed with the edit \u2014 re-read the file to get fresh references.",
+        "",
+        "### Best practices:",
+        "- Use hash references for all edit operations to ensure precision.",
+        "- When making multiple edits, work from bottom to top to avoid line number shifts.",
+        "- For large replacements, use range references (e.g., `1:a3f to 10:b2c`) instead of individual lines."
+      ].join("\n")
+    );
+  };
+}
+
+// src/index.ts
+init_hashline();
+
+// src/hashline-tool.ts
+var import_fs2 = require("fs");
+var import_path2 = require("path");
+var import_tool = require("@opencode-ai/plugin/tool");
+init_hashline();
+function createHashlineEditTool(config, cache) {
+  return (0, import_tool.tool)({
+    description: "Edit files using hashline references. Resolves refs like 5:a3f or '#HL 5:a3f|...' and applies replace/delete/insert without old_string matching.",
+    args: {
+      path: import_tool.tool.schema.string().describe("Path to the file (absolute or relative to project directory)"),
+      operation: import_tool.tool.schema.enum(["replace", "delete", "insert_before", "insert_after"]).describe("Edit operation"),
+      startRef: import_tool.tool.schema.string().describe('Start hash reference, e.g. "5:a3f" or "#HL 5:a3f|const x = 1;"'),
+      endRef: import_tool.tool.schema.string().optional().describe("End hash reference for range operations. Defaults to startRef when omitted."),
+      replacement: import_tool.tool.schema.string().optional().describe("Replacement/inserted content. Required for replace/insert operations.")
+    },
+    async execute(args, context) {
+      const absPath = (0, import_path2.isAbsolute)(args.path) ? args.path : (0, import_path2.resolve)(context.directory, args.path);
+      const normalizedAbs = (0, import_path2.resolve)(absPath);
+      const normalizedWorktree = (0, import_path2.resolve)(context.worktree);
+      if (normalizedAbs !== normalizedWorktree && !normalizedAbs.startsWith(normalizedWorktree + import_path2.sep)) {
+        throw new Error(`Access denied: "${args.path}" resolves outside the project directory`);
+      }
+      const displayPath = (0, import_path2.relative)(context.worktree, absPath) || args.path;
+      let current;
+      try {
+        current = (0, import_fs2.readFileSync)(absPath, "utf-8");
+      } catch (error) {
+        const reason = error instanceof Error ? error.message : String(error);
+        throw new Error(`Failed to read "${displayPath}": ${reason}`);
+      }
+      let nextContent;
+      let startLine;
+      let endLine;
+      try {
+        const result = applyHashEdit(
+          {
+            operation: args.operation,
+            startRef: args.startRef,
+            endRef: args.endRef,
+            replacement: args.replacement
+          },
+          current,
+          config.hashLength || void 0
+        );
+        nextContent = result.content;
+        startLine = result.startLine;
+        endLine = result.endLine;
+      } catch (error) {
+        const reason = error instanceof Error ? error.message : String(error);
+        throw new Error(`Hashline edit failed for "${displayPath}": ${reason}`);
+      }
+      try {
+        (0, import_fs2.writeFileSync)(absPath, nextContent, "utf-8");
+      } catch (error) {
+        const reason = error instanceof Error ? error.message : String(error);
+        throw new Error(`Failed to write "${displayPath}": ${reason}`);
+      }
+      if (cache) {
+        cache.invalidate(absPath);
+        cache.invalidate(normalizedAbs);
+        if (args.path !== absPath) cache.invalidate(args.path);
+        if (displayPath !== absPath) cache.invalidate(displayPath);
+      }
+      context.metadata({
+        title: `hashline_edit: ${args.operation} ${displayPath}`,
+        metadata: {
+          path: displayPath,
+          operation: args.operation,
+          startLine,
+          endLine
+        }
+      });
+      return [
+        `Applied ${args.operation} to ${displayPath}.`,
+        `Resolved range: ${startLine}-${endLine}.`,
+        "Re-read the file to get fresh hash references before the next edit."
+      ].join("\n");
+    }
+  });
+}
+
+// src/index.ts
+var CONFIG_FILENAME = "opencode-hashline.json";
+function loadConfigFile(filePath) {
+  try {
+    const raw = (0, import_fs3.readFileSync)(filePath, "utf-8");
+    return JSON.parse(raw);
+  } catch {
+    return void 0;
+  }
+}
+function loadConfig(projectDir, userConfig) {
+  const globalPath = (0, import_path3.join)((0, import_os2.homedir)(), ".config", "opencode", CONFIG_FILENAME);
+  const globalConfig = loadConfigFile(globalPath);
+  let projectConfig;
+  if (projectDir) {
+    projectConfig = loadConfigFile((0, import_path3.join)(projectDir, CONFIG_FILENAME));
+  }
+  return {
+    ...globalConfig,
+    ...projectConfig,
+    ...userConfig
+  };
+}
+function createHashlinePlugin(userConfig) {
+  return async (input) => {
+    const projectDir = input.directory;
+    const fileConfig = loadConfig(projectDir, userConfig);
+    const config = resolveConfig(fileConfig);
+    const cache = new HashlineCache(config.cacheSize);
+    const { appendFileSync: writeLog } = await import("fs");
+    const debugLog = (0, import_path3.join)((0, import_os2.homedir)(), ".config", "opencode", "hashline-debug.log");
+    try {
+      writeLog(debugLog, `[${(/* @__PURE__ */ new Date()).toISOString()}] plugin loaded, prefix: ${JSON.stringify(config.prefix)}, maxFileSize: ${config.maxFileSize}, projectDir: ${projectDir}
+`);
+    } catch {
+    }
+    return {
+      tool: {
+        hashline_edit: createHashlineEditTool(config, cache)
+      },
+      "tool.execute.after": createFileReadAfterHook(cache, config),
+      "tool.execute.before": createFileEditBeforeHook(config),
+      "experimental.chat.system.transform": createSystemPromptHook(config),
+      "chat.message": async (_input, output) => {
+        try {
+          const out = output;
+          const hashLen = config.hashLength || 0;
+          const prefix = config.prefix;
+          const { formatFileWithHashes: formatFileWithHashes2, shouldExclude: shouldExclude2, getByteLength: getByteLength2 } = await Promise.resolve().then(() => (init_hashline(), hashline_exports));
+          for (const p of out.parts ?? []) {
+            if (p.type !== "file") continue;
+            if (!p.url || !p.mime?.startsWith("text/")) continue;
+            let filePath;
+            if (typeof p.url === "string" && p.url.startsWith("file://")) {
+              filePath = (0, import_url.fileURLToPath)(p.url);
+            }
+            if (!filePath) continue;
+            if (shouldExclude2(filePath, config.exclude)) continue;
+            let content;
+            try {
+              content = (0, import_fs3.readFileSync)(filePath, "utf-8");
+            } catch {
+              continue;
+            }
+            if (config.maxFileSize > 0 && getByteLength2(content) > config.maxFileSize) continue;
+            const cached = cache.get(filePath, content);
+            if (cached) {
+              const tmpPath2 = (0, import_path3.join)((0, import_os2.tmpdir)(), `hashline-${p.id}.txt`);
+              (0, import_fs3.writeFileSync)(tmpPath2, cached, "utf-8");
+              p.url = `file://${tmpPath2}`;
+              writeLog(debugLog, `[${(/* @__PURE__ */ new Date()).toISOString()}] chat.message annotated (cached): ${filePath}
+`);
+              continue;
+            }
+            const annotated = formatFileWithHashes2(content, hashLen || void 0, prefix);
+            cache.set(filePath, content, annotated);
+            const tmpPath = (0, import_path3.join)((0, import_os2.tmpdir)(), `hashline-${p.id}.txt`);
+            (0, import_fs3.writeFileSync)(tmpPath, annotated, "utf-8");
+            p.url = `file://${tmpPath}`;
+            writeLog(debugLog, `[${(/* @__PURE__ */ new Date()).toISOString()}] chat.message annotated: ${filePath} lines=${content.split("\n").length}
+`);
+          }
+        } catch (e) {
+          try {
+            writeLog(debugLog, `[${(/* @__PURE__ */ new Date()).toISOString()}] chat.message error: ${e}
+`);
+          } catch {
+          }
+        }
+      }
+    };
+  };
+}
+var HashlinePlugin = createHashlinePlugin();
+var index_default = HashlinePlugin;
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  HashlinePlugin,
+  createHashlinePlugin
+});
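For orientation, the sketch below walks through the round trip that the file above implements: `formatFileWithHashes` annotates what a file-read tool returns, `applyHashEdit` resolves a `<line>:<hash>` reference against the current file, and `stripHashes` removes any prefixes echoed back into an edit. This is a minimal sketch and not part of the published package: the `require` path is an assumption (the real export map lives in package/package.json and dist/utils.cjs, which are not shown in this diff), and the `xxx` hashes are placeholders for whatever `computeLineHash` actually produces.

```js
// Sketch of the hashline round trip implemented in dist/index.cjs.
// ASSUMPTION: the helpers are re-exported from an importable entry point
// (package root or a "utils" subpath); check package.json "exports" for the
// real path before running this.
const { formatFileWithHashes, applyHashEdit, stripHashes } = require("opencode-hashline");

const source = 'function hello() {\n  return "world";\n}';

// Read path: each line gets "#HL <line>:<hash>|" prepended. Hash length is
// adaptive: 3 hex chars for files of <=4096 lines, 4 for larger files.
const annotated = formatFileWithHashes(source);
// #HL 1:xxx|function hello() {
// #HL 2:xxx|  return "world";
// #HL 3:xxx|}            ("xxx" stands for the real FNV-1a-derived hashes)

// Edit path: take the "<line>:<hash>" ref for line 2 straight from the
// annotated view and replace that line. verifyHash() inside applyHashEdit
// rejects the edit if the hash no longer matches, i.e. the file has changed.
const ref = annotated.split("\n")[1].slice("#HL ".length).split("|")[0]; // "2:..."
const edited = applyHashEdit(
  { operation: "replace", startRef: ref, replacement: '  return "hashline";' },
  source
);
console.log(edited.content);

// Write path: createFileEditBeforeHook strips any prefixes a model copies
// back into edit-tool arguments, so annotations never reach the file on disk.
console.log(stripHashes(annotated) === source); // true
```

The `hashline_edit` tool wires the same `applyHashEdit` call to the filesystem (with a worktree containment check and cache invalidation), and the `tool.execute.before` hook runs `stripHashes` over the common edit-tool argument fields.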