@jsonstudio/llms 0.6.753 → 0.6.795
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/conversion/compat/actions/apply-patch-fixer.d.ts +1 -0
- package/dist/conversion/compat/actions/apply-patch-fixer.js +30 -0
- package/dist/conversion/compat/actions/apply-patch-format-fixer.d.ts +1 -0
- package/dist/conversion/compat/actions/apply-patch-format-fixer.js +233 -0
- package/dist/conversion/compat/actions/index.d.ts +2 -0
- package/dist/conversion/compat/actions/index.js +2 -0
- package/dist/conversion/compat/profiles/chat-gemini.json +15 -15
- package/dist/conversion/compat/profiles/chat-glm.json +194 -194
- package/dist/conversion/compat/profiles/chat-iflow.json +199 -199
- package/dist/conversion/compat/profiles/chat-lmstudio.json +43 -43
- package/dist/conversion/compat/profiles/chat-qwen.json +20 -20
- package/dist/conversion/compat/profiles/responses-c4m.json +42 -42
- package/dist/conversion/compat/profiles/responses-output2choices-test.json +10 -9
- package/dist/conversion/hub/pipeline/hub-pipeline.d.ts +6 -0
- package/dist/conversion/hub/pipeline/hub-pipeline.js +35 -0
- package/dist/conversion/shared/bridge-message-utils.d.ts +1 -0
- package/dist/conversion/shared/bridge-message-utils.js +7 -0
- package/dist/conversion/shared/bridge-policies.js +8 -8
- package/dist/conversion/shared/tool-governor.js +18 -23
- package/dist/filters/special/response-tool-arguments-stringify.js +3 -22
- package/dist/router/virtual-router/engine.d.ts +5 -0
- package/dist/router/virtual-router/engine.js +21 -0
- package/dist/tools/apply-patch/regression-capturer.d.ts +12 -0
- package/dist/tools/apply-patch/regression-capturer.js +112 -0
- package/dist/tools/apply-patch/structured.d.ts +20 -0
- package/dist/tools/apply-patch/structured.js +441 -0
- package/dist/tools/apply-patch/validator.d.ts +8 -0
- package/dist/tools/apply-patch/validator.js +466 -0
- package/dist/tools/apply-patch-structured.d.ts +1 -20
- package/dist/tools/apply-patch-structured.js +1 -277
- package/dist/tools/args-json.d.ts +1 -0
- package/dist/tools/args-json.js +175 -0
- package/dist/tools/exec-command/normalize.d.ts +17 -0
- package/dist/tools/exec-command/normalize.js +112 -0
- package/dist/tools/exec-command/regression-capturer.d.ts +11 -0
- package/dist/tools/exec-command/regression-capturer.js +144 -0
- package/dist/tools/exec-command/validator.d.ts +6 -0
- package/dist/tools/exec-command/validator.js +22 -0
- package/dist/tools/patch-args-normalizer.d.ts +15 -0
- package/dist/tools/patch-args-normalizer.js +472 -0
- package/dist/tools/patch-regression-capturer.d.ts +1 -0
- package/dist/tools/patch-regression-capturer.js +1 -0
- package/dist/tools/tool-registry.js +36 -541
- package/package.json +1 -1

package/dist/tools/exec-command/regression-capturer.js
@@ -0,0 +1,144 @@
+import * as fs from 'fs';
+import * as path from 'path';
+import * as os from 'os';
+import { createHash } from 'crypto';
+/**
+ * Captures exec_command validation regressions into a golden samples structure.
+ *
+ * Trigger: exec_command validation failure (shape/JSON/required fields)
+ *
+ * Default destination:
+ *   ~/.routecodex/golden_samples/ci-regression/exec_command/<error-type>/
+ *
+ * Optional repo destination (explicit opt-in):
+ *   ROUTECODEX_EXEC_COMMAND_REGRESSION_TO_REPO=1
+ *   → <repoRoot>/samples/ci-goldens/_regressions/exec_command/<error-type>/
+ *
+ * Explicit override:
+ *   ROUTECODEX_EXEC_COMMAND_REGRESSION_DIR=/abs/path
+ *
+ * Note: we intentionally redact obvious secrets from captured payloads.
+ */
+const MAX_SAMPLES_PER_TYPE = 50;
+function detectRepoRootFromCwd() {
+    try {
+        let dir = process.cwd();
+        for (let i = 0; i < 12; i += 1) {
+            const marker = path.join(dir, 'samples', 'ci-goldens');
+            const pkg = path.join(dir, 'package.json');
+            if (fs.existsSync(marker) && fs.existsSync(pkg)) {
+                try {
+                    const raw = fs.readFileSync(pkg, 'utf-8');
+                    const json = JSON.parse(raw);
+                    if (json && typeof json === 'object' && String(json.name || '') === 'routecodex') {
+                        return dir;
+                    }
+                }
+                catch {
+                    // ignore parse errors
+                }
+            }
+            const parent = path.dirname(dir);
+            if (parent === dir)
+                break;
+            dir = parent;
+        }
+    }
+    catch {
+        // ignore
+    }
+    return null;
+}
+function resolveSamplesRoot() {
+    const explicit = String(process?.env?.ROUTECODEX_EXEC_COMMAND_REGRESSION_DIR || '').trim();
+    if (explicit) {
+        return path.resolve(explicit);
+    }
+    const toRepo = String(process?.env?.ROUTECODEX_EXEC_COMMAND_REGRESSION_TO_REPO || '').trim() === '1';
+    if (toRepo) {
+        const repoRoot = detectRepoRootFromCwd();
+        if (repoRoot) {
+            return path.join(repoRoot, 'samples', 'ci-goldens', '_regressions', 'exec_command');
+        }
+    }
+    return path.join(os.homedir(), '.routecodex', 'golden_samples', 'ci-regression', 'exec_command');
+}
+function stableSampleId(sample) {
+    const key = `${String(sample.errorType || 'unknown')}:` +
+        `${String(sample.originalArgs ?? '')}:` +
+        `${String(sample.normalizedArgs ?? '')}:` +
+        `${String(sample.validationError ?? '')}`;
+    return createHash('sha1').update(key).digest('hex').slice(0, 16);
+}
+function redactSecrets(text) {
+    if (!text)
+        return text;
+    let next = text;
+    // Common API key formats (best-effort).
+    next = next.replace(/\bsk-[A-Za-z0-9_-]{10,}\b/g, 'sk-<REDACTED>');
+    next = next.replace(/\bcr_[A-Za-z0-9]{20,}\b/g, 'cr_<REDACTED>');
+    next = next.replace(/\bAIza[0-9A-Za-z_-]{20,}\b/g, 'AIza<REDACTED>');
+    // Authorization headers.
+    next = next.replace(/(authorization\"?\s*:\s*\"?bearer\s+)[^\"]+/gi, '$1<REDACTED>');
+    next = next.replace(/(authorization:\s*bearer\s+)[^\s]+/gi, '$1<REDACTED>');
+    // Shell exports of *_KEY/*_TOKEN/*_SECRET.
+    next = next.replace(/(\bexport\s+[A-Z0-9_]*(?:KEY|TOKEN|SECRET)[A-Z0-9_]*=)[^\s'"]+/gi, '$1<REDACTED>');
+    return next;
+}
+function sanitizeArgs(raw) {
+    const trimmed = String(raw || '');
+    if (!trimmed)
+        return trimmed;
+    // If it's JSON, redact known sensitive fields (cmd) aggressively.
+    try {
+        const obj = JSON.parse(trimmed);
+        if (obj && typeof obj === 'object') {
+            const cloned = Array.isArray(obj) ? obj.slice() : { ...obj };
+            if (typeof cloned.cmd === 'string') {
+                const cmdText = String(cloned.cmd);
+                cloned.cmd = redactSecrets(cmdText).slice(0, 500);
+            }
+            return redactSecrets(JSON.stringify(cloned, null, 2));
+        }
+    }
+    catch {
+        // ignore JSON parse errors
+    }
+    return redactSecrets(trimmed);
+}
+export function captureExecCommandRegression(sample) {
+    try {
+        const root = resolveSamplesRoot();
+        const safeType = String(sample.errorType || 'unknown').replace(/[^a-z0-9-]/gi, '_');
+        const typeDir = path.join(root, safeType);
+        if (!fs.existsSync(typeDir)) {
+            fs.mkdirSync(typeDir, { recursive: true });
+        }
+        try {
+            const existing = fs.readdirSync(typeDir).filter((f) => f.endsWith('.json'));
+            if (existing.length >= MAX_SAMPLES_PER_TYPE)
+                return;
+        }
+        catch {
+            // ignore
+        }
+        const sanitized = {
+            ...sample,
+            originalArgs: sanitizeArgs(sample.originalArgs),
+            normalizedArgs: typeof sample.normalizedArgs === 'string' ? sanitizeArgs(sample.normalizedArgs) : sample.normalizedArgs
+        };
+        const id = `sample_${stableSampleId(sanitized)}`;
+        const file = path.join(typeDir, `${id}.json`);
+        if (fs.existsSync(file))
+            return;
+        const fullSample = {
+            id,
+            timestamp: new Date().toISOString(),
+            ...sanitized
+        };
+        fs.writeFileSync(file, JSON.stringify(fullSample, null, 2), 'utf-8');
+    }
+    catch {
+        // Silently fail to avoid disrupting runtime
+    }
+}
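
For orientation, a minimal usage sketch of the new capturer follows. It is not taken from the package documentation: the import specifier and the sample field values are illustrative, and only errorType, originalArgs, normalizedArgs, and validationError feed the deduplicating sample id, as the code above shows.

```js
// Hypothetical call site: record a failed exec_command validation as a golden sample.
// Import path is illustrative; the package's public export surface is not shown in this diff.
import { captureExecCommandRegression } from '@jsonstudio/llms/dist/tools/exec-command/regression-capturer.js';

captureExecCommandRegression({
  errorType: 'missing_cmd',                 // hypothetical error-type bucket (becomes the subdirectory name)
  originalArgs: '{"command":"ls -la"}',     // raw tool-call arguments (obvious secrets are redacted before writing)
  normalizedArgs: undefined,
  validationError: 'cmd field is required'  // illustrative message
});
// By default this writes
//   ~/.routecodex/golden_samples/ci-regression/exec_command/missing_cmd/sample_<16-char-sha1>.json
// unless ROUTECODEX_EXEC_COMMAND_REGRESSION_DIR or ROUTECODEX_EXEC_COMMAND_REGRESSION_TO_REPO=1 redirect it,
// and it silently stops once 50 samples exist for that bucket.
```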

package/dist/tools/exec-command/validator.js
@@ -0,0 +1,22 @@
+import { parseToolArgsJson } from '../args-json.js';
+import { normalizeExecCommandArgs } from './normalize.js';
+const isRecord = (value) => typeof value === 'object' && value !== null && !Array.isArray(value);
+const toJson = (value) => {
+    try {
+        return JSON.stringify(value ?? {});
+    }
+    catch {
+        return '{}';
+    }
+};
+export function validateExecCommandArgs(argsString, rawArgs) {
+    const raw = typeof argsString === 'string' ? argsString : String(argsString ?? '');
+    const parsed = isRecord(rawArgs) && Object.keys(rawArgs).length > 0
+        ? rawArgs
+        : parseToolArgsJson(raw);
+    const normalized = normalizeExecCommandArgs(parsed);
+    if (normalized.ok === false) {
+        return { ok: false, reason: normalized.reason };
+    }
+    return { ok: true, normalizedArgs: toJson(normalized.normalized) };
+}
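
A sketch of the validator contract follows. exec-command/normalize.js is not part of this excerpt, so the concrete rejection reasons are placeholders; the import path is likewise an assumption.

```js
// Illustrative only: validateExecCommandArgs returns either { ok: false, reason }
// propagated from normalizeExecCommandArgs (not shown in this diff) or
// { ok: true, normalizedArgs } where normalizedArgs is the normalized object as a JSON string.
import { validateExecCommandArgs } from './exec-command/validator.js'; // path illustrative

const result = validateExecCommandArgs('{"cmd":"echo hello"}');
if (result.ok) {
  const args = JSON.parse(result.normalizedArgs); // normalized exec_command arguments
} else {
  console.warn('exec_command rejected:', result.reason); // shape/JSON/required-field failure
}
```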

package/dist/tools/patch-args-normalizer.d.ts
@@ -0,0 +1,15 @@
+export interface PatchNormalizationResult {
+    normalized?: string;
+    error?: string;
+}
+/**
+ * Attempt to fix common apply_patch argument format issues:
+ * 1. Missing *** Begin Patch / *** End Patch markers
+ * 2. Lines missing +/- prefix in hunks
+ * 3. Field aliases (input->patch, code->patch, diff->patch)
+ * 4. Structured changes array conversion
+ * 5. Git diff format conversion
+ */
+export declare function normalizeApplyPatchArgs(argsStr: string): PatchNormalizationResult;
+export declare function looksLikePatch(text?: string): boolean;
+export declare function normalizeApplyPatchText(raw: string): string;
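
To make the declared contract concrete, here is a hedged sketch of a repair pass. The input shape and import path are assumptions; the result handling matches the PatchNormalizationResult type above and the implementation below.

```js
// Sketch: a tool call that used the `input` alias and omitted the Begin/End markers.
import { normalizeApplyPatchArgs } from './patch-args-normalizer.js'; // path illustrative

const res = normalizeApplyPatchArgs(JSON.stringify({
  input: '*** Update File: src/app.ts\n@@\n-const a = 1;\n+const a = 2;'
}));
if (res.error) {
  // unfixable input, e.g. 'invalid_json' or 'invalid_args_type'
} else if (res.normalized) {
  // repaired: `input` renamed to `patch`, wrapped in *** Begin Patch / *** End Patch
  const { patch } = JSON.parse(res.normalized);
} // res.normalized === null means nothing needed fixing
```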

package/dist/tools/patch-args-normalizer.js
@@ -0,0 +1,472 @@
+// Unified apply_patch arguments normalizer
+// Centralizes all fixable format repairs: missing headers, prefix issues, field aliases, etc.
+// Does NOT fix context mismatches (those must be reported to user).
+import { buildStructuredPatch, isStructuredApplyPatchPayload } from './apply-patch-structured.js';
+/**
+ * Attempt to fix common apply_patch argument format issues:
+ * 1. Missing *** Begin Patch / *** End Patch markers
+ * 2. Lines missing +/- prefix in hunks
+ * 3. Field aliases (input->patch, code->patch, diff->patch)
+ * 4. Structured changes array conversion
+ * 5. Git diff format conversion
+ */
+export function normalizeApplyPatchArgs(argsStr) {
+    try {
+        let args;
+        try {
+            args = JSON.parse(argsStr);
+        }
+        catch {
+            // Fallback: raw patch text without JSON wrapper
+            if (argsStr.includes('*** Begin Patch')) {
+                return { normalized: JSON.stringify({ patch: argsStr }) };
+            }
+            return { error: 'invalid_json' };
+        }
+        if (!args || typeof args !== 'object') {
+            return { error: 'invalid_args_type' };
+        }
+        let modified = false;
+        // Fix 1: Handle field aliases
+        if (args.patch && args.input) {
+            delete args.input;
+            modified = true;
+        }
+        else if (!args.patch && args.input) {
+            args.patch = args.input;
+            delete args.input;
+            modified = true;
+        }
+        // Fix 2: Convert structured changes to patch text
+        if (!args.patch && isStructuredApplyPatchPayload(args)) {
+            try {
+                args.patch = buildStructuredPatch(args);
+                delete args.changes;
+                delete args.file;
+                delete args.instructions;
+                modified = true;
+            }
+            catch {
+                // Let structured errors pass through
+            }
+        }
+        // Fix 2.1: Convert { changes:[{search,replace}] } to structured replace
+        if (!args.patch && Array.isArray(args.changes)) {
+            const converted = convertSearchReplaceChanges(args);
+            if (converted) {
+                try {
+                    args.patch = buildStructuredPatch(converted);
+                    delete args.changes;
+                    delete args.file;
+                    delete args.instructions;
+                    modified = true;
+                }
+                catch {
+                    // Ignore conversion failure
+                }
+            }
+        }
+        // Fix 3: Alias fields (code, diff) -> patch
+        if (!args.patch) {
+            if (typeof args.code === 'string') {
+                args.patch = args.code;
+                delete args.code;
+                modified = true;
+            }
+            else if (typeof args.diff === 'string') {
+                args.patch = args.diff;
+                delete args.diff;
+                modified = true;
+            }
+        }
+        // Fix 4: Normalize patch text format
+        if (typeof args.patch === 'string') {
+            const cleaned = stripConflictMarkers(args.patch);
+            if (cleaned !== args.patch) {
+                args.patch = cleaned;
+                modified = true;
+            }
+            const fixed = fixPatchTextFormat(args.patch);
+            if (fixed !== args.patch) {
+                args.patch = fixed;
+                modified = true;
+            }
+        }
+        return modified ? { normalized: JSON.stringify(args) } : { normalized: null };
+    }
+    catch (error) {
+        return { error: 'normalization_failed' };
+    }
+}
+export function looksLikePatch(text) {
+    if (!text)
+        return false;
+    const t = text.trim();
+    if (!t)
+        return false;
+    return (t.includes('*** Begin Patch') ||
+        t.includes('*** Update File:') ||
+        t.includes('*** Add File:') ||
+        t.includes('*** Delete File:') ||
+        t.includes('diff --git') ||
+        /^(?:@@|\+\+\+\s|---\s)/m.test(t));
+}
+export function normalizeApplyPatchText(raw) {
+    if (!raw)
+        return raw;
+    let text = raw.replace(/\r\n/g, '\n');
+    text = decodeEscapedNewlinesIfNeeded(text);
+    text = stripCodeFences(text);
+    text = text.trim();
+    if (!text) {
+        return raw;
+    }
+    if (!text.includes('*** Begin Patch') && text.includes('diff --git')) {
+        const converted = convertGitDiffToApplyPatch(text);
+        if (converted) {
+            text = converted;
+        }
+    }
+    else if (!text.includes('*** Begin Patch') && !text.includes('diff --git')) {
+        const minusMatch = text.match(/^---\s+(.*)$/m);
+        const plusMatch = text.match(/^\+\+\+\s+(.*)$/m);
+        if (minusMatch && plusMatch) {
+            const rawPlus = plusMatch[1] || '';
+            const pathMatch = rawPlus.match(/^(?:b\/)?(.+)$/);
+            const path = (pathMatch && pathMatch[1] ? pathMatch[1] : rawPlus).trim();
+            if (path) {
+                const synthetic = `diff --git a/${path} b/${path}\n${text}`;
+                const converted = convertGitDiffToApplyPatch(synthetic);
+                if (converted) {
+                    text = converted;
+                }
+            }
+        }
+    }
+    if (text.includes('*** Add File:')) {
+        text = text.replace(/\*\*\* Create File:/g, '*** Add File:');
+    }
+    let hasBegin = text.includes('*** Begin Patch');
+    const hasEnd = text.includes('*** End Patch');
+    if (hasBegin && !hasEnd) {
+        text = `${text}\n*** End Patch`;
+    }
+    if (!hasBegin && /^\*\*\* (Add|Update|Delete) File:/m.test(text)) {
+        text = `*** Begin Patch\n${text}\n*** End Patch`;
+        hasBegin = true;
+    }
+    if (!text.includes('*** Begin Patch')) {
+        return text;
+    }
+    const beginIndex = text.indexOf('*** Begin Patch');
+    if (beginIndex > 0) {
+        text = text.slice(beginIndex);
+    }
+    const endMarker = '*** End Patch';
+    const firstEndIndex = text.indexOf(endMarker);
+    const concatSignatures = [
+        `${endMarker}","input":"*** Begin Patch`,
+        `${endMarker}","patch":"*** Begin Patch`,
+        `${endMarker}\\",\\"input\\":\\"*** Begin Patch`,
+        `${endMarker}\\",\\"patch\\":\\"*** Begin Patch`
+    ];
+    const hasConcatenationSignal = concatSignatures.some((sig) => text.includes(sig));
+    if (hasConcatenationSignal && firstEndIndex >= 0) {
+        text = text.slice(0, firstEndIndex + endMarker.length);
+    }
+    else {
+        const lastEndIndex = text.lastIndexOf(endMarker);
+        if (lastEndIndex >= 0) {
+            const afterEnd = text.slice(lastEndIndex + endMarker.length);
+            if (afterEnd.trim().length > 0) {
+                text = text.slice(0, lastEndIndex + endMarker.length);
+            }
+        }
+    }
+    const lines = text.split('\n');
+    const output = [];
+    let inUpdateSection = false;
+    let afterUpdateHeader = false;
+    for (const line of lines) {
+        if (line.startsWith('*** Begin Patch')) {
+            output.push(line);
+            inUpdateSection = false;
+            afterUpdateHeader = false;
+            continue;
+        }
+        if (line.startsWith('*** End Patch')) {
+            output.push(line);
+            inUpdateSection = false;
+            afterUpdateHeader = false;
+            continue;
+        }
+        if (line.startsWith('*** Update File:')) {
+            output.push(line);
+            inUpdateSection = true;
+            afterUpdateHeader = true;
+            continue;
+        }
+        if (line.startsWith('*** Add File:') || line.startsWith('*** Delete File:')) {
+            output.push(line);
+            inUpdateSection = false;
+            afterUpdateHeader = false;
+            continue;
+        }
+        if (inUpdateSection) {
+            if (afterUpdateHeader && line.trim() === '') {
+                continue;
+            }
+            afterUpdateHeader = false;
+            if (line.startsWith('@@') || line.startsWith('+') || line.startsWith('-') || line.startsWith(' ')) {
+                output.push(line);
+            }
+            else {
+                output.push(` ${line}`);
+            }
+            continue;
+        }
+        output.push(line);
+    }
+    return output.join('\n');
+}
+/**
+ * Fix common patch text format issues:
+ * - Missing *** Begin/End Patch markers
+ * - Lines missing +/- prefix in update hunks
+ * - Extra whitespace/comments
+ */
+function fixPatchTextFormat(patchText) {
+    let text = patchText.trim();
+    if (!text)
+        return patchText;
+    // Fix 1: Add missing *** Begin Patch
+    const hasBegin = text.includes('*** Begin Patch');
+    const hasEnd = text.includes('*** End Patch');
+    const hasFileOp = /^\*\*\* (Add|Update|Delete) File:/m.test(text);
+    if (!hasBegin && hasFileOp) {
+        text = `*** Begin Patch\n${text}`;
+    }
+    // Fix 2: Add missing *** End Patch
+    if (text.includes('*** Begin Patch') && !hasEnd) {
+        text = `${text}\n*** End Patch`;
+    }
+    // Fix 3: Normalize lines in update hunks
+    text = fixHunkLinePrefixes(text);
+    return text;
+}
+/**
+ * Fix missing +/- prefixes in update hunks.
+ * Pattern: After "*** Update File:" and before next file operation,
+ * lines should start with ' ', '+', '-', or '@@'.
+ */
+function fixHunkLinePrefixes(text) {
+    const lines = text.split('\n');
+    const output = [];
+    let inUpdateHunk = false;
+    let afterUpdateHeader = false;
+    for (const line of lines) {
+        // Track section boundaries
+        if (line.startsWith('*** Begin Patch') || line.startsWith('*** End Patch')) {
+            output.push(line);
+            inUpdateHunk = false;
+            afterUpdateHeader = false;
+            continue;
+        }
+        if (line.startsWith('*** Update File:')) {
+            output.push(line);
+            inUpdateHunk = true;
+            afterUpdateHeader = true;
+            continue;
+        }
+        if (line.startsWith('*** Add File:') || line.startsWith('*** Delete File:')) {
+            output.push(line);
+            inUpdateHunk = false;
+            afterUpdateHeader = false;
+            continue;
+        }
+        // Skip empty line after Update File: header
+        if (inUpdateHunk && afterUpdateHeader && line.trim() === '') {
+            afterUpdateHeader = false;
+            continue;
+        }
+        if (inUpdateHunk) {
+            afterUpdateHeader = false;
+            // Line should start with ' ', '+', '-', or '@@'
+            if (line.startsWith('@@') || line.startsWith('+') || line.startsWith('-') || line.startsWith(' ')) {
+                output.push(line);
+            }
+            else if (line.trim() === '') {
+                // Empty lines in hunks become context lines
+                output.push(' ');
+            }
+            else {
+                // Missing prefix: treat as context line
+                output.push(` ${line}`);
+            }
+            continue;
+        }
+        // Not in update hunk: pass through
+        output.push(line);
+    }
+    return output.join('\n');
+}
+function stripConflictMarkers(patchText) {
+    if (!patchText.includes('<<<<<<<') && !patchText.includes('>>>>>>>') && !patchText.includes('=======')) {
+        return patchText;
+    }
+    const lines = patchText.split('\n');
+    const cleanLines = [];
+    for (const line of lines) {
+        if (line.startsWith('<<<<<<<')) {
+            if (line.includes('SEARCH')) {
+                cleanLines.push(line);
+            }
+            continue;
+        }
+        if (line.startsWith('=======')) {
+            cleanLines.push(line);
+            continue;
+        }
+        if (line.startsWith('>>>>>>>')) {
+            if (line.includes('REPLACE')) {
+                cleanLines.push(line);
+            }
+            continue;
+        }
+        cleanLines.push(line);
+    }
+    return cleanLines.join('\n');
+}
+function convertSearchReplaceChanges(payload) {
+    if (!payload || !Array.isArray(payload.changes))
+        return null;
+    const outChanges = [];
+    let saw = false;
+    for (const entry of payload.changes) {
+        if (!entry || typeof entry !== 'object')
+            continue;
+        if (typeof entry.search === 'string' && typeof entry.replace === 'string') {
+            outChanges.push({
+                kind: 'replace',
+                file: typeof entry.file === 'string' ? entry.file : payload.file,
+                target: entry.search,
+                lines: entry.replace
+            });
+            saw = true;
+        }
+    }
+    if (!saw)
+        return null;
+    return { file: payload.file, changes: outChanges };
+}
+function decodeEscapedNewlinesIfNeeded(value) {
+    if (!value)
+        return value;
+    if (value.includes('\n'))
+        return value;
+    const lower = value.toLowerCase();
+    const looksEscaped = value.includes('\\r\\n') ||
+        (value.includes('\\n') && /\\n[ \t]/.test(value)) ||
+        lower.includes('\\u000a') ||
+        lower.includes('\\u000d');
+    if (!looksEscaped) {
+        return value;
+    }
+    let out = value;
+    out = out.replace(/\\r\\n/g, '\n');
+    out = out.replace(/\\n/g, '\n');
+    out = out.replace(/\\r/g, '\n');
+    out = out.replace(/\\u000a/gi, '\n');
+    out = out.replace(/\\u000d/gi, '\n');
+    return out;
+}
+function stripCodeFences(text) {
+    const trimmed = text.trim();
+    if (!trimmed.startsWith('```')) {
+        return text;
+    }
+    const fenceRe = /^```(?:diff|patch|apply_patch|text|json)?[ \t]*\n([\s\S]*?)\n```/gmi;
+    const candidates = [];
+    let match = null;
+    while ((match = fenceRe.exec(trimmed))) {
+        if (match[1]) {
+            candidates.push(match[1].trim());
+        }
+    }
+    if (!candidates.length) {
+        return text;
+    }
+    for (const candidate of candidates) {
+        if (candidate.includes('*** Begin Patch') ||
+            candidate.includes('*** Update File:') ||
+            candidate.includes('diff --git')) {
+            return candidate;
+        }
+    }
+    return candidates[0] ?? text;
+}
+function convertGitDiffToApplyPatch(text) {
+    const lines = text.replace(/\r\n/g, '\n').split('\n');
+    const files = [];
+    let current = null;
+    const flush = () => {
+        if (!current)
+            return;
+        if (current.path && current.kind === 'delete') {
+            files.push(current);
+            current = null;
+            return;
+        }
+        if (!current.path || (current.kind === 'update' && current.lines.length === 0)) {
+            current = null;
+            return;
+        }
+        files.push(current);
+        current = null;
+    };
+    for (const raw of lines) {
+        const line = raw;
+        const diffMatch = line.match(/^diff --git a\/(.+?) b\/(.+)$/);
+        if (diffMatch) {
+            flush();
+            const path = diffMatch[2] || diffMatch[1];
+            current = { path, kind: 'update', lines: [] };
+            continue;
+        }
+        if (!current)
+            continue;
+        if (line.startsWith('new file mode')) {
+            current.kind = 'add';
+            continue;
+        }
+        if (line.startsWith('deleted file mode')) {
+            current.kind = 'delete';
+            continue;
+        }
+        if (line.startsWith('@@') || line.startsWith('+') || line.startsWith('-') || line.startsWith(' ')) {
+            current.lines.push(line);
+            continue;
+        }
+    }
+    flush();
+    if (!files.length)
+        return null;
+    const out = ['*** Begin Patch'];
+    for (const file of files) {
+        if (file.kind === 'add') {
+            out.push(`*** Add File: ${file.path}`);
+            for (const line of file.lines) {
+                out.push(line.startsWith('+') ? line : `+${line}`);
+            }
+        }
+        else if (file.kind === 'delete') {
+            out.push(`*** Delete File: ${file.path}`);
+        }
+        else {
+            out.push(`*** Update File: ${file.path}`);
+            out.push(...file.lines);
+        }
+    }
+    out.push('*** End Patch');
+    return out.join('\n');
+}
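
And a short sketch of the git-diff conversion path in normalizeApplyPatchText, traced from the implementation above; the file content and import path are made up for illustration.

```js
// Illustrative input: a bare git diff with no apply_patch envelope.
import { normalizeApplyPatchText } from './patch-args-normalizer.js'; // path illustrative

const gitDiff = [
  'diff --git a/src/app.ts b/src/app.ts',
  '@@',
  '-const a = 1;',
  '+const a = 2;'
].join('\n');

console.log(normalizeApplyPatchText(gitDiff));
// *** Begin Patch
// *** Update File: src/app.ts
// @@
// -const a = 1;
// +const a = 2;
// *** End Patch
```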

package/dist/tools/patch-regression-capturer.d.ts
@@ -0,0 +1 @@
+export { captureApplyPatchRegression } from './apply-patch/regression-capturer.js';

package/dist/tools/patch-regression-capturer.js
@@ -0,0 +1 @@
+export { captureApplyPatchRegression } from './apply-patch/regression-capturer.js';