sonance-brand-mcp 1.3.46 → 1.3.48
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -468,6 +468,8 @@ Return search/replace patches (NOT full files). The system applies your patches
 - "replace" contains your modified version
 - Include 2-4 lines of context in "search" to make it unique
 - You may ONLY edit files provided in the PAGE CONTEXT section
+- CRITICAL: NEVER invent or guess code. Your "search" string MUST be copied EXACTLY from the provided file content. If you cannot find the exact code to modify, return an empty modifications array.
+- If the file content appears truncated, only modify code that is visible in the provided content.
 
 **SONANCE BRAND COLORS:**
 - Charcoal: #333F48 (primary text)
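For reference, the response shape these rules describe can be read off the aiResponse type later in this diff. The sketch below is illustrative only: it assumes a Patch carries { search, replace } strings (the two fields the apply code reads), and the file path and class names are made up.

// Illustrative response following the rules above; not taken from the package's docs.
// Assumes Patch = { search: string; replace: string }, matching the fields used below.
const exampleResponse = {
  reasoning: "The hero button hard-codes black instead of the brand charcoal.",
  modifications: [
    {
      filePath: "components/HeroButton.tsx", // hypothetical path from PAGE CONTEXT
      patches: [
        {
          // "search" is copied exactly from the provided file, with surrounding context
          search: "<button\n  className=\"bg-[#000000] text-white\"\n>",
          replace: "<button\n  className=\"bg-[#333F48] text-white\"\n>",
        },
      ],
      explanation: "Use Sonance charcoal (#333F48) for the button background.",
    },
  ],
  explanation: "Recolored the hero button to the brand palette.",
};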
@@ -658,10 +660,10 @@ export async function POST(request: Request) {
 }
 
 // ========== SMART CONTEXT BUDGETING ==========
-//
-// Priority: Recommended file (
-const TOTAL_CONTEXT_BUDGET =
-const MAX_RECOMMENDED_FILE =
+// Claude can handle 200k tokens (~800k chars), so we can safely include large files
+// Priority: Recommended file (NEVER truncate) > Page file (limited) > Other components (dynamic)
+const TOTAL_CONTEXT_BUDGET = 500000; // 500k chars total budget
+const MAX_RECOMMENDED_FILE = Infinity; // NEVER truncate the target file - AI needs full context
 const MAX_PAGE_FILE = 2000; // Page file is just a wrapper
 const MAX_GLOBALS_CSS = 1500;
 const MAX_FILES = 25;
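The code that consumes these limits sits outside the hunks shown here, so the following is only a hypothetical sketch of how a character budget and file cap like the constants above are typically enforced when assembling prompt context; the helper name and signature are invented.

// Hypothetical sketch (not from the package): spend a character budget across files.
function fitFilesToBudget(
  files: { path: string; content: string }[],
  totalBudget: number, // e.g. TOTAL_CONTEXT_BUDGET
  maxFiles: number     // e.g. MAX_FILES
): { path: string; content: string }[] {
  const selected: { path: string; content: string }[] = [];
  let used = 0;
  for (const file of files.slice(0, maxFiles)) {
    if (used + file.content.length > totalBudget) break; // budget exhausted
    selected.push(file);
    used += file.content.length;
  }
  return selected;
}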
@@ -695,9 +697,8 @@ ${focusedElements.map((el) => `- ${el.name} (${el.type}) at (${el.coordinates.x}
 
 // ========== TARGET COMPONENT (RECOMMENDED FILE) - SHOWN FIRST ==========
 if (recommendedFileContent) {
-
-
-: recommendedFileContent.content;
+// Never truncate the recommended file - AI needs full context to avoid hallucination
+const content = recommendedFileContent.content;
 
 textContent += `═══════════════════════════════════════════════════════════════════════════════
 ⚡ TARGET COMPONENT - YOU MUST EDIT THIS FILE
@@ -808,90 +809,10 @@ CRITICAL: Edit the TARGET COMPONENT (marked with ***), not the page wrapper.`;
 text: textContent,
 });
 
-// Call Claude Vision API
+// Call Claude Vision API with retry mechanism
 const anthropic = new Anthropic({ apiKey });
-
-
-model: "claude-sonnet-4-20250514",
-max_tokens: 16384,
-messages: [
-{
-role: "user",
-content: messageContent,
-},
-],
-system: VISION_SYSTEM_PROMPT,
-});
-
-// Extract text content from response
-const textResponse = response.content.find((block) => block.type === "text");
-if (!textResponse || textResponse.type !== "text") {
-return NextResponse.json(
-{ error: "No text response from AI" },
-{ status: 500 }
-);
-}
-
-// Parse AI response - now expecting patches instead of full file content
-let aiResponse: {
-reasoning?: string;
-modifications: Array<{
-filePath: string;
-patches?: Patch[];
-// Legacy support for modifiedContent (will be deprecated)
-modifiedContent?: string;
-explanation?: string;
-}>;
-explanation?: string;
-};
-
-try {
-let jsonText = textResponse.text.trim();
-
-// Try to extract JSON from markdown code blocks
-const jsonMatch = jsonText.match(/```json\n([\s\S]*?)\n```/) ||
-jsonText.match(/```\n([\s\S]*?)\n```/);
-
-if (jsonMatch) {
-jsonText = jsonMatch[1];
-} else if (jsonText.includes("```json")) {
-const start = jsonText.indexOf("```json") + 7;
-const end = jsonText.lastIndexOf("```");
-if (end > start) {
-jsonText = jsonText.substring(start, end);
-}
-}
-
-jsonText = jsonText.trim();
-
-// Robust JSON extraction: find the first { and last } to extract JSON object
-// This handles cases where the LLM includes preamble text before the JSON
-const firstBrace = jsonText.indexOf('{');
-const lastBrace = jsonText.lastIndexOf('}');
-if (firstBrace !== -1 && lastBrace > firstBrace) {
-jsonText = jsonText.substring(firstBrace, lastBrace + 1);
-}
-
-aiResponse = JSON.parse(jsonText);
-} catch {
-console.error("Failed to parse AI response:", textResponse.text);
-return NextResponse.json(
-{ error: "Failed to parse AI response. Please try again." },
-{ status: 500 }
-);
-}
-
-if (!aiResponse.modifications || aiResponse.modifications.length === 0) {
-return NextResponse.json({
-success: true,
-sessionId: newSessionId,
-modifications: [],
-explanation: aiResponse.explanation || "No changes needed.",
-reasoning: aiResponse.reasoning,
-});
-}
-
-// Build set of valid file paths from page context
+
+// Build set of valid file paths from page context (needed for validation)
 const validFilePaths = new Set<string>();
 if (pageContext.pageFile) {
 validFilePaths.add(pageContext.pageFile);
@@ -905,15 +826,131 @@ CRITICAL: Edit the TARGET COMPONENT (marked with ***), not the page wrapper.`;
 validFilePaths.add(recommendedFileContent.path);
 }
 
-
-
-
-
-
+// Retry loop for handling patch failures
+const MAX_RETRIES = 1;
+let retryCount = 0;
+let lastPatchErrors: string[] = [];
+let modifications: VisionFileModification[] = [];
+let finalExplanation: string | undefined;
+let finalReasoning: string | undefined;
 
-
-
-
+while (retryCount <= MAX_RETRIES) {
+// Build messages for this attempt
+const currentMessages: Anthropic.MessageCreateParams["messages"] = [
+{
+role: "user",
+content: messageContent,
+},
+];
+
+// If this is a retry, add feedback about what went wrong
+if (retryCount > 0 && lastPatchErrors.length > 0) {
+debugLog("Retry attempt with feedback", { retryCount, errorCount: lastPatchErrors.length });
+currentMessages.push({
+role: "assistant",
+content: "I'll analyze the screenshot and generate the patches now.",
+});
+currentMessages.push({
+role: "user",
+content: `PATCH APPLICATION FAILED. Your previous patches referenced code that does not exist in the file (hallucination detected).
+
+Failed patches:
+${lastPatchErrors.join("\n\n")}
+
+IMPORTANT: You must copy the "search" string EXACTLY from the file content I provided. Do NOT invent or guess code.
+Look carefully at the ACTUAL file content in the TARGET COMPONENT section above and try again.
+
+If you cannot find the exact code to modify, return an empty modifications array with an explanation.`,
+});
+}
+
+const response = await anthropic.messages.create({
+model: "claude-sonnet-4-20250514",
+max_tokens: 16384,
+messages: currentMessages,
+system: VISION_SYSTEM_PROMPT,
+});
+
+// Extract text content from response
+const textResponse = response.content.find((block) => block.type === "text");
+if (!textResponse || textResponse.type !== "text") {
+return NextResponse.json(
+{ error: "No text response from AI" },
+{ status: 500 }
+);
+}
+
+// Parse AI response - now expecting patches instead of full file content
+let aiResponse: {
+reasoning?: string;
+modifications: Array<{
+filePath: string;
+patches?: Patch[];
+// Legacy support for modifiedContent (will be deprecated)
+modifiedContent?: string;
+explanation?: string;
+}>;
+explanation?: string;
+};
+
+try {
+let jsonText = textResponse.text.trim();
+
+// Try to extract JSON from markdown code blocks
+const jsonMatch = jsonText.match(/```json\n([\s\S]*?)\n```/) ||
+jsonText.match(/```\n([\s\S]*?)\n```/);
+
+if (jsonMatch) {
+jsonText = jsonMatch[1];
+} else if (jsonText.includes("```json")) {
+const start = jsonText.indexOf("```json") + 7;
+const end = jsonText.lastIndexOf("```");
+if (end > start) {
+jsonText = jsonText.substring(start, end);
+}
+}
+
+jsonText = jsonText.trim();
+
+// Robust JSON extraction: find the first { and last } to extract JSON object
+// This handles cases where the LLM includes preamble text before the JSON
+const firstBrace = jsonText.indexOf('{');
+const lastBrace = jsonText.lastIndexOf('}');
+if (firstBrace !== -1 && lastBrace > firstBrace) {
+jsonText = jsonText.substring(firstBrace, lastBrace + 1);
+}
+
+aiResponse = JSON.parse(jsonText);
+} catch {
+console.error("Failed to parse AI response:", textResponse.text);
+return NextResponse.json(
+{ error: "Failed to parse AI response. Please try again." },
+{ status: 500 }
+);
+}
+
+finalExplanation = aiResponse.explanation;
+finalReasoning = aiResponse.reasoning;
+
+if (!aiResponse.modifications || aiResponse.modifications.length === 0) {
+return NextResponse.json({
+success: true,
+sessionId: newSessionId,
+modifications: [],
+explanation: aiResponse.explanation || "No changes needed.",
+reasoning: aiResponse.reasoning,
+});
+}
+
+debugLog("VALIDATION: Valid file paths from page context", {
+pageFile: pageContext.pageFile,
+validFilePaths: Array.from(validFilePaths),
+aiRequestedFiles: aiResponse.modifications.map(m => m.filePath)
+});
+
+// Process modifications - apply patches to get modified content
+modifications = [];
+const patchErrors: string[] = [];
 
 for (const mod of aiResponse.modifications) {
 // Validate that the file path is in the page context
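The fence-stripping and brace-slicing steps inside the new retry loop can be read as one standalone helper. The sketch below mirrors that logic (slightly simplified: it drops the unterminated-```json fallback) and is shown only for readability; it is not a function the package exports.

// Sketch: the JSON-extraction steps above, pulled out into one function.
// Mirrors the inline logic (fence stripping, then first-{ / last-} slicing).
function extractJsonObject(raw: string): unknown {
  let jsonText = raw.trim();

  // Prefer an explicit ```json fence, then any fence
  const fenced =
    jsonText.match(/```json\n([\s\S]*?)\n```/) || jsonText.match(/```\n([\s\S]*?)\n```/);
  if (fenced) {
    jsonText = fenced[1];
  }

  // Trim preamble/trailer text around the outermost object
  const firstBrace = jsonText.indexOf("{");
  const lastBrace = jsonText.lastIndexOf("}");
  if (firstBrace !== -1 && lastBrace > firstBrace) {
    jsonText = jsonText.substring(firstBrace, lastBrace + 1);
  }

  return JSON.parse(jsonText); // throws if the model returned no parsable object
}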
@@ -947,6 +984,47 @@ CRITICAL: Edit the TARGET COMPONENT (marked with ***), not the page wrapper.`;
 // New patch-based approach
 console.log(`[Apply-First] Applying ${mod.patches.length} patches to ${mod.filePath}`);
 
+// PRE-VALIDATION: Check if all search strings exist in the file BEFORE applying
+const preValidationErrors: string[] = [];
+for (const patch of mod.patches) {
+const normalizedSearch = patch.search.replace(/\\n/g, "\n");
+if (!originalContent.includes(normalizedSearch)) {
+// Try fuzzy match as fallback
+const fuzzyMatch = findFuzzyMatch(normalizedSearch, originalContent);
+if (!fuzzyMatch) {
+// Find the closest matching snippet to help with debugging
+const searchPreview = normalizedSearch.substring(0, 80).replace(/\n/g, "\\n");
+
+// Look for partial matches to give helpful feedback
+const searchLines = normalizedSearch.split("\n").filter(l => l.trim().length > 10);
+const partialMatches: string[] = [];
+for (const line of searchLines.slice(0, 3)) {
+const trimmedLine = line.trim();
+if (trimmedLine.length > 10 && originalContent.includes(trimmedLine)) {
+partialMatches.push(trimmedLine.substring(0, 50));
+}
+}
+
+let errorMsg = `Patch search string not found: "${searchPreview}..."`;
+if (partialMatches.length > 0) {
+errorMsg += ` (partial matches found: ${partialMatches.join(", ")})`;
+}
+preValidationErrors.push(errorMsg);
+debugLog("Pre-validation failed: search string not found", {
+filePath: mod.filePath,
+searchPreview,
+partialMatches
+});
+}
+}
+}
+
+// If pre-validation failed, add to errors and skip this file
+if (preValidationErrors.length > 0) {
+patchErrors.push(`${mod.filePath}: AI generated patches with non-existent code (hallucination detected):\n${preValidationErrors.join("\n")}`);
+continue;
+}
+
 const patchResult = applyPatches(originalContent, mod.patches);
 
 if (!patchResult.success) {
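The pre-validation block above can also be pictured as a pure check over one file's patches. A minimal sketch, reusing the Patch type and findFuzzyMatch helper from elsewhere in this diff and assuming Patch carries { search, replace }:

// Sketch: pre-validation as a pure function over one file's patches.
// An empty return array means every search string was located.
function findHallucinatedPatches(patches: Patch[], fileContent: string): string[] {
  const errors: string[] = [];
  for (const patch of patches) {
    const search = patch.search.replace(/\\n/g, "\n");
    if (!fileContent.includes(search) && !findFuzzyMatch(search, fileContent)) {
      errors.push(`Patch search string not found: "${search.substring(0, 80)}..."`);
    }
  }
  return errors;
}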
@@ -993,22 +1071,39 @@ CRITICAL: Edit the TARGET COMPONENT (marked with ***), not the page wrapper.`;
 });
 }
 
-
-
-
-
-
-
-
-
-
-
-
+// If all modifications failed, check if we should retry
+if (patchErrors.length > 0 && modifications.length === 0) {
+if (retryCount < MAX_RETRIES) {
+console.warn(`[Apply-First] All patches failed, retrying (attempt ${retryCount + 1}/${MAX_RETRIES + 1})...`);
+debugLog("Retry triggered due to patch failures", {
+retryCount,
+errorCount: patchErrors.length,
+errors: patchErrors.slice(0, 3) // Log first 3 errors
+});
+lastPatchErrors = patchErrors;
+retryCount++;
+continue; // Retry the LLM call
+}
+
+// Exhausted retries, return error
+console.error("All AI patches failed after retries:", patchErrors);
+return NextResponse.json(
+{
+success: false,
+error: `Patch application failed (after ${retryCount} retry attempts):\n\n${patchErrors.join("\n\n")}`,
+},
+{ status: 400 }
+);
+}
 
-
-
-
-
+// Log patch errors as warnings if some modifications succeeded
+if (patchErrors.length > 0) {
+console.warn("Some patches failed:", patchErrors);
+}
+
+// Successfully processed at least some modifications - break out of retry loop
+break;
+} // End of retry loop
 
 // Create backups and apply changes atomically
 const applyResult = await applyChangesWithBackup(
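Stripped of the Anthropic call and response parsing, the retry flow introduced in this version follows a familiar feed-the-errors-back pattern. A generic sketch of that pattern, not code from the package:

// Generic sketch: run an attempt, feed its errors into the next attempt,
// give up after maxRetries extra tries (the route above uses MAX_RETRIES = 1).
async function retryWithFeedback<T>(
  attempt: (previousErrors: string[]) => Promise<{ result?: T; errors: string[] }>,
  maxRetries: number
): Promise<T> {
  let previousErrors: string[] = [];
  for (let retryCount = 0; retryCount <= maxRetries; retryCount++) {
    const { result, errors } = await attempt(previousErrors);
    if (result !== undefined) return result; // at least partially succeeded
    previousErrors = errors; // surfaced to the model in the next prompt
  }
  throw new Error(`All attempts failed:\n${previousErrors.join("\n")}`);
}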
@@ -1029,8 +1124,8 @@ CRITICAL: Edit the TARGET COMPONENT (marked with ***), not the page wrapper.`;
 sessionId: newSessionId,
 modifications,
 backupPaths: applyResult.backupPaths,
-explanation:
-reasoning:
+explanation: finalExplanation,
+reasoning: finalReasoning,
 });
 }
 
@@ -1753,32 +1848,58 @@ function searchFilesForKeywords(
 // Cache for tsconfig path aliases
 let cachedPathAliases: Map<string, string> | null = null;
 let cachedProjectRoot: string | null = null;
+let cachedTsconfigMtime: number | null = null;
+
+/**
+* Clean tsconfig.json content to make it valid JSON
+* tsconfig.json allows comments and trailing commas which JSON.parse doesn't support
+*/
+function cleanTsconfigContent(content: string): string {
+return content
+// Remove single-line comments
+.replace(/\/\/.*$/gm, "")
+// Remove multi-line comments
+.replace(/\/\*[\s\S]*?\*\//g, "")
+// Remove trailing commas before } or ]
+.replace(/,(\s*[}\]])/g, "$1")
+// Handle potential issues with escaped characters in strings
+.replace(/\r\n/g, "\n")
+// Remove any BOM
+.replace(/^\uFEFF/, "");
+}
 
 /**
 * Read and parse tsconfig.json to get path aliases
 */
 function getPathAliases(projectRoot: string): Map<string, string> {
-
+const tsconfigPath = path.join(projectRoot, "tsconfig.json");
+
+// Check cache validity - also check file modification time
 if (cachedPathAliases && cachedProjectRoot === projectRoot) {
+try {
+const stat = fs.statSync(tsconfigPath);
+if (cachedTsconfigMtime === stat.mtimeMs) {
 return cachedPathAliases;
+}
+} catch {
+// File doesn't exist or can't be read, continue with fresh parse
+}
 }
 
 const aliases = new Map<string, string>();
+let parsedSuccessfully = false;
 
 // Try to read tsconfig.json
-const tsconfigPath = path.join(projectRoot, "tsconfig.json");
 if (fs.existsSync(tsconfigPath)) {
 try {
 const content = fs.readFileSync(tsconfigPath, "utf-8");
-
-
-
-.replace(/\/\*[\s\S]*?\*\//g, "")
-.replace(/,\s*([\]}])/g, "$1");
+const cleanContent = cleanTsconfigContent(content);
+
+// Try to parse the cleaned content
 const tsconfig = JSON.parse(cleanContent);
+parsedSuccessfully = true;
 
 const paths = tsconfig.compilerOptions?.paths || {};
-const baseUrl = tsconfig.compilerOptions?.baseUrl || ".";
 
 // Parse path mappings
 for (const [alias, targets] of Object.entries(paths)) {
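A small usage illustration for cleanTsconfigContent, assuming it is in scope as defined above; the JSONC input is invented, and the line comment and trailing commas are stripped so JSON.parse succeeds:

// Illustrative only: a tsconfig-style input with a comment and trailing commas.
const jsonc = `{
  // path aliases for the app
  "compilerOptions": {
    "baseUrl": ".",
    "paths": {
      "@/*": ["./src/*"],
    },
  },
}`;
const parsed = JSON.parse(cleanTsconfigContent(jsonc));
console.log(parsed.compilerOptions.paths["@/*"]); // ["./src/*"]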
@@ -1791,8 +1912,30 @@ function getPathAliases(projectRoot: string): Map<string, string> {
 }
 
 debugLog("[apply] Loaded tsconfig path aliases", { aliases: Object.fromEntries(aliases) });
+
+// Update cache with mtime
+try {
+const stat = fs.statSync(tsconfigPath);
+cachedTsconfigMtime = stat.mtimeMs;
+} catch {
+cachedTsconfigMtime = null;
+}
 } catch (e) {
-
+// Log the error with more context for debugging
+const errorStr = String(e);
+const posMatch = errorStr.match(/position (\d+)/);
+let context = "";
+if (posMatch) {
+const pos = parseInt(posMatch[1], 10);
+const content = fs.readFileSync(tsconfigPath, "utf-8");
+context = `Near: "${content.substring(Math.max(0, pos - 20), pos + 20)}"`;
+}
+debugLog("[apply] Failed to parse tsconfig.json", { error: errorStr, context });
+
+// Clear cache on error so we retry next time
+cachedPathAliases = null;
+cachedProjectRoot = null;
+cachedTsconfigMtime = null;
 }
 }
 
@@ -1810,8 +1953,12 @@ function getPathAliases(projectRoot: string): Map<string, string> {
 debugLog("[apply] Using default @/ alias", { alias: aliases.get("@/") });
 }
 
+// Only cache if we parsed successfully or there's no tsconfig
+if (parsedSuccessfully || !fs.existsSync(tsconfigPath)) {
 cachedPathAliases = aliases;
 cachedProjectRoot = projectRoot;
+}
+
 return aliases;
 }
 
@@ -1925,6 +2072,105 @@ interface ApplyPatchesResult {
 failedPatches: { patch: Patch; error: string }[];
 }
 
+/**
+* Normalize whitespace in a string for comparison
+* Collapses all whitespace runs to single spaces and trims
+*/
+function normalizeWhitespace(str: string): string {
+return str.replace(/\s+/g, " ").trim();
+}
+
+/**
+* Find a fuzzy match for the search string in content
+* Returns the actual matched substring from content, or null if not found
+*/
+function findFuzzyMatch(search: string, content: string): { start: number; end: number; matched: string } | null {
+// Strategy 1: Try line-by-line matching with flexible indentation
+const searchLines = search.split("\n").map(l => l.trim()).filter(l => l.length > 0);
+if (searchLines.length === 0) return null;
+
+// Find the first non-empty line in content
+const contentLines = content.split("\n");
+const firstSearchLine = searchLines[0];
+
+for (let i = 0; i < contentLines.length; i++) {
+const contentLineTrimmed = contentLines[i].trim();
+
+// Check if this line matches the first search line
+if (contentLineTrimmed === firstSearchLine) {
+// Try to match all subsequent lines
+let matched = true;
+let searchLineIdx = 1;
+let contentLineIdx = i + 1;
+
+while (searchLineIdx < searchLines.length && contentLineIdx < contentLines.length) {
+const searchLineTrimmed = searchLines[searchLineIdx];
+const contentLineTrimmedNext = contentLines[contentLineIdx].trim();
+
+// Skip empty lines in content
+if (contentLineTrimmedNext === "" && searchLineTrimmed !== "") {
+contentLineIdx++;
+continue;
+}
+
+if (contentLineTrimmedNext !== searchLineTrimmed) {
+matched = false;
+break;
+}
+
+searchLineIdx++;
+contentLineIdx++;
+}
+
+if (matched && searchLineIdx === searchLines.length) {
+// Found a match! Calculate the actual positions
+let start = 0;
+for (let j = 0; j < i; j++) {
+start += contentLines[j].length + 1; // +1 for newline
+}
+
+let end = start;
+for (let j = i; j < contentLineIdx; j++) {
+end += contentLines[j].length + (j < contentLineIdx - 1 ? 1 : 0);
+}
+
+// Include trailing newline if the search had one
+if (search.endsWith("\n") && end < content.length && content[end] === "\n") {
+end++;
+}
+
+return {
+start,
+end,
+matched: content.substring(start, end)
+};
+}
+}
+}
+
+// Strategy 2: Normalized whitespace comparison
+const normalizedSearch = normalizeWhitespace(search);
+
+// Sliding window approach - find a substring that when normalized matches
+for (let windowStart = 0; windowStart < content.length; windowStart++) {
+// Find a reasonable end point (look for similar length with some tolerance)
+for (let windowEnd = windowStart + search.length - 20; windowEnd <= Math.min(content.length, windowStart + search.length + 50); windowEnd++) {
+if (windowEnd <= windowStart) continue;
+
+const candidate = content.substring(windowStart, windowEnd);
+if (normalizeWhitespace(candidate) === normalizedSearch) {
+return {
+start: windowStart,
+end: windowEnd,
+matched: candidate
+};
+}
+}
+}
+
+return null;
+}
+
 /**
 * Apply search/replace patches to file content
 * This is the core of the patch-based editing system
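An illustrative call to findFuzzyMatch as defined above: the search string uses smaller indentation than the file, and the line-by-line strategy still locates and returns the original span.

// Illustrative use of findFuzzyMatch: the search uses 2-space indentation,
// the file uses 4 spaces, and the match still resolves to the original text.
const fileContent = [
  "function greet(name: string) {",
  "    if (!name) {",
  "        return 'hello';",
  "    }",
  "}",
].join("\n");

const search = "  if (!name) {\n      return 'hello';\n  }";
const match = findFuzzyMatch(search, fileContent);
// match?.matched === "    if (!name) {\n        return 'hello';\n    }"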
@@ -1939,33 +2185,70 @@ function applyPatches(originalContent: string, patches: Patch[]): ApplyPatchesRe
 const normalizedSearch = patch.search.replace(/\\n/g, "\n");
 const normalizedReplace = patch.replace.replace(/\\n/g, "\n");
 
-//
-if (
-// Try with different whitespace normalization
-const flexibleSearch = normalizedSearch.replace(/\s+/g, "\\s+");
-const regex = new RegExp(flexibleSearch.replace(/[.*+?^${}()|[\]\\]/g, "\\$&").replace(/\\s\+/g, "\\s+"));
-
-if (!regex.test(content)) {
-failedPatches.push({
-patch,
-error: `Search string not found in file. First 50 chars of search: "${normalizedSearch.substring(0, 50)}..."`,
-});
-continue;
-}
-
-// If regex matched, use regex replace
-content = content.replace(regex, normalizedReplace);
-appliedPatches++;
-} else {
-// Exact match found - apply the replacement
-// Only replace the first occurrence to be safe
+// Strategy 1: Exact match
+if (content.includes(normalizedSearch)) {
 const index = content.indexOf(normalizedSearch);
 content =
 content.substring(0, index) +
 normalizedReplace +
 content.substring(index + normalizedSearch.length);
 appliedPatches++;
+debugLog("Patch applied (exact match)", {
+searchPreview: normalizedSearch.substring(0, 50)
+});
+continue;
 }
+
+// Strategy 2: Fuzzy match (handles indentation differences)
+const fuzzyMatch = findFuzzyMatch(normalizedSearch, content);
+if (fuzzyMatch) {
+// Apply the replacement, preserving the indentation from the original
+const originalIndent = fuzzyMatch.matched.match(/^(\s*)/)?.[1] || "";
+const replaceIndent = normalizedReplace.match(/^(\s*)/)?.[1] || "";
+
+// If indentation differs, adjust the replacement to match original
+let adjustedReplace = normalizedReplace;
+if (originalIndent !== replaceIndent) {
+// Get the indentation difference
+const originalLines = fuzzyMatch.matched.split("\n");
+const replaceLines = normalizedReplace.split("\n");
+
+if (originalLines.length > 0 && replaceLines.length > 0) {
+const baseIndent = originalLines[0].match(/^(\s*)/)?.[1] || "";
+const searchBaseIndent = normalizedSearch.split("\n")[0].match(/^(\s*)/)?.[1] || "";
+
+// Adjust each line's indentation
+adjustedReplace = replaceLines.map((line, idx) => {
+if (idx === 0 || line.trim() === "") return line;
+const lineIndent = line.match(/^(\s*)/)?.[1] || "";
+const relativeIndent = lineIndent.length - (searchBaseIndent?.length || 0);
+const newIndent = baseIndent + " ".repeat(Math.max(0, relativeIndent));
+return newIndent + line.trim();
+}).join("\n");
+}
+}
+
+content =
+content.substring(0, fuzzyMatch.start) +
+adjustedReplace +
+content.substring(fuzzyMatch.end);
+appliedPatches++;
+debugLog("Patch applied (fuzzy match)", {
+searchPreview: normalizedSearch.substring(0, 50),
+matchedPreview: fuzzyMatch.matched.substring(0, 50)
+});
+continue;
+}
+
+// No match found
+failedPatches.push({
+patch,
+error: `Search string not found in file. First 50 chars of search: "${normalizedSearch.substring(0, 50)}..."`,
+});
+debugLog("Patch failed - no match found", {
+searchPreview: normalizedSearch.substring(0, 100),
+searchLength: normalizedSearch.length
+});
 }
 
 return {
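And an illustrative call to applyPatches with a single exact-match patch; only the success and failedPatches fields visible in this diff are referenced, and the Patch literal assumes the { search, replace } shape used throughout.

// Illustrative only: apply one patch and report any failures.
const original = 'const color = "#000000";\nconst label = "Buy";\n';
const result = applyPatches(original, [
  { search: 'const color = "#000000";', replace: 'const color = "#333F48";' },
]);
if (!result.success) {
  console.error(result.failedPatches.map((f) => f.error));
}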