sonance-brand-mcp 1.3.110 → 1.3.111

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -1,11 +1,21 @@
  "use client";

- import React, { useState, useEffect, useCallback, useRef } from "react";
- import { Loader2, Send, Sparkles, Eye, AlertCircle, X, Crop } from "lucide-react";
+ import React, { useState, useEffect, useCallback, useRef, useMemo } from "react";
+ import { Loader2, Send, Sparkles, Eye, AlertCircle, X, Crop, User, Bot } from "lucide-react";
  import { cn } from "../../../lib/utils";
- import { ChatMessage, AIEditResult, PendingEdit, VisionFocusedElement, VisionPendingEdit, ApplyFirstSession } from "../types";
+ import {
+ ChatMessage,
+ ChatSession,
+ AIEditResult,
+ PendingEdit,
+ VisionFocusedElement,
+ VisionPendingEdit,
+ ApplyFirstSession
+ } from "../types";
  import html2canvas from "html2canvas-pro";
  import { ScreenshotAnnotator, Rectangle } from "./ScreenshotAnnotator";
+ import { ChatTabBar } from "./ChatTabBar";
+ import { ChatHistory } from "./ChatHistory";

  // Helper to detect location failure in explanation
  function isLocationFailure(explanation: string | undefined): boolean {
@@ -22,7 +32,6 @@ function isLocationFailure(explanation: string | undefined): boolean {

  /**
  * Draw a section highlight border on a screenshot image
- * This helps the LLM visually identify the target section for modifications
  */
  function drawSectionHighlight(
  screenshotDataUrl: string,
@@ -36,16 +45,13 @@ function drawSectionHighlight(
  canvas.height = img.height;
  const ctx = canvas.getContext('2d')!;

- // Draw original screenshot
  ctx.drawImage(img, 0, 0);

- // Draw section highlight border (teal/cyan to match Sonance brand)
  ctx.strokeStyle = '#00D3C8';
  ctx.lineWidth = 3;
- ctx.setLineDash([8, 4]); // Dashed line for visibility
+ ctx.setLineDash([8, 4]);
  ctx.strokeRect(sectionCoords.x, sectionCoords.y, sectionCoords.width, sectionCoords.height);

- // Semi-transparent fill to subtly highlight the area
  ctx.fillStyle = 'rgba(0, 211, 200, 0.08)';
  ctx.fillRect(sectionCoords.x, sectionCoords.y, sectionCoords.width, sectionCoords.height);

@@ -75,18 +81,21 @@ export interface ChatInterfaceProps {
  onSaveRequest: (edit: PendingEdit) => void;
  pendingEdit: PendingEdit | null;
  onClearPending: () => void;
- // Variant-scoped editing
  editScope?: "component" | "variant";
  variantId?: string | null;
  variantStyles?: VariantStyles | null;
- // Vision mode props
  visionMode?: boolean;
  visionFocusedElements?: VisionFocusedElement[];
  onVisionEditComplete?: (result: VisionPendingEdit) => void;
- // Apply-first mode - NEW: writes files immediately
  onApplyFirstComplete?: (session: ApplyFirstSession) => void;
  }

+ // Helper to generate a unique session ID
+ const generateSessionId = () => `session-${Date.now()}-${Math.random().toString(36).slice(2, 7)}`;
+
+ // Local storage key for sessions
+ const SESSIONS_STORAGE_KEY = 'sonance-devtools-chat-sessions';
+
  export function ChatInterface({
  componentType,
  componentName,
@@ -102,20 +111,83 @@ export function ChatInterface({
  onVisionEditComplete,
  onApplyFirstComplete,
  }: ChatInterfaceProps) {
- const [messages, setMessages] = useState<ChatMessage[]>([]);
- const [input, setInput] = useState("");
+ // Session management
+ const [sessions, setSessions] = useState<ChatSession[]>([]);
+ const [activeSessionId, setActiveSessionId] = useState<string | null>(null);
+
+ // Processing state
  const [isProcessing, setIsProcessing] = useState(false);
+ const [input, setInput] = useState("");
  const [toastMessage, setToastMessage] = useState<{ message: string; type: 'error' | 'warning' } | null>(null);
- const messagesEndRef = useRef<HTMLDivElement>(null);
+
  const inputRef = useRef<HTMLInputElement>(null);

  // Screenshot annotation state
  const [isAnnotating, setIsAnnotating] = useState(false);
  const [annotatedScreenshot, setAnnotatedScreenshot] = useState<string | null>(null);
  const [manualFocusBounds, setManualFocusBounds] = useState<Rectangle | null>(null);
- // Discovered elements from annotation tool (for targeting when no element was clicked)
  const [annotationDiscoveredElements, setAnnotationDiscoveredElements] = useState<VisionFocusedElement[]>([]);

+ // Get current session and messages
+ const activeSession = useMemo(() =>
+ sessions.find(s => s.id === activeSessionId) || null,
+ [sessions, activeSessionId]
+ );
+ const messages = activeSession?.messages || [];
+
+ // Initialize sessions from localStorage on mount
+ useEffect(() => {
+ try {
+ const stored = localStorage.getItem(SESSIONS_STORAGE_KEY);
+ if (stored) {
+ const parsed = JSON.parse(stored);
+ // Convert date strings back to Date objects
+ const hydratedSessions: ChatSession[] = parsed.map((s: ChatSession) => ({
+ ...s,
+ createdAt: new Date(s.createdAt),
+ updatedAt: new Date(s.updatedAt),
+ messages: s.messages.map((m: ChatMessage) => ({
+ ...m,
+ timestamp: new Date(m.timestamp),
+ })),
+ }));
+ setSessions(hydratedSessions);
+ if (hydratedSessions.length > 0) {
+ setActiveSessionId(hydratedSessions[0].id);
+ }
+ }
+ } catch (e) {
+ console.warn('Failed to load chat sessions from localStorage:', e);
+ }
+ }, []);
+
+ // Create initial session if none exist
+ useEffect(() => {
+ if (sessions.length === 0) {
+ const initialSession: ChatSession = {
+ id: generateSessionId(),
+ name: "New Chat",
+ messages: [],
+ createdAt: new Date(),
+ updatedAt: new Date(),
+ context: { visionMode },
+ };
+ setSessions([initialSession]);
+ setActiveSessionId(initialSession.id);
+ }
+ }, [sessions.length, visionMode]);
+
+ // Persist sessions to localStorage
+ useEffect(() => {
+ if (sessions.length > 0) {
+ try {
+ localStorage.setItem(SESSIONS_STORAGE_KEY, JSON.stringify(sessions));
+ } catch (e) {
+ console.warn('Failed to save chat sessions to localStorage:', e);
+ }
+ }
+ }, [sessions]);
+
  // Auto-dismiss toast after 5 seconds
  useEffect(() => {
  if (toastMessage) {
@@ -124,10 +196,63 @@ export function ChatInterface({
  }
  }, [toastMessage]);

- // Scroll to bottom when messages change
- useEffect(() => {
- messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
- }, [messages]);
+ // Session management handlers
+ const createSession = useCallback(() => {
+ const newSession: ChatSession = {
+ id: generateSessionId(),
+ name: "New Chat",
+ messages: [],
+ createdAt: new Date(),
+ updatedAt: new Date(),
+ context: { visionMode },
+ };
+ setSessions(prev => [newSession, ...prev]);
+ setActiveSessionId(newSession.id);
+ }, [visionMode]);
+
+ const closeSession = useCallback((sessionId: string) => {
+ setSessions(prev => {
+ const filtered = prev.filter(s => s.id !== sessionId);
+ // If closing active session, switch to first remaining
+ if (activeSessionId === sessionId && filtered.length > 0) {
+ setActiveSessionId(filtered[0].id);
+ }
+ return filtered;
+ });
+ }, [activeSessionId]);
+
+ const addMessage = useCallback((message: ChatMessage) => {
+ setSessions(prev => prev.map(s =>
+ s.id === activeSessionId
+ ? {
+ ...s,
+ messages: [...s.messages, message],
+ updatedAt: new Date(),
+ // Update session name based on first user message
+ name: s.name === "New Chat" && message.role === "user"
+ ? message.content.slice(0, 30) + (message.content.length > 30 ? "..." : "")
+ : s.name,
+ }
+ : s
+ ));
+ }, [activeSessionId]);
+
+ // Update a message's action status (for accept/revert)
+ const updateMessageAction = useCallback((messageId: string, status: "pending" | "accepted" | "reverted" | "error") => {
+ setSessions(prev => prev.map(s =>
+ s.id === activeSessionId
+ ? {
+ ...s,
+ messages: s.messages.map(m =>
+ m.id === messageId && m.action
+ ? { ...m, action: { ...m.action, status } }
+ : m
+ ),
+ updatedAt: new Date(),
+ }
+ : s
+ ));
+ }, [activeSessionId]);

  // Dynamically discover component file path via API
  const findComponentFile = useCallback(async (): Promise<string | null> => {
@@ -154,7 +279,6 @@ export function ChatInterface({
  try {
  const canvas = await html2canvas(document.body, {
  ignoreElements: (element) => {
- // Exclude DevTools overlay and vision mode border
  return (
  element.hasAttribute("data-sonance-devtools") ||
  element.hasAttribute("data-vision-mode-border")
@@ -162,7 +286,7 @@ export function ChatInterface({
  },
  useCORS: true,
  allowTaint: true,
- scale: 1, // Lower scale for smaller file size
+ scale: 1,
  });

  return canvas.toDataURL("image/png", 0.8);
@@ -172,38 +296,26 @@ export function ChatInterface({
  }
  }, []);

- // Start screenshot annotation - show drawing overlay on live app
+ // Start screenshot annotation
  const startAnnotation = useCallback(() => {
  console.log("[Vision Mode] Starting screenshot annotation overlay...");
  setIsAnnotating(true);
  }, []);

- // Handle annotation confirmation - screenshot is already captured and annotated
- // Now also receives discovered elements from within the drawn rectangle
+ // Handle annotation confirmation
  const handleAnnotationConfirm = useCallback((annotated: string, bounds: Rectangle, discoveredElements: VisionFocusedElement[]) => {
- console.log("[Vision Mode] Annotation confirmed:", {
- bounds,
- discoveredElementsCount: discoveredElements.length,
- discoveredElements: discoveredElements.map(e => ({
- name: e.name,
- text: e.textContent?.substring(0, 30),
- id: e.elementId,
- })),
- });
+ console.log("[Vision Mode] Annotation confirmed:", { bounds, discoveredElementsCount: discoveredElements.length });
  setAnnotatedScreenshot(annotated);
  setManualFocusBounds(bounds);
  setAnnotationDiscoveredElements(discoveredElements);
  setIsAnnotating(false);
- // Focus the input so user can type their prompt
  setTimeout(() => inputRef.current?.focus(), 100);
  }, []);

- // Handle annotation cancel
  const handleAnnotationCancel = useCallback(() => {
  setIsAnnotating(false);
  }, []);

- // Clear the current annotation and discovered elements
  const clearAnnotation = useCallback(() => {
  setAnnotatedScreenshot(null);
  setManualFocusBounds(null);
@@ -212,20 +324,14 @@ export function ChatInterface({

  // Handle vision mode edit request
  const handleVisionEdit = async (prompt: string) => {
- // Use Apply-First mode if callback is provided (new Cursor-style workflow)
  const useApplyFirst = !!onApplyFirstComplete;

- // Determine which focused elements to use:
- // - If user clicked an element, use visionFocusedElements (passed from parent)
- // - If user used annotation tool without clicking, use annotationDiscoveredElements
  const effectiveFocusedElements = visionFocusedElements.length > 0
  ? visionFocusedElements
  : annotationDiscoveredElements;

  console.log("[Vision Mode] Starting edit request:", {
  prompt,
- focusedElementsFromClick: visionFocusedElements.length,
- focusedElementsFromAnnotation: annotationDiscoveredElements.length,
  effectiveFocusedElements: effectiveFocusedElements.length,
  mode: useApplyFirst ? "apply-first" : "preview-first"
  });
@@ -237,7 +343,7 @@ export function ChatInterface({
  timestamp: new Date(),
  };

- setMessages((prev) => [...prev, userMessage]);
+ addMessage(userMessage);
  setInput("");
  if (inputRef.current) inputRef.current.value = "";
  setIsProcessing(true);
@@ -245,63 +351,39 @@ export function ChatInterface({
  try {
  let screenshot: string | null;

- // PRIORITY 1: Use manually annotated screenshot if available
- // This is when user drew a focus area using the annotation tool
  if (annotatedScreenshot) {
- console.log("[Vision Mode] Using manually annotated screenshot with discovered elements:", {
- discoveredCount: annotationDiscoveredElements.length,
- elements: annotationDiscoveredElements.slice(0, 3).map(e => ({
- name: e.name,
- text: e.textContent?.substring(0, 20),
- id: e.elementId,
- })),
- });
  screenshot = annotatedScreenshot;
- // Clear the annotation after use (but keep discovered elements for the API call)
  setAnnotatedScreenshot(null);
  setManualFocusBounds(null);
  } else {
- // PRIORITY 2: Capture fresh screenshot and auto-annotate with section highlight
- console.log("[Vision Mode] Capturing screenshot...");
  const rawScreenshot = await captureScreenshot();
- console.log("[Vision Mode] Screenshot captured:", rawScreenshot ? `${rawScreenshot.length} bytes` : "null");
-
- // Annotate screenshot with section highlight if parent section exists
- // This helps the LLM visually identify the target area for modifications
  screenshot = rawScreenshot;
  if (rawScreenshot && effectiveFocusedElements.length > 0) {
  const parentSection = effectiveFocusedElements[0].parentSection;
  if (parentSection?.coordinates) {
  screenshot = await drawSectionHighlight(rawScreenshot, parentSection.coordinates);
- console.log("[Vision Mode] Added section highlight to screenshot:", {
- sectionType: parentSection.type,
- sectionText: parentSection.sectionText?.substring(0, 30),
- });
  }
  }
  }

- // Choose API endpoint based on mode
  const endpoint = useApplyFirst ? "/api/sonance-vision-apply" : "/api/sonance-vision-edit";
- console.log("[Vision Mode] Sending to API:", endpoint, {
- effectiveFocusedElements: effectiveFocusedElements.length,
- });
+
+ // Build chat history for context
+ const chatHistory = messages.map(m => ({ role: m.role, content: m.content }));

  const response = await fetch(endpoint, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
- // Apply-First: write files immediately so HMR shows changes
  action: useApplyFirst ? "apply" : "edit",
  screenshot,
  pageRoute: window.location.pathname,
  userPrompt: prompt,
- // Use effective focused elements (from click OR from annotation discovery)
  focusedElements: effectiveFocusedElements,
+ chatHistory,
  }),
  });

- // Clear annotation discovered elements after API call
  setAnnotationDiscoveredElements([]);

  const data = await response.json();
@@ -309,19 +391,14 @@ export function ChatInterface({
  success: data.success,
  sessionId: data.sessionId,
  modificationsCount: data.modifications?.length || 0,
- hasCss: !!data.aggregatedPreviewCSS,
- error: data.error,
  });

- // Check if this is a "location failure" case - element could not be found in code
  const hasLocationFailure = isLocationFailure(data.explanation);
  const hasNoModifications = !data.modifications || data.modifications.length === 0;
  const isElementNotFound = hasLocationFailure && hasNoModifications;

- // Build appropriate message based on result
  let messageContent: string;
  if (isElementNotFound) {
- // Element not found - provide helpful guidance
  messageContent = (data.explanation || "Could not locate the clicked element in the source code.") +
  "\n\nTry clicking on a different element or describe what you want to change in more detail.";
  } else if (data.success) {
@@ -332,36 +409,39 @@ export function ChatInterface({
  messageContent = data.error || "Failed to generate changes.";
  }

+ // Build assistant message with inline action for diff display
  const assistantMessage: ChatMessage = {
  id: `msg-${Date.now()}-response`,
  role: "assistant",
  content: messageContent,
  timestamp: new Date(),
+ // Add inline action if we have modifications
+ action: data.success && data.modifications && data.modifications.length > 0 ? {
+ type: "diff",
+ status: "pending",
+ sessionId: data.sessionId,
+ explanation: data.explanation,
+ files: data.modifications.map((m: { filePath: string; diff: string; originalContent?: string; modifiedContent?: string }) => ({
+ path: m.filePath,
+ diff: m.diff,
+ originalContent: m.originalContent,
+ modifiedContent: m.modifiedContent,
+ })),
+ } : undefined,
  };

- setMessages((prev) => [...prev, assistantMessage]);
+ addMessage(assistantMessage);

- // Handle element not found case - show toast and do NOT trigger page refresh
  if (isElementNotFound) {
- console.log("[Vision Mode] Element not found - blocking page refresh:", {
- explanation: data.explanation,
- modifications: data.modifications?.length || 0,
- });
  setToastMessage({
  message: "Could not locate the clicked element in the source code",
  type: 'warning'
  });
- // Do NOT call onApplyFirstComplete - this prevents page refresh
  return;
  }

  if (data.success && data.modifications && data.modifications.length > 0) {
  if (useApplyFirst && onApplyFirstComplete) {
- // Apply-First mode: files are already written, user can see changes via HMR
- console.log("[Apply-First] Calling onApplyFirstComplete with:", {
- sessionId: data.sessionId,
- modifications: data.modifications.map((m: { filePath: string }) => m.filePath),
- });
  onApplyFirstComplete({
  sessionId: data.sessionId,
  modifications: data.modifications,
@@ -370,19 +450,12 @@ export function ChatInterface({
  backupPaths: data.backupPaths || [],
  });
  } else if (onVisionEditComplete) {
- // Preview-First mode (legacy): just preview CSS
- console.log("[Vision Mode] Calling onVisionEditComplete with:", {
- modifications: data.modifications.map((m: { filePath: string }) => m.filePath),
- cssLength: data.aggregatedPreviewCSS?.length || 0,
- });
  onVisionEditComplete({
  modifications: data.modifications,
  aggregatedPreviewCSS: data.aggregatedPreviewCSS || "",
  explanation: data.explanation || "",
  });
  }
- } else if (!data.success) {
- console.error("[Vision Mode] API returned error:", data.error);
  }
  } catch (error) {
  console.error("[Vision Mode] Request failed:", error);
@@ -392,24 +465,21 @@ export function ChatInterface({
  content: error instanceof Error ? error.message : "Vision mode error occurred",
  timestamp: new Date(),
  };
- setMessages((prev) => [...prev, errorMessage]);
+ addMessage(errorMessage);
  } finally {
  setIsProcessing(false);
  }
  };

  const handleSend = async (prompt: string) => {
- // Fallback: read from DOM if React state is empty (browser automation compatibility)
  const actualPrompt = prompt || inputRef.current?.value || "";

  if (!actualPrompt.trim() || isProcessing) return;

- // Use vision mode handler if vision mode is active
  if (visionMode) {
  return handleVisionEdit(actualPrompt);
  }

- // If no component is selected, intercept the request
  if (componentType === "all") {
  const userMessage: ChatMessage = {
  id: `msg-${Date.now()}`,
@@ -417,7 +487,7 @@ export function ChatInterface({
  content: actualPrompt,
  timestamp: new Date(),
  };
- setMessages((prev) => [...prev, userMessage]);
+ addMessage(userMessage);
  setInput("");
  if (inputRef.current) inputRef.current.value = "";

@@ -428,7 +498,7 @@ export function ChatInterface({
  content: "Please select a component using the cursor icon in the header to edit it.",
  timestamp: new Date(),
  };
- setMessages((prev) => [...prev, assistantMessage]);
+ addMessage(assistantMessage);
  }, 300);
  return;
  }
@@ -440,20 +510,18 @@ export function ChatInterface({
  timestamp: new Date(),
  };

- setMessages((prev) => [...prev, userMessage]);
+ addMessage(userMessage);
  setInput("");
  if (inputRef.current) inputRef.current.value = "";
  setIsProcessing(true);

  try {
- // Dynamically find the component file
  const filePath = await findComponentFile();

  if (!filePath) {
- throw new Error(`Could not locate component file for "${componentType}". The component may not exist in the expected directories.`);
+ throw new Error(`Could not locate component file for "${componentType}".`);
  }

- // First, fetch the current component source
  const sourceResponse = await fetch(
  `/api/sonance-component-source?file=${encodeURIComponent(filePath)}`
  );
@@ -464,7 +532,8 @@ export function ChatInterface({

  const sourceData = await sourceResponse.json();

- // Then, send to AI for editing
+ const chatHistory = messages.map(m => ({ role: m.role, content: m.content }));
+
  const editResponse = await fetch("/api/sonance-ai-edit", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
@@ -474,10 +543,10 @@ export function ChatInterface({
  filePath,
  currentCode: sourceData.content,
  userRequest: actualPrompt,
- // Variant-scoped editing context
  editScope,
  variantId: editScope === "variant" ? variantId : undefined,
  variantStyles: editScope === "variant" ? variantStyles : undefined,
+ chatHistory,
  }),
  });

@@ -491,20 +560,30 @@ export function ChatInterface({
  : editData.error || "Failed to generate changes.",
  timestamp: new Date(),
  editResult: editData,
+ // Add inline action for component edit diffs
+ action: editData.success && editData.modifiedCode ? {
+ type: "diff",
+ status: "pending",
+ explanation: editData.explanation,
+ files: [{
+ path: filePath,
+ diff: editData.diff || "",
+ originalContent: sourceData.content,
+ modifiedContent: editData.modifiedCode,
+ }],
+ } : undefined,
  };

- setMessages((prev) => [...prev, assistantMessage]);
+ addMessage(assistantMessage);

  if (editData.success && editData.modifiedCode) {
  onEditComplete(editData);
- // Set up pending edit for save
  onSaveRequest({
  filePath,
  originalCode: sourceData.content,
  modifiedCode: editData.modifiedCode,
  diff: editData.diff || "",
  explanation: editData.explanation || "",
- // AI-provided CSS for live preview (no parsing needed)
  previewCSS: editData.previewCSS || "",
  });
  }
@@ -516,238 +595,267 @@ export function ChatInterface({
  timestamp: new Date(),
  editResult: { success: false, error: String(error) },
  };
- setMessages((prev) => [...prev, errorMessage]);
+ addMessage(errorMessage);
  } finally {
  setIsProcessing(false);
  }
  };

+ // Handle accept/revert from inline diff preview
+ const handleAcceptChanges = useCallback((messageId: string) => {
+ console.log("[Chat] Accept changes for message:", messageId);
+ updateMessageAction(messageId, "accepted");
+ // TODO: Call actual accept API based on message's action.sessionId
+ }, [updateMessageAction]);
+
+ const handleRevertChanges = useCallback((messageId: string) => {
+ console.log("[Chat] Revert changes for message:", messageId);
+ updateMessageAction(messageId, "reverted");
+ // TODO: Call actual revert API based on message's action.sessionId
+ }, [updateMessageAction]);
+
  return (
- <div className="space-y-3">
+ <div className="flex flex-col h-full">
  {/* Toast Notification */}
  {toastMessage && (
  <div
  className={cn(
- "flex items-center gap-2 p-3 rounded-md text-sm animate-in slide-in-from-top-2",
+ "flex items-center gap-2 px-3 py-2 rounded-lg text-xs shadow-lg mb-2",
+ "animate-in slide-in-from-top-2 duration-200",
  toastMessage.type === 'error'
- ? "bg-red-50 border border-red-200 text-red-700"
- : "bg-amber-50 border border-amber-200 text-amber-700"
+ ? "bg-red-500 text-white"
+ : "bg-amber-500 text-white"
  )}
  >
- <AlertCircle className="h-4 w-4 flex-shrink-0" />
+ <AlertCircle className="h-3.5 w-3.5 flex-shrink-0" />
  <span className="flex-1">{toastMessage.message}</span>
  <button
  onClick={() => setToastMessage(null)}
- className="p-0.5 hover:bg-black/5 rounded"
+ className="p-0.5 hover:bg-white/20 rounded flex-shrink-0 transition-colors"
  >
  <X className="h-3 w-3" />
  </button>
  </div>
  )}

- {/* Vision Mode Banner */}
+ {/* Chat Tab Bar */}
+ <ChatTabBar
+ sessions={sessions}
+ activeSessionId={activeSessionId}
+ onSelectSession={setActiveSessionId}
+ onCreateSession={createSession}
+ onCloseSession={closeSession}
+ />
+
+ {/* Vision Mode Header */}
  {visionMode && (
- <div className="p-2 bg-purple-50 border border-purple-200 rounded-md">
- <div className="flex items-center gap-2 text-purple-700 font-medium text-xs mb-1">
- <Eye className="h-3 w-3" />
- <span>Vision Mode Active</span>
+ <div className="flex items-center gap-2 px-3 py-2 bg-gradient-to-r from-purple-500/10 to-purple-600/5 dark:from-purple-500/20 dark:to-purple-600/10 border-b border-purple-200/50 dark:border-purple-700/50">
+ <div className="w-6 h-6 rounded-full bg-purple-500 flex items-center justify-center">
+ <Eye className="h-3 w-3 text-white" />
  </div>
- {visionFocusedElements.length > 0 ? (
- <div className="text-purple-600 text-xs">
- {visionFocusedElements.length} element{visionFocusedElements.length !== 1 ? "s" : ""} focused
- </div>
- ) : (
- <div className="text-purple-500 text-xs">
- Click elements to focus AI attention, then describe your changes
- </div>
+ <div className="flex-1">
+ <p className="text-[11px] font-semibold text-purple-700 dark:text-purple-300">Vision Mode Active</p>
+ <p className="text-[10px] text-purple-500 dark:text-purple-400">
+ {visionFocusedElements.length > 0
+ ? `${visionFocusedElements.length} element${visionFocusedElements.length > 1 ? 's' : ''} selected`
+ : 'Click elements to focus AI attention'}
+ </p>
+ </div>
+ {visionFocusedElements.length > 0 && (
+ <span className="text-xs px-2 py-0.5 bg-purple-500 text-white rounded-full font-medium">
+ {visionFocusedElements.length}
+ </span>
  )}
  </div>
  )}

- {/* AI Hint - only show when no messages yet and not in vision mode */}
- {messages.length === 0 && componentType !== "all" && !visionMode && (
- <p className="text-xs text-gray-500 italic">
- Describe any styling changes you'd like to make to this component.
- </p>
+ {/* Chat History - Scrollable */}
+ {messages.length > 0 ? (
+ <ChatHistory
+ messages={messages}
+ onAcceptChanges={handleAcceptChanges}
+ onRevertChanges={handleRevertChanges}
+ visionMode={visionMode}
+ />
+ ) : (
+ /* Empty State */
+ <div className="flex-1 flex flex-col items-center justify-center py-6 px-4 text-center bg-background">
+ <div className="w-10 h-10 rounded-full bg-gradient-to-br from-[#00A3E1] to-[#00D3C8] flex items-center justify-center mb-3 shadow-lg">
+ <Sparkles className="h-5 w-5 text-white" />
+ </div>
+ <p className="text-xs font-medium text-foreground mb-1">AI Design Assistant</p>
+ <p className="text-[10px] text-foreground-muted max-w-[180px]">
+ {visionMode
+ ? "Click elements or draw a focus area, then describe changes"
+ : componentType === "all"
+ ? "Select a component to start editing"
+ : `Describe changes for ${componentName}`}
+ </p>
+ </div>
  )}

- {/* Chat Messages */}
- {messages.length > 0 && (
- <div className="max-h-48 overflow-y-auto space-y-2 p-2 rounded border border-gray-200 bg-gray-50">
- {messages.map((msg) => (
- <div
- key={msg.id}
- className={cn(
- "text-xs p-2 rounded",
- msg.role === "user"
- ? "bg-[#00A3E1]/10 text-gray-800 ml-4"
- : "bg-white border border-gray-200 mr-4"
- )}
+ {/* Input Area */}
+ <div className="mt-auto pt-2 border-t border-border px-3 pb-2 bg-background">
+ {/* Annotation indicator */}
+ {annotatedScreenshot && visionMode && (
+ <div className="flex items-center justify-between text-[10px] text-[#00D3C8] bg-gradient-to-r from-[#00D3C8]/10 to-transparent dark:from-[#00D3C8]/20 px-3 py-1.5 rounded-lg mb-2">
+ <span className="flex items-center gap-1.5 font-medium">
+ <Crop className="h-3 w-3" />
+ Focus area selected
+ </span>
+ <button
+ onClick={clearAnnotation}
+ className="text-[#00D3C8] hover:text-[#00b3a8] p-1 hover:bg-[#00D3C8]/10 rounded transition-colors"
+ title="Clear focus"
  >
- <div className="flex items-start gap-2">
- {msg.role === "assistant" && (
- <Sparkles className="h-3 w-3 text-[#00A3E1] mt-0.5 flex-shrink-0" />
- )}
- <span id="summary-row-span-msgcontent">{msg.content}</span>
- </div>
+ <X className="h-3 w-3" />
+ </button>
+ </div>
+ )}
+
+ {/* Processing Indicator */}
+ {isProcessing && (
+ <div className={cn(
+ "flex items-center gap-2 text-xs px-3 py-2 rounded-lg mb-2",
+ visionMode
+ ? "bg-gradient-to-r from-purple-500/10 to-purple-600/5 dark:from-purple-500/20 dark:to-purple-600/10 text-purple-600 dark:text-purple-400"
+ : "bg-gradient-to-r from-[#00A3E1]/10 to-[#00D3C8]/5 dark:from-[#00A3E1]/20 dark:to-[#00D3C8]/10 text-[#00A3E1]"
+ )}>
+ <div className="relative">
+ <Loader2 className="h-4 w-4 animate-spin" />
+ <div className="absolute inset-0 rounded-full animate-ping opacity-20 bg-current" />
  </div>
- ))}
- <div ref={messagesEndRef} />
- </div>
- )}
+ <span className="font-medium">
+ {visionMode ? "Analyzing page..." : "Generating changes..."}
+ </span>
+ </div>
+ )}

- {/* Input */}
- <div
- className="flex gap-2"
- onPointerDown={(e) => {
- // Force focus to input when clicking anywhere in this container
- // This bypasses modal focus traps by using requestAnimationFrame
- e.stopPropagation();
- const input = inputRef.current;
- if (input && !isProcessing) {
- // Blur any currently focused element first (escape focus trap)
- if (document.activeElement && document.activeElement !== input) {
- (document.activeElement as HTMLElement).blur?.();
- }
- // Use rAF to ensure focus happens after any focus trap logic runs
- requestAnimationFrame(() => {
- input.focus();
- // Also try native focus method as fallback
- input.click();
- });
- }
- }}
- >
- <input
- ref={inputRef}
- type="text"
- value={input}
- onChange={(e) => setInput(e.target.value)}
- onClick={(e) => {
+ {/* Input Row */}
+ <div
+ className="flex items-center gap-2"
+ onPointerDown={(e) => {
  e.stopPropagation();
- e.preventDefault();
- // Force focus using multiple strategies
- const input = inputRef.current;
- if (input) {
- // Escape any focus trap
- if (document.activeElement && document.activeElement !== input) {
+ const inputEl = inputRef.current;
+ if (inputEl && !isProcessing) {
+ if (document.activeElement && document.activeElement !== inputEl) {
  (document.activeElement as HTMLElement).blur?.();
  }
- input.focus();
+ requestAnimationFrame(() => {
+ inputEl.focus();
+ inputEl.click();
+ });
  }
  }}
- onPointerDown={(e) => {
- e.stopPropagation();
- // Don't preventDefault here - let native click handling work
- const input = inputRef.current;
- if (input) {
- requestAnimationFrame(() => input.focus());
- }
- }}
- onMouseDown={(e) => {
- e.stopPropagation();
- const input = inputRef.current;
- if (input) {
- requestAnimationFrame(() => input.focus());
- }
- }}
- onKeyDown={(e) => {
- if (e.key === "Enter" && !e.shiftKey) {
- e.preventDefault();
- handleSend(input || inputRef.current?.value || "");
- }
- }}
- placeholder={
- visionMode
- ? "Describe what changes you want to make on this page..."
- : componentType === "all"
- ? "Select a component to start editing..."
- : `Describe changes to ${componentName}...`
- }
- disabled={isProcessing}
- className={cn(
- "flex-1 px-3 py-2 text-xs rounded border",
- visionMode
- ? "border-purple-200 focus:ring-purple-500 focus:border-purple-500"
- : "border-gray-200 focus:ring-[#00A3E1] focus:border-[#00A3E1]",
- "focus:outline-none focus:ring-1",
- "placeholder:text-gray-400",
- "disabled:opacity-50 disabled:bg-gray-50"
+ >
+ {/* Annotate button - only in vision mode */}
+ {visionMode && (
+ <button
+ onClick={startAnnotation}
+ onPointerDown={(e) => e.stopPropagation()}
+ disabled={isProcessing}
+ title="Draw focus area"
+ className={cn(
+ "p-2 rounded-lg transition-all duration-200",
+ annotatedScreenshot
+ ? "bg-[#00D3C8] text-white shadow-md shadow-[#00D3C8]/30"
+ : "bg-secondary text-foreground-secondary hover:bg-secondary-hover hover:text-foreground",
+ "disabled:opacity-50 disabled:cursor-not-allowed"
+ )}
+ >
+ <Crop className="h-4 w-4" />
+ </button>
  )}
- />
-
- {/* Annotate screenshot button - only in vision mode */}
- {visionMode && (
+
+ {/* Input Field */}
+ <div className="flex-1 relative">
+ <input
+ ref={inputRef}
+ type="text"
+ value={input}
+ onChange={(e) => setInput(e.target.value)}
+ onClick={(e) => {
+ e.stopPropagation();
+ e.preventDefault();
+ const inputEl = inputRef.current;
+ if (inputEl) {
+ if (document.activeElement && document.activeElement !== inputEl) {
+ (document.activeElement as HTMLElement).blur?.();
+ }
+ inputEl.focus();
+ }
+ }}
+ onPointerDown={(e) => {
+ e.stopPropagation();
+ const inputEl = inputRef.current;
+ if (inputEl) {
+ requestAnimationFrame(() => inputEl.focus());
+ }
+ }}
+ onMouseDown={(e) => {
+ e.stopPropagation();
+ const inputEl = inputRef.current;
+ if (inputEl) {
+ requestAnimationFrame(() => inputEl.focus());
+ }
+ }}
+ onKeyDown={(e) => {
+ if (e.key === "Enter" && !e.shiftKey) {
+ e.preventDefault();
+ handleSend(input || inputRef.current?.value || "");
+ }
+ }}
+ placeholder={
+ visionMode
+ ? "Describe the changes you want..."
+ : componentType === "all"
+ ? "Select a component first..."
+ : `What would you like to change?`
+ }
+ disabled={isProcessing}
+ className={cn(
+ "w-full px-3 py-2.5 text-xs rounded-xl border-2 transition-all duration-200",
+ "bg-background text-foreground",
+ visionMode
+ ? "border-purple-200 dark:border-purple-700 focus:border-purple-400 dark:focus:border-purple-500 focus:ring-2 focus:ring-purple-500/20"
+ : "border-border focus:border-[#00A3E1] focus:ring-2 focus:ring-[#00A3E1]/20",
+ "focus:outline-none",
+ "placeholder:text-foreground-muted",
+ "disabled:opacity-50 disabled:bg-secondary",
+ "shadow-sm"
+ )}
+ />
+ </div>
+
+ {/* Send button */}
  <button
- onClick={startAnnotation}
+ onClick={() => handleSend(input || inputRef.current?.value || "")}
  onPointerDown={(e) => e.stopPropagation()}
- disabled={isProcessing}
- title="Draw on screenshot to focus AI attention"
+ disabled={isProcessing || !input.trim()}
  className={cn(
- "px-3 py-2 rounded transition-colors",
- annotatedScreenshot
- ? "bg-[#00D3C8] text-[#1a1a1a]" // Teal when annotation is active
- : "bg-gray-100 text-gray-600 hover:bg-gray-200",
- "disabled:opacity-50 disabled:cursor-not-allowed"
+ "p-2.5 rounded-xl transition-all duration-200 shadow-md",
+ visionMode
+ ? "bg-gradient-to-br from-purple-500 to-purple-600 text-white hover:from-purple-600 hover:to-purple-700 shadow-purple-500/30"
+ : "bg-gradient-to-br from-[#00A3E1] to-[#0090c8] text-white hover:from-[#0090c8] hover:to-[#007ab3] shadow-[#00A3E1]/30",
+ "disabled:opacity-40 disabled:cursor-not-allowed disabled:shadow-none",
+ !isProcessing && input.trim() && "hover:scale-105 active:scale-95"
  )}
  >
- <Crop className="h-4 w-4" />
- </button>
- )}
-
- <button
- onClick={() => handleSend(input || inputRef.current?.value || "")}
- onPointerDown={(e) => e.stopPropagation()}
- disabled={isProcessing}
- className={cn(
- "px-3 py-2 rounded transition-colors",
- visionMode
- ? "bg-purple-600 text-white hover:bg-purple-700"
- : "bg-[#00A3E1] text-white hover:bg-[#0090c8]",
- "disabled:opacity-50 disabled:cursor-not-allowed"
- )}
- >
- {isProcessing ? (
- <Loader2 className="h-4 w-4 animate-spin" />
- ) : (
- <Send className="h-4 w-4" />
- )}
- </button>
- </div>
-
- {/* Annotation indicator */}
- {annotatedScreenshot && visionMode && (
- <div className="flex items-center justify-between text-xs text-[#00D3C8] bg-[#00D3C8]/10 px-2 py-1 rounded">
- <span className="flex items-center gap-1">
- <Crop className="h-3 w-3" />
- Focus area selected - your prompt will target this region
- </span>
- <button
- onClick={clearAnnotation}
- className="text-[#00D3C8] hover:text-[#00b3a8] p-0.5"
- title="Clear annotation"
- >
- <X className="h-3 w-3" />
+ {isProcessing ? (
+ <Loader2 className="h-4 w-4 animate-spin" />
+ ) : (
+ <Send className="h-4 w-4" />
+ )}
  </button>
  </div>
- )}

- {/* Processing Indicator */}
- {isProcessing && (
- <div className={cn(
- "flex items-center gap-2 text-xs",
- visionMode ? "text-purple-600" : "text-gray-500"
- )}>
- <Loader2 className="h-3 w-3 animate-spin" />
- <span>
- {visionMode
- ? "AI is analyzing the page screenshot and generating changes..."
- : "AI is analyzing and generating changes..."}
- </span>
- </div>
- )}
+ {/* Quick tip */}
+ <p className="text-[9px] text-foreground-muted text-center mt-2">
+ Press Enter to send • Keep chatting to refine changes
+ </p>
+ </div>

- {/* Screenshot Annotator Overlay - draws on live app */}
+ {/* Screenshot Annotator Overlay */}
  {isAnnotating && (
  <ScreenshotAnnotator
  onConfirm={handleAnnotationConfirm}