carvus-lens 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/render.js ADDED
@@ -0,0 +1,651 @@
1
const { ipcRenderer } = require('electron');

// ─── CONFIG ──────────────────────────────────────────────────────────────────
// SECURITY(review): these API keys were shipped hard-coded in a published
// package, so they must be treated as compromised — rotate them. The literals
// are kept only as fallbacks so existing installs keep working; prefer the
// environment variables.
const GROQ_API_KEY = process.env.GROQ_API_KEY
  || "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";
const IMG_HOST_KEY = process.env.IMG_HOST_KEY
  || "6d207e02198a847aa98d0a2a901485a5";

// ─── DOM ─────────────────────────────────────────────────────────────────────
// All elements are expected to exist in the overlay window's HTML.
const canvas = document.getElementById("captureCanvas");
const ctx = canvas.getContext("2d");
const resultPopup = document.getElementById("resultPopup");
const resultContent = document.getElementById("resultContent");
const loadingEl = document.getElementById("loading");
const loadingText = document.getElementById("loadingText");
const closeBtn = document.getElementById("closeBtn");
const actionBar = document.getElementById("actionBar");
const btnCopy = document.getElementById("btnCopy");
const btnRead = document.getElementById("btnRead");
const btnAskAI = document.getElementById("btnAskAI");
const btnTranslate = document.getElementById("btnTranslate");
const btnSearch = document.getElementById("btnSearch");
const gcseContainer = document.getElementById("gcseContainer");
const statusPill = document.getElementById("statusPill");
const statusText = document.getElementById("statusText");
const hintOverlay = document.getElementById("hintOverlay");
const edgeLighting = document.getElementById("edgeLighting");
const edgeLightCanvas = document.getElementById("edgeLightingCanvas");
const particleCanvas = document.getElementById("particleCanvas");

// ─── STATE ───────────────────────────────────────────────────────────────────
let drawing = false, points = [], screenImage = null;  // stroke-in-progress flag, stroke points, captured frame (ImageData)
let animFrameId = null, dashOffset = 0;                // selection-border animation bookkeeping
let currentImageData = "", currentExtractedText = "";  // last crop (data URL) and last OCR/AI text
let drawParticles = [], ambientParticles = [];         // cursor sparks and background motes
let isReading = false;                                 // speech-synthesis toggle state
let tesseractWorker = null;                            // pre-warmed OCR worker (may stay null if init fails)

// Stretch the transparent capture canvas over the whole window.
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
41
// ─── PRE-INIT TESSERACT (for AI/Translate only) ─────────────────────────────
// Warm up an OCR worker at startup so the first Ask-AI / Translate click does
// not pay the model-load cost. Failure is non-fatal: the worker simply stays
// null and OCR later falls back to one-shot recognition.
(async () => {
  try {
    tesseractWorker = await Tesseract.createWorker('eng', 1, { logger: () => {} });
    console.log('[Carvus Lens] Tesseract ready');
  } catch (err) {
    console.warn('[Carvus Lens] Tesseract pre-init failed:', err);
  }
})();
50
+
51
// ─── KEYBOARD ────────────────────────────────────────────────────────────────
// Ctrl+C or Escape asks the main process to tear the overlay down.
window.addEventListener('keydown', (event) => {
  const isCtrlC = event.ctrlKey && event.key.toLowerCase() === 'c';
  if (isCtrlC || event.key === 'Escape') {
    ipcRenderer.send('dismiss-overlay');
  }
});
57
+
58
// ─── DRAGGABLE POPUP ─────────────────────────────────────────────────────────
// Let the user drag the result popup around by its header bar.
let isDragging = false;
let dragOX = 0;
let dragOY = 0;
const popupHeader = document.getElementById('popupHeader');
popupHeader.style.cursor = 'grab';

// Begin a drag from the header (but never from its close button).
popupHeader.addEventListener('mousedown', (e) => {
  if (e.target.closest('.close-btn')) return;
  isDragging = true;
  dragOX = e.clientX - resultPopup.offsetLeft;
  dragOY = e.clientY - resultPopup.offsetTop;
  popupHeader.style.cursor = 'grabbing';
  e.preventDefault();
});

// While dragging, pin the popup to the pointer and drop any CSS centering.
window.addEventListener('mousemove', (e) => {
  if (!isDragging) return;
  Object.assign(resultPopup.style, {
    left: `${e.clientX - dragOX}px`,
    top: `${e.clientY - dragOY}px`,
    bottom: 'auto',
    transform: 'none',
  });
});

window.addEventListener('mouseup', () => {
  isDragging = false;
  popupHeader.style.cursor = 'grab';
});
84
+
85
// ─── HELPERS ─────────────────────────────────────────────────────────────────
// Toggle the shared `.visible` class the stylesheet uses for fade in/out.
function show(element) { element.classList.add('visible'); }
function hide(element) { element.classList.remove('visible'); }
// Update the status-pill text and make sure the pill itself is shown.
function setStatus(message) {
  statusText.textContent = message;
  show(statusPill);
}
89
+
90
// ─── EDGE LIGHTING ──────────────────────────────────────────────────────────
// Animate three warm "comet" trails that chase each other around the window
// perimeter on a dedicated full-screen canvas.
function initEdgeLighting() {
  const glowCanvas = edgeLightCanvas;
  glowCanvas.width = window.innerWidth;
  glowCanvas.height = window.innerHeight;
  const glowCtx = glowCanvas.getContext('2d');
  let time = 0;
  // Staggered speeds and opacities; color triplets are RGB components.
  const trails = [
    { speed: 0.25, color: [201, 100, 66], alpha: 0.5 },
    { speed: 0.2, color: [212, 132, 90], alpha: 0.35 },
    { speed: 0.15, color: [226, 169, 109], alpha: 0.25 },
  ];

  // Map a distance along the perimeter to an (x, y) edge coordinate,
  // walking top → right → bottom → left.
  function perimeterPoint(pos, w, h) {
    if (pos < w) return { x: pos, y: 0 };
    if (pos < w + h) return { x: w, y: pos - w };
    if (pos < 2 * w + h) return { x: w - (pos - w - h), y: h };
    return { x: 0, y: h - (pos - 2 * w - h) };
  }

  function frame() {
    time += 0.008;
    glowCtx.clearRect(0, 0, glowCanvas.width, glowCanvas.height);
    const w = glowCanvas.width;
    const h = glowCanvas.height;
    const perimeter = 2 * (w + h);
    const trailLen = perimeter * 0.25;  // each trail spans a quarter lap
    trails.forEach((trail, idx) => {
      const head = ((time * trail.speed + idx * 0.33) % 1) * perimeter;
      for (let d = 0; d < trailLen; d += 3) {
        const { x, y } = perimeterPoint((head + d) % perimeter, w, h);
        const fade = (1 - d / trailLen) * trail.alpha;  // fade toward the tail
        glowCtx.fillStyle = `rgba(${trail.color.join(',')},${fade.toFixed(3)})`;
        glowCtx.fillRect(x - 3, y - 3, 6, 6);
      }
    });
    requestAnimationFrame(frame);
  }
  frame();
  show(edgeLighting);
}
127
+
128
// ─── AMBIENT PARTICLES ──────────────────────────────────────────────────────
// Fill the background with slow-drifting motes that wrap at the screen edges.
function initAmbientParticles() {
  const moteCanvas = particleCanvas;
  const moteCtx = moteCanvas.getContext('2d');
  moteCanvas.width = window.innerWidth;
  moteCanvas.height = window.innerHeight;
  const palette = ['#c96442', '#d4845a', '#e2a96d', '#bfb5a8'];

  // Seed 25 motes with random placement, drift, size, and opacity.
  for (let i = 0; i < 25; i++) {
    ambientParticles.push({
      x: Math.random() * moteCanvas.width,
      y: Math.random() * moteCanvas.height,
      vx: (Math.random() - 0.5) * 0.2,
      vy: (Math.random() - 0.5) * 0.2,
      r: Math.random() * 1.5 + 0.4,
      color: palette[Math.floor(Math.random() * palette.length)],
      alpha: Math.random() * 0.25 + 0.05,
    });
  }

  function frame() {
    moteCtx.clearRect(0, 0, moteCanvas.width, moteCanvas.height);
    for (const mote of ambientParticles) {
      mote.x += mote.vx;
      mote.y += mote.vy;
      // Wrap around every edge so motes never disappear.
      if (mote.x < 0) mote.x = moteCanvas.width;
      if (mote.x > moteCanvas.width) mote.x = 0;
      if (mote.y < 0) mote.y = moteCanvas.height;
      if (mote.y > moteCanvas.height) mote.y = 0;
      moteCtx.globalAlpha = mote.alpha;
      moteCtx.fillStyle = mote.color;
      moteCtx.beginPath();
      moteCtx.arc(mote.x, mote.y, mote.r, 0, Math.PI * 2);
      moteCtx.fill();
    }
    moteCtx.globalAlpha = 1;
    requestAnimationFrame(frame);
  }
  frame();
}
162
+
163
// ─── SCREEN CAPTURE (background only — NOT displayed) ───────────────────────
/**
 * Grab one frame of the desktop via an Electron desktop-capture stream, stash
 * it in `screenImage` for later cropping, then reveal the overlay window.
 * The frame is never shown — the overlay canvas stays transparent.
 */
async function captureScreen() {
  let stream = null;
  try {
    const sourceId = await ipcRenderer.invoke('get-desktop-stream-id');
    stream = await navigator.mediaDevices.getUserMedia({
      audio: false,
      video: {
        mandatory: {
          chromeMediaSource: 'desktop',
          chromeMediaSourceId: sourceId,
          minWidth: 1, maxWidth: window.screen.width * 2,
          minHeight: 1, maxHeight: window.screen.height * 2
        }
      }
    });

    const video = document.createElement('video');
    video.srcObject = stream;
    video.onloadedmetadata = () => {
      // Give the stream a moment to decode a real frame; metadata alone may
      // arrive before the first frame is paintable.
      setTimeout(() => {
        try {
          // Capture to a hidden canvas — do NOT display.
          const hiddenCanvas = document.createElement('canvas');
          hiddenCanvas.width = video.videoWidth;
          hiddenCanvas.height = video.videoHeight;
          const hCtx = hiddenCanvas.getContext('2d');
          hCtx.drawImage(video, 0, 0, hiddenCanvas.width, hiddenCanvas.height);
          screenImage = hCtx.getImageData(0, 0, hiddenCanvas.width, hiddenCanvas.height);
          // Store capture dimensions for screen→image coordinate mapping.
          canvas._captureW = hiddenCanvas.width;
          canvas._captureH = hiddenCanvas.height;
        } catch (sampleErr) {
          console.error("Screen capture failed:", sampleErr);
          setStatus('Capture failed — try again');
          return;
        } finally {
          // FIX: always release the capture stream, even if sampling throws
          // (previously a throw here leaked the stream's tracks).
          stream.getTracks().forEach(t => t.stop());
        }
        ipcRenderer.send('show-window');

        show(hintOverlay);
        setStatus('Draw a circle to search');
        initEdgeLighting();
        initAmbientParticles();
      }, 150);
    };
    // FIX: play() returns a promise; awaiting it keeps a playback rejection
    // inside this try/catch instead of leaving it unhandled.
    await video.play();
  } catch (err) {
    console.error("Screen capture failed:", err);
    if (stream) stream.getTracks().forEach(t => t.stop());
    setStatus('Capture failed — try again');
  }
}

captureScreen();
210
+
211
// ─── DRAWING ON TRANSPARENT CANVAS ──────────────────────────────────────────
// A new stroke invalidates any previous result UI and border animation.
canvas.addEventListener("mousedown", (event) => {
  if (animFrameId) cancelAnimationFrame(animFrameId);
  hide(hintOverlay);
  hide(gcseContainer);
  resultPopup.classList.remove('visible');
  drawing = true;
  points = [{ x: event.clientX, y: event.clientY }];
  setStatus('Drawing...');
});
221
+
222
// Extend the stroke and emit three short-lived sparks per move event.
canvas.addEventListener("mousemove", (event) => {
  if (!drawing) return;
  const px = event.clientX;
  const py = event.clientY;
  points.push({ x: px, y: py });
  const sparkColors = ['#c96442', '#d4845a', '#e2a96d'];
  for (let n = 0; n < 3; n++) {
    drawParticles.push({
      x: px + (Math.random() - 0.5) * 10,
      y: py + (Math.random() - 0.5) * 10,
      vx: (Math.random() - 0.5) * 2,
      vy: (Math.random() - 0.5) * 2,
      life: 1.0,
      color: sparkColors[Math.floor(Math.random() * 3)],
    });
  }
  renderSelection();
});
238
+
239
// Redraw the overlay: selection tint, animated dashed border, then sparks.
function renderSelection() {
  if (points.length === 0) return;

  // Start each frame from a fully transparent canvas.
  ctx.clearRect(0, 0, canvas.width, canvas.height);

  // Replay the freehand stroke as the current path (no close, no paint).
  const tracePath = () => {
    ctx.beginPath();
    ctx.moveTo(points[0].x, points[0].y);
    for (let i = 1; i < points.length; i++) {
      ctx.lineTo(points[i].x, points[i].y);
    }
  };

  // Subtle warm tint inside the circled region.
  if (points.length > 2) {
    ctx.save();
    tracePath();
    ctx.closePath();
    ctx.fillStyle = "rgba(201, 100, 66, 0.06)";
    ctx.fill();
    ctx.restore();
  }

  // Dashed border, stroked twice: wide glow then a bright core.
  ctx.save();
  tracePath();
  if (!drawing) ctx.closePath();  // close the loop once the stroke is finished

  ctx.setLineDash([18, 10]);
  ctx.lineDashOffset = -dashOffset;
  ctx.lineWidth = 5;
  ctx.lineCap = "round";
  ctx.lineJoin = "round";

  // Pass 1: outer warm glow.
  ctx.shadowColor = "#c96442";
  ctx.shadowBlur = 30;
  ctx.strokeStyle = "rgba(201, 100, 66, 0.85)";
  ctx.stroke();

  // Pass 2: tighter bright inner stroke.
  ctx.shadowColor = "#e2a96d";
  ctx.shadowBlur = 18;
  ctx.strokeStyle = "rgba(245, 239, 231, 0.9)";
  ctx.stroke();
  ctx.restore();

  // Advance, cull, and paint sparks (reverse loop so splice is safe).
  for (let i = drawParticles.length - 1; i >= 0; i--) {
    const spark = drawParticles[i];
    spark.x += spark.vx;
    spark.y += spark.vy;
    spark.life -= 0.025;
    if (spark.life <= 0) {
      drawParticles.splice(i, 1);
      continue;
    }
    ctx.globalAlpha = spark.life * 0.7;
    ctx.fillStyle = spark.color;
    ctx.shadowColor = spark.color;
    ctx.shadowBlur = 5;
    ctx.beginPath();
    ctx.arc(spark.x, spark.y, Math.random() * 2 + 0.8, 0, Math.PI * 2);
    ctx.fill();
  }
  ctx.globalAlpha = 1;
  ctx.shadowBlur = 0;
}
299
+
300
// Per-frame loop that marches the dash pattern and, once the stroke is
// complete, occasionally spawns sparks along the finished outline.
function animateCircle() {
  dashOffset += 1.2;
  const strokeDone = !drawing && points.length > 0;
  if (strokeDone && Math.random() > 0.5) {
    const anchor = points[Math.floor(Math.random() * points.length)];
    drawParticles.push({
      x: anchor.x + (Math.random() - 0.5) * 20,
      y: anchor.y + (Math.random() - 0.5) * 20,
      vx: (Math.random() - 0.5) * 1,
      vy: (Math.random() - 0.5) * 1,
      life: 1.0,
      color: '#c96442',
    });
  }
  renderSelection();
  animFrameId = requestAnimationFrame(animateCircle);
}
315
+
316
// ─── PROCESS SELECTION — IMAGE BASED ────────────────────────────────────────
// On stroke end: crop the circled region out of the hidden screen capture,
// push it to Google Lens, and kick off background OCR for the AI buttons.
canvas.addEventListener("mouseup", async () => {
  if (!drawing) return;
  drawing = false;
  if (points.length < 5) { ctx.clearRect(0, 0, canvas.width, canvas.height); return; }

  animateCircle();
  setStatus('Processing...');

  // Bounding box of the freehand stroke, in window coordinates.
  const minX = Math.min(...points.map(p => p.x));
  const maxX = Math.max(...points.map(p => p.x));
  const minY = Math.min(...points.map(p => p.y));
  const maxY = Math.max(...points.map(p => p.y));
  const w = maxX - minX, h = maxY - minY;

  // Reject selections too small to be meaningful.
  if (w < 10 || h < 10) {
    cancelAnimationFrame(animFrameId);
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    return;
  }

  if (!screenImage) {
    // FIX: this early-out previously left the selection animation running
    // forever; stop it and wipe the canvas like the other abort paths.
    cancelAnimationFrame(animFrameId);
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    setStatus('No screen data — try again');
    return;
  }

  // Map window coordinates onto the (possibly higher-resolution) capture.
  const scaleX = (canvas._captureW || canvas.width) / window.innerWidth;
  const scaleY = (canvas._captureH || canvas.height) / window.innerHeight;
  const cropX = Math.round(minX * scaleX);
  const cropY = Math.round(minY * scaleY);
  const cropW = Math.round(w * scaleX);
  const cropH = Math.round(h * scaleY);

  // Rehydrate the stored ImageData, then crop the selected rectangle.
  const tempCanvas = document.createElement("canvas");
  tempCanvas.width = canvas._captureW || canvas.width;
  tempCanvas.height = canvas._captureH || canvas.height;
  tempCanvas.getContext("2d").putImageData(screenImage, 0, 0);

  const cropCanvas = document.createElement("canvas");
  cropCanvas.width = cropW;
  cropCanvas.height = cropH;
  cropCanvas.getContext("2d").drawImage(tempCanvas, cropX, cropY, cropW, cropH, 0, 0, cropW, cropH);
  currentImageData = cropCanvas.toDataURL("image/png");

  // Show popup + loading state.
  show(resultPopup);
  show(loadingEl);
  hide(resultContent);
  hide(actionBar);
  hide(gcseContainer);

  // IMMEDIATELY send image to Google Lens (primary action).
  loadingText.textContent = 'Uploading to Google Lens...';
  setStatus('Searching Google Lens...');

  try {
    // Lens needs a public URL, so host the crop first.
    const imageUrl = await uploadImage(currentImageData);

    hide(loadingEl);
    show(actionBar);

    if (imageUrl) {
      openGoogleLens(imageUrl);
    } else {
      // Fallback: open Google Lens upload page directly.
      openGoogleLensUpload();
    }

    // Run OCR in background for AI/Translate (non-blocking, fire-and-forget).
    extractTextInBackground(currentImageData);

  } catch (err) {
    hide(loadingEl);
    show(actionBar);
    show(resultContent);
    resultContent.textContent = "Error: " + err.message;
    setStatus('Error occurred');
    // Still try OCR so Ask AI / Translate remain usable.
    extractTextInBackground(currentImageData);
  }
});
400
+
401
// ─── IMAGE UPLOAD (for Google Lens URL) ──────────────────────────────────────
/**
 * Upload a PNG data URL to freeimage.host and return the hosted image URL,
 * or null on any failure (callers fall back to Lens' manual upload page).
 * NOTE(review): a crop of the user's screen is sent to a third-party image
 * host — confirm this is disclosed to users.
 * @param {string} dataUrl - "data:image/png;base64,..." string.
 * @returns {Promise<string|null>}
 */
async function uploadImage(dataUrl) {
  try {
    const base64 = dataUrl.split(',')[1];
    const formData = new FormData();
    formData.append("source", base64);
    formData.append("type", "base64");
    formData.append("action", "upload");
    formData.append("format", "json");

    const res = await fetch(`https://freeimage.host/api/1/upload?key=${IMG_HOST_KEY}`, {
      method: "POST",
      body: formData
    });
    // FIX: a non-2xx response previously fell straight into JSON parsing;
    // treat it as a failed upload instead.
    if (!res.ok) {
      console.error('[Carvus Lens] Image host returned HTTP', res.status);
      return null;
    }
    const data = await res.json();

    if (data && data.image && data.image.url) {
      return data.image.url;
    }
    return null;
  } catch (e) {
    console.error('[Carvus Lens] Image upload failed:', e);
    return null;
  }
}
426
+
427
// ─── GOOGLE LENS ─────────────────────────────────────────────────────────────
// Lazily create (or reuse) the <webview> that hosts Google Lens. A desktop
// Chrome user agent is set so Lens serves its full desktop page.
// (Deduplicated: this creation code was previously copy-pasted in both
// open functions below.)
function ensureSearchWebview() {
  let wv = document.getElementById("searchWebview");
  if (!wv) {
    wv = document.createElement("webview");
    wv.id = "searchWebview";
    wv.style.width = "100%";
    wv.style.height = "100%";
    wv.setAttribute("useragent",
      "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36");
    gcseContainer.appendChild(wv);
  }
  return wv;
}

// Point the webview at Lens' reverse-image search for an already-hosted image.
function openGoogleLens(imageUrl) {
  show(gcseContainer);
  hide(resultContent);
  ensureSearchWebview().src =
    `https://lens.google.com/uploadbyurl?url=${encodeURIComponent(imageUrl)}`;
  setStatus('Google Lens results loading...');
}

// Fallback when image hosting failed: open Lens' manual upload page.
function openGoogleLensUpload() {
  show(gcseContainer);
  hide(resultContent);
  ensureSearchWebview().src = "https://lens.google.com/";
  setStatus('Open Google Lens — upload your image');
}
463
+
464
// ─── BACKGROUND OCR (for AI/Translate) ──────────────────────────────────────
// Run OCR on the cropped image and cache the text in `currentExtractedText`.
// Errors leave the cache empty rather than surfacing to the user.
async function extractTextInBackground(imageData) {
  try {
    // Prefer the pre-warmed worker; fall back to a one-shot recognize call.
    const result = tesseractWorker
      ? await tesseractWorker.recognize(imageData)
      : await Tesseract.recognize(imageData, 'eng');
    const text = result.data.text.trim();
    currentExtractedText = text;
    if (text) {
      console.log('[Carvus Lens] OCR extracted:', text.substring(0, 100));
    }
  } catch (err) {
    console.warn('[Carvus Lens] Background OCR failed:', err);
    currentExtractedText = '';
  }
}
484
+
485
// ─── GROQ AI ─────────────────────────────────────────────────────────────────
/**
 * Send `prompt` to the Groq chat-completions API and render the reply in the
 * result popup. The reply also overwrites `currentExtractedText` so that
 * Copy / Read Aloud act on the AI answer.
 * @param {string} prompt - full user prompt (already includes any OCR text).
 * @param {string} [statusLabel] - loading/status text shown while waiting.
 */
async function queryGroq(prompt, statusLabel) {
  hide(actionBar);
  show(resultContent);
  show(loadingEl);
  hide(gcseContainer);
  loadingText.textContent = statusLabel || 'Thinking...';
  setStatus(statusLabel || 'AI is thinking...');
  resultContent.textContent = '';

  try {
    // Model choice comes from the popup's <select>.
    const model = document.getElementById("modelSelect").value;
    const res = await fetch("https://api.groq.com/openai/v1/chat/completions", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${GROQ_API_KEY}`
      },
      body: JSON.stringify({
        model,
        messages: [{ role: "user", content: prompt }],
        temperature: 0.6,
        max_tokens: 2048
      })
    });

    const data = await res.json();
    hide(loadingEl);
    show(actionBar);

    if (data.error) {
      resultContent.textContent = "API Error: " + data.error.message;
      setStatus('API error');
      return;
    }

    // FIX: a well-formed response with no choices previously threw a
    // TypeError and was misreported as a "Connection Error".
    const reply = data.choices?.[0]?.message?.content;
    if (!reply) {
      resultContent.textContent = "API Error: empty response from model";
      setStatus('API error');
      return;
    }

    resultContent.textContent = reply;
    currentExtractedText = reply;
    setStatus('Response ready');
  } catch (err) {
    hide(loadingEl);
    show(actionBar);
    resultContent.textContent = "Connection Error: " + err.message;
    setStatus('Connection failed');
  }
}
532
+
533
// ─── BUTTON: COPY ────────────────────────────────────────────────────────────
// Copy the current OCR/AI text to the clipboard with a transient "Copied!"
// confirmation on the button itself.
btnCopy.addEventListener('click', () => {
  const text = currentExtractedText || resultContent.textContent || "";
  if (!text) { setStatus('No text to copy'); return; }
  navigator.clipboard.writeText(text).then(() => {
    const orig = btnCopy.innerHTML;
    btnCopy.innerHTML = `<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.5"><path d="M20 6L9 17l-5-5"/></svg>Copied!`;
    btnCopy.classList.add('success');
    setStatus('Copied to clipboard');
    setTimeout(() => { btnCopy.innerHTML = orig; btnCopy.classList.remove('success'); }, 2000);
  }).catch((err) => {
    // FIX: the write promise was previously unhandled; clipboard access can
    // be denied (window unfocused / permissions), which left a silent
    // unhandled rejection and no user feedback.
    console.warn('[Carvus Lens] Clipboard write failed:', err);
    setStatus('Copy failed');
  });
});
545
+
546
// ─── BUTTON: READ ALOUD (toggle) ─────────────────────────────────────────────
// First click speaks the current text; a second click while speaking stops it.
btnRead.addEventListener('click', () => {
  if (!('speechSynthesis' in window)) return;
  const text = currentExtractedText || resultContent.textContent || "";
  if (!text) { setStatus('No text to read'); return; }

  // Button markup for the two toggle states.
  const readIcon = `<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.5" stroke-linecap="round" stroke-linejoin="round"><polygon points="11 5 6 9 2 9 2 15 6 15 11 19 11 5"/><path d="M15.54 8.46a5 5 0 010 7.07"/></svg>Read Aloud`;
  const stopIcon = `<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2.5"><rect x="6" y="4" width="4" height="16"/><rect x="14" y="4" width="4" height="16"/></svg>Stop`;

  // Toggle off: cancel speech and restore the idle button.
  if (isReading) {
    window.speechSynthesis.cancel();
    isReading = false;
    btnRead.classList.remove('reading');
    btnRead.innerHTML = readIcon;
    setStatus('Stopped');
    return;
  }

  window.speechSynthesis.cancel();
  const utterance = new SpeechSynthesisUtterance(text);
  utterance.rate = 0.95;
  // Prefer a Google or "Daniel" voice when one is available.
  const preferred = window.speechSynthesis.getVoices()
    .find(v => v.name.includes('Google') || v.name.includes('Daniel'));
  if (preferred) utterance.voice = preferred;

  isReading = true;
  btnRead.classList.add('reading');
  btnRead.innerHTML = stopIcon;
  setStatus('Reading aloud...');

  utterance.onend = () => {
    isReading = false;
    btnRead.classList.remove('reading');
    btnRead.innerHTML = readIcon;
    setStatus('Finished');
  };
  utterance.onerror = () => { isReading = false; btnRead.classList.remove('reading'); };
  window.speechSynthesis.speak(utterance);
});
582
+
583
// ─── BUTTON: GOOGLE LENS (re-search) ────────────────────────────────────────
// Re-upload the last crop and reopen Lens with the fresh URL.
btnSearch.addEventListener('click', async () => {
  if (!currentImageData) { setStatus('No image to search'); return; }
  show(loadingEl);
  loadingText.textContent = 'Uploading to Google Lens...';
  setStatus('Re-searching...');
  hide(resultContent);

  try {
    const hostedUrl = await uploadImage(currentImageData);
    hide(loadingEl);
    if (hostedUrl) {
      openGoogleLens(hostedUrl);
    } else {
      openGoogleLensUpload();
    }
  } catch (err) {
    hide(loadingEl);
    setStatus('Upload failed');
  }
});
604
+
605
// ─── BUTTON: ASK AI ──────────────────────────────────────────────────────────
// Feed the OCR text to Groq for analysis. If OCR hasn't finished yet, retry
// the click once after a short delay.
btnAskAI.addEventListener('click', () => {
  const ocrText = currentExtractedText || "";
  if (!ocrText) {
    setStatus('Waiting for OCR... try again in a moment');
    setTimeout(() => {
      if (currentExtractedText) btnAskAI.click();
    }, 1500);
    return;
  }
  hide(gcseContainer);
  queryGroq(
    `You are Carvus Lens AI — a concise, helpful assistant. The user circled something on their screen and wants analysis. Provide a clear, well-structured response.\n\nExtracted text from screen:\n"""${ocrText}"""`,
    'Analyzing with AI...'
  );
});

// ─── BUTTON: TRANSLATE ──────────────────────────────────────────────────────
// Same retry-once pattern; translation direction is decided by the model.
btnTranslate.addEventListener('click', () => {
  const ocrText = currentExtractedText || "";
  if (!ocrText) {
    setStatus('Waiting for OCR... try again in a moment');
    setTimeout(() => {
      if (currentExtractedText) btnTranslate.click();
    }, 1500);
    return;
  }
  hide(gcseContainer);
  queryGroq(
    `Translate the following text. If English, translate to Spanish. If any other language, translate to English. Provide ONLY the translation:\n\n"""${ocrText}"""`,
    'Translating...'
  );
});
639
+
640
// ─── CLOSE BUTTON ────────────────────────────────────────────────────────────
// Reset the overlay to its idle "draw a circle" state.
closeBtn.addEventListener('click', () => {
  if (animFrameId) cancelAnimationFrame(animFrameId);
  resultPopup.classList.remove('visible');
  [gcseContainer, actionBar, loadingEl, resultContent].forEach(hide);
  if (isReading) {
    window.speechSynthesis.cancel();
    isReading = false;
  }
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  show(hintOverlay);
  setStatus('Draw a circle to search');
  points = [];
  drawParticles = [];
});