cyclecad 3.4.0 → 3.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1344 @@
1
+ /**
2
+ * Photo-to-CAD Reverse Engineering Module
3
+ * Converts photographs of parts into parametric 3D CAD models
4
+ *
5
+ * Features:
6
+ * - Image input: drag-drop, camera, clipboard
7
+ * - Edge detection: Sobel + Canny + contour tracing
8
+ * - Geometry reconstruction: 2D contours → 3D primitives
9
+ * - Interactive refinement: overlay, sliders, reference dimensions
10
+ * - AI enhancement: Gemini Flash Vision API integration
11
+ * - UI panel with side-by-side photo + 3D preview
12
+ */
13
+
14
+ (function() {
15
+ 'use strict';
16
+
17
+ // ============================================================================
18
+ // STATE
19
+ // ============================================================================
20
+
21
// Module-wide mutable state for the photo-to-CAD pipeline.
const state = {
  originalImage: null,   // data URL of the photo as originally loaded
  processedImage: null,  // data URL after resize + normalization
  canvas: null,          // offscreen working canvas (max 1024px on the long side)
  ctx: null,             // 2d context of `canvas`
  // Results of the detection passes; `arcs`/`corners` are reserved slots
  // (nothing in this chunk populates them).
  detectedFeatures: {
    lines: [],
    circles: [],
    arcs: [],
    corners: [],
    rectangles: [],
    contours: []
  },
  selectedFeatures: new Set(),  // feature ids the user checked in the list UI
  // px→mm calibration entered by the user; scale = mm / pixels.
  referenceDimension: { pixels: 0, mm: 0, scale: 0 },
  edgeSensitivity: 0.5,  // 0..1; higher keeps more (weaker) edges
  threeDPreview: null,
  threeDScene: null,     // THREE.Scene backing the live 3D preview
  threeDRenderer: null,  // THREE.WebGLRenderer backing the live 3D preview
  aiMetadata: null       // parsed JSON object from the Gemini vision call
};
42
+
43
+ // ============================================================================
44
+ // SECTION 1: IMAGE INPUT SYSTEM (~200 lines)
45
+ // ============================================================================
46
+
47
+ /**
48
+ * Initialize image input handlers
49
+ */
50
/**
 * Wire up every image-input entry point: drag-and-drop on the drop zone,
 * the file picker, the camera button, and clipboard paste.
 * No-op if the drop zone element is absent from the page.
 */
function initImageInput() {
  const dropZone = document.getElementById('photo-cad-drop-zone');
  if (!dropZone) return;

  // Suppress the browser's default drag handling everywhere.
  for (const evt of ['dragenter', 'dragover', 'dragleave', 'drop']) {
    dropZone.addEventListener(evt, preventDefaults, false);
    document.body.addEventListener(evt, preventDefaults, false);
  }

  // Visual feedback while a file hovers over the zone.
  for (const evt of ['dragenter', 'dragover']) {
    dropZone.addEventListener(evt, highlight, false);
  }
  for (const evt of ['dragleave', 'drop']) {
    dropZone.addEventListener(evt, unhighlight, false);
  }

  dropZone.addEventListener('drop', handleDrop, false);

  const fileInput = document.getElementById('photo-cad-file-input');
  if (fileInput) {
    fileInput.addEventListener('change', (e) => handleFileSelect(e.target.files[0]));
  }

  const cameraBtn = document.getElementById('photo-cad-camera-btn');
  if (cameraBtn) {
    cameraBtn.addEventListener('click', startCameraCapture);
  }

  document.addEventListener('paste', handlePaste);
}
87
+
88
/** Cancel the browser's default handling and stop propagation for an event. */
function preventDefaults(e) {
  e.preventDefault();
  e.stopPropagation();
}
92
+
93
/** Add the drop-target highlight class to the drop zone, if it exists. */
function highlight(e) {
  document.getElementById('photo-cad-drop-zone')?.classList.add('highlight');
}
97
+
98
/** Remove the drop-target highlight class from the drop zone, if it exists. */
function unhighlight(e) {
  document.getElementById('photo-cad-drop-zone')?.classList.remove('highlight');
}
102
+
103
/** Forward the first dropped file (if any) to the file handler. */
function handleDrop(e) {
  const file = e.dataTransfer.files[0];
  if (file) {
    handleFileSelect(file);
  }
}
110
+
111
/**
 * Validate a picked file and read it as a data URL for loading.
 * Rejects missing or non-image files with an alert.
 * @param {File} file - File chosen via drop, picker, or paste.
 */
function handleFileSelect(file) {
  if (!file || !file.type.match('image.*')) {
    alert('Please select an image file (JPEG, PNG, WebP)');
    return;
  }

  const reader = new FileReader();
  reader.onload = ({ target }) => loadImage(target.result);
  reader.readAsDataURL(file);
}
123
+
124
/**
 * Load the first image found on the clipboard when the user pastes.
 * Non-image clipboard items are ignored.
 */
function handlePaste(e) {
  const items = e.clipboardData?.items || [];
  for (const item of items) {
    if (!item.type.match('image.*')) continue;
    const reader = new FileReader();
    reader.onload = (ev) => loadImage(ev.target.result);
    reader.readAsDataURL(item.getAsFile());
    break;
  }
}
136
+
137
+ /**
138
+ * Load image and preprocess
139
+ * @param {string} dataUrl - Data URL of image
140
+ */
141
/**
 * Load an image from a data URL, downscale it to at most 1024px on the
 * long side, normalize it, and kick off edge detection.
 * @param {string} dataUrl - Data URL of the image.
 */
function loadImage(dataUrl) {
  const img = new Image();
  img.onload = () => {
    // Downscale only (never upscale), preserving aspect ratio.
    const MAX_DIM = 1024;
    const ratio = Math.min(1, MAX_DIM / Math.max(img.width, img.height));
    const w = Math.floor(img.width * ratio);
    const h = Math.floor(img.height * ratio);

    // Draw into a fresh offscreen working canvas.
    state.canvas = document.createElement('canvas');
    state.canvas.width = w;
    state.canvas.height = h;
    state.ctx = state.canvas.getContext('2d');
    state.ctx.drawImage(img, 0, 0, w, h);

    // Contrast-stretch (also grayscales) the working copy.
    normalizeImage();

    state.originalImage = dataUrl;
    state.processedImage = state.canvas.toDataURL('image/png');

    updateImagePreview();
    detectEdges();
  };
  img.src = dataUrl;
}
171
+
172
+ /**
173
+ * Normalize brightness and contrast
174
+ */
175
/**
 * Stretch the brightness range of the working canvas to the full 0..255
 * span. Note: this also collapses the image to grayscale (all three
 * channels are set to the stretched luminance), which the edge pipeline
 * relies on downstream.
 */
function normalizeImage() {
  const imageData = state.ctx.getImageData(0, 0, state.canvas.width, state.canvas.height);
  const px = imageData.data;

  // Build a luminance histogram (simple RGB average).
  const histogram = new Array(256).fill(0);
  for (let i = 0; i < px.length; i += 4) {
    histogram[Math.floor((px[i] + px[i + 1] + px[i + 2]) / 3)]++;
  }

  // Locate the darkest and brightest occupied bins.
  let minBright = 0;
  let maxBright = 255;
  for (let i = 0; i < 256; i++) {
    if (histogram[i] > 0) { minBright = i; break; }
  }
  for (let i = 255; i >= 0; i--) {
    if (histogram[i] > 0) { maxBright = i; break; }
  }

  // Linearly remap [min, max] onto [0, 255]; skip flat images.
  const range = maxBright - minBright;
  if (range > 0) {
    for (let i = 0; i < px.length; i += 4) {
      const brightness = (px[i] + px[i + 1] + px[i + 2]) / 3;
      const stretched = ((brightness - minBright) / range) * 255;
      px[i] = px[i + 1] = px[i + 2] = Math.floor(stretched);
    }
  }

  state.ctx.putImageData(imageData, 0, 0);
}
213
+
214
+ /**
215
+ * Start camera capture via getUserMedia
216
+ */
217
/**
 * Open a modal with a live camera preview and capture a still frame.
 * Prefers the rear ("environment") camera. The captured frame is handed
 * to loadImage() as a JPEG data URL; the stream is stopped on capture
 * or cancel.
 *
 * FIX: the preview <video> element was never played (no autoplay, no
 * play() call), so the modal showed a frozen/black frame. Both videos
 * are now muted + playsInline and explicitly played so autoplay
 * policies are satisfied.
 */
async function startCameraCapture() {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({ video: { facingMode: 'environment' } });

    // Off-DOM video used purely as the capture source.
    const video = document.createElement('video');
    video.srcObject = stream;
    video.muted = true;
    video.playsInline = true;
    video.play().catch(() => { /* capture still works once frames flow */ });

    // Build the capture dialog.
    const dialog = document.createElement('div');
    dialog.className = 'photo-cad-camera-dialog';
    dialog.innerHTML = `
      <div class="photo-cad-camera-modal">
        <video id="photo-cad-preview" style="width:100%;max-width:100%;border-radius:8px;"></video>
        <div style="display:flex;gap:10px;margin-top:15px;">
          <button id="photo-cad-capture-btn" class="photo-cad-btn-primary">Capture</button>
          <button id="photo-cad-cancel-capture-btn" class="photo-cad-btn-secondary">Cancel</button>
        </div>
      </div>
    `;
    document.body.appendChild(dialog);

    const preview = document.getElementById('photo-cad-preview');
    preview.srcObject = stream;
    preview.muted = true;       // required for programmatic autoplay
    preview.playsInline = true; // avoid fullscreen takeover on iOS
    preview.play().catch((err) => console.error('Camera preview failed to start:', err));

    const stopStream = () => stream.getTracks().forEach(t => t.stop());

    document.getElementById('photo-cad-capture-btn').onclick = () => {
      const canvas = document.createElement('canvas');
      canvas.width = video.videoWidth;
      canvas.height = video.videoHeight;
      canvas.getContext('2d').drawImage(video, 0, 0);
      loadImage(canvas.toDataURL('image/jpeg'));
      stopStream();
      dialog.remove();
    };

    document.getElementById('photo-cad-cancel-capture-btn').onclick = () => {
      stopStream();
      dialog.remove();
    };
  } catch (err) {
    console.error('Camera error:', err);
    alert('Camera access denied or not available');
  }
}
261
+
262
+ /**
263
+ * Update image preview in UI
264
+ */
265
/** Render the processed image thumbnail into the preview panel, if present. */
function updateImagePreview() {
  const preview = document.getElementById('photo-cad-image-preview');
  if (!preview) return;
  preview.innerHTML = `<img src="${state.processedImage}" style="max-width:100%;max-height:300px;border-radius:4px;">`;
}
271
+
272
+ // ============================================================================
273
+ // SECTION 2: EDGE DETECTION & CONTOUR EXTRACTION (~400 lines)
274
+ // ============================================================================
275
+
276
+ /**
277
+ * Detect edges using Sobel operator + Canny-style thinning
278
+ */
279
/**
 * Run the full edge pipeline on the working canvas:
 * grayscale → Sobel → non-maximum suppression → threshold → contours →
 * primitive detection → overlay render, plus optional AI enhancement.
 * No-op when no image has been loaded yet.
 */
function detectEdges() {
  if (!state.canvas) return;

  const width = state.canvas.width;
  const height = state.canvas.height;
  const { data } = state.ctx.getImageData(0, 0, width, height);

  // Luminance buffer (simple RGB average).
  const gray = new Uint8Array(width * height);
  for (let i = 0, p = 0; i < data.length; i += 4, p++) {
    gray[p] = (data[i] + data[i + 1] + data[i + 2]) / 3;
  }

  const magnitudes = sobelEdgeDetection(gray, width, height);
  const thin = nonMaximumSuppression(magnitudes, width, height);

  // Higher sensitivity => lower cutoff => more edges survive.
  const cutoff = (1 - state.edgeSensitivity) * 255;
  const binary = new Uint8Array(width * height);
  for (let i = 0; i < thin.length; i++) {
    binary[i] = thin[i] > cutoff ? 255 : 0;
  }

  extractContours(binary, width, height);
  detectPrimitives();
  displayDetectedEdges(binary, width, height);

  // Optional AI pass when an API key is configured globally.
  if (window.GEMINI_API_KEY) {
    enhanceWithAI();
  }
}
320
+
321
+ /**
322
+ * Sobel edge detection
323
+ * @param {Uint8Array} gray - Grayscale image
324
+ * @param {number} width
325
+ * @param {number} height
326
+ * @returns {Uint8Array} Edge magnitude
327
+ */
328
/**
 * Sobel gradient-magnitude edge detection over the interior pixels
 * (the 1px border is left at 0).
 *
 * FIX: the magnitude can reach ~1443 (sqrt(1020² + 1020²)), but the
 * result buffer is a Uint8Array, which stores values modulo 256 — so
 * the strongest edges wrapped around to small values and were lost.
 * The magnitude is now clamped to 255.
 *
 * @param {Uint8Array} gray - Grayscale image, row-major.
 * @param {number} width
 * @param {number} height
 * @returns {Uint8Array} Edge magnitude per pixel, clamped to 0..255.
 */
function sobelEdgeDetection(gray, width, height) {
  const sobelX = [[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]];
  const sobelY = [[-1, -2, -1], [0, 0, 0], [1, 2, 1]];
  const edges = new Uint8Array(width * height);

  for (let y = 1; y < height - 1; y++) {
    for (let x = 1; x < width - 1; x++) {
      let gx = 0, gy = 0;

      // 3x3 convolution with both kernels.
      for (let dy = -1; dy <= 1; dy++) {
        for (let dx = -1; dx <= 1; dx++) {
          const pixel = gray[(y + dy) * width + (x + dx)];
          gx += sobelX[dy + 1][dx + 1] * pixel;
          gy += sobelY[dy + 1][dx + 1] * pixel;
        }
      }

      // Clamp instead of letting Uint8Array wrap modulo 256.
      edges[y * width + x] = Math.min(255, Math.sqrt(gx * gx + gy * gy));
    }
  }

  return edges;
}
351
+
352
+ /**
353
+ * Non-maximum suppression for edge thinning
354
+ * @param {Uint8Array} edges
355
+ * @param {number} width
356
+ * @param {number} height
357
+ * @returns {Uint8Array} Thinned edges
358
+ */
359
/**
 * Thin edges by keeping only pixels that are >= all 8 neighbors
 * (a pixel is suppressed when any neighbor is strictly greater).
 * The 1px border is left at 0.
 * @param {Uint8Array} edges - Edge magnitudes, row-major.
 * @param {number} width
 * @param {number} height
 * @returns {Uint8Array} Thinned edge map.
 */
function nonMaximumSuppression(edges, width, height) {
  const thinned = new Uint8Array(width * height);

  // Precompute the 8 neighbor index offsets.
  const offsets = [];
  for (let dy = -1; dy <= 1; dy++) {
    for (let dx = -1; dx <= 1; dx++) {
      if (dx !== 0 || dy !== 0) offsets.push(dy * width + dx);
    }
  }

  for (let y = 1; y < height - 1; y++) {
    for (let x = 1; x < width - 1; x++) {
      const idx = y * width + x;
      const center = edges[idx];
      const suppressed = offsets.some((off) => edges[idx + off] > center);
      thinned[idx] = suppressed ? 0 : center;
    }
  }

  return thinned;
}
386
+
387
+ /**
388
+ * Extract contours using Moore-Neighbor tracing
389
+ * @param {Uint8Array} binary - Binary edge image
390
+ * @param {number} width
391
+ * @param {number} height
392
+ */
393
/**
 * Scan the binary edge map and trace a contour from every unvisited
 * edge pixel; contours with more than 4 points are stored on
 * state.detectedFeatures.contours.
 * @param {Uint8Array} binary - Binary edge image (edge pixels > 128).
 * @param {number} width
 * @param {number} height
 */
function extractContours(binary, width, height) {
  const visited = new Uint8Array(width * height);
  const found = [];

  for (let y = 0; y < height; y++) {
    for (let x = 0; x < width; x++) {
      const idx = y * width + x;
      if (binary[idx] <= 128 || visited[idx]) continue;
      const trace = traceContour(binary, visited, x, y, width, height);
      // Ignore tiny traces (noise specks).
      if (trace.length > 4) found.push(trace);
    }
  }

  state.detectedFeatures.contours = found;
}
411
+
412
+ /**
413
+ * Trace a single contour using Moore-Neighbor algorithm
414
+ * @param {Uint8Array} binary
415
+ * @param {Uint8Array} visited
416
+ * @param {number} startX
417
+ * @param {number} startY
418
+ * @param {number} width
419
+ * @param {number} height
420
+ * @returns {Array<{x, y}>} Contour points
421
+ */
422
/**
 * Trace one contour Moore-Neighbor style: from the current pixel, scan
 * the 8 neighbors clockwise starting at the last movement direction and
 * step to the first edge pixel found. Stops when the walk returns to the
 * start, dead-ends, or exceeds width*height steps.
 * Visited pixels are marked but may still be re-entered during a trace.
 *
 * @param {Uint8Array} binary - Binary edge image (edge pixels > 128).
 * @param {Uint8Array} visited - Per-pixel visited flags (mutated).
 * @param {number} startX
 * @param {number} startY
 * @param {number} width
 * @param {number} height
 * @returns {Array<{x: number, y: number}>} Ordered contour points.
 */
function traceContour(binary, visited, startX, startY, width, height) {
  // Neighbor offsets in clockwise order starting at "east".
  const NEIGHBORS = [[1, 0], [1, 1], [0, 1], [-1, 1], [-1, 0], [-1, -1], [0, -1], [1, -1]];
  const points = [];
  const limit = width * height;

  let cx = startX;
  let cy = startY;
  let heading = 0;
  let steps = 0;

  do {
    visited[cy * width + cx] = 1;
    points.push({ x: cx, y: cy });

    // Advance to the next edge pixel, scanning from the current heading.
    let advanced = false;
    for (let i = 0; i < 8; i++) {
      const d = (heading + i) % 8;
      const nx = cx + NEIGHBORS[d][0];
      const ny = cy + NEIGHBORS[d][1];
      if (nx < 0 || nx >= width || ny < 0 || ny >= height) continue;
      if (binary[ny * width + nx] > 128) {
        cx = nx;
        cy = ny;
        heading = d;
        advanced = true;
        break;
      }
    }

    if (!advanced) break;
  } while ((cx !== startX || cy !== startY) && steps++ < limit);

  return points;
}
458
+
459
+ /**
460
+ * Detect primitives: circles, lines, rectangles, arcs
461
+ */
462
/**
 * Classify every extracted contour into primitives (circles, rectangles,
 * fitted line segments) and rebuild the corresponding feature lists on
 * state.detectedFeatures. Confidence below 0.7 is discarded.
 */
function detectPrimitives() {
  const { contours } = state.detectedFeatures;
  state.detectedFeatures.circles = [];
  state.detectedFeatures.lines = [];
  state.detectedFeatures.rectangles = [];
  state.detectedFeatures.arcs = [];

  contours.forEach((contour, idx) => {
    // Circle candidate.
    const circle = detectCircle(contour);
    if (circle && circle.confidence > 0.7) {
      state.detectedFeatures.circles.push({ ...circle, id: `circle-${idx}`, contourIdx: idx });
    }

    // Rectangle candidate.
    const rect = detectRectangle(contour);
    if (rect && rect.confidence > 0.7) {
      state.detectedFeatures.rectangles.push({ ...rect, id: `rect-${idx}`, contourIdx: idx });
    }

    // Fitted line segments (zero or more per contour).
    detectLineSegments(contour).forEach((seg, i) => {
      state.detectedFeatures.lines.push({ ...seg, id: `line-${idx}-${i}`, contourIdx: idx });
    });
  });
}
489
+
490
+ /**
491
+ * Detect circle in contour using Hough circle transform (simplified)
492
+ * @param {Array<{x, y}>} contour
493
+ * @returns {{x, y, radius, confidence} | null}
494
+ */
495
/**
 * Least-squares (Kåsa-style) circle fit to a contour.
 * Returns null for short contours (< 8 points) or degenerate/collinear
 * point sets. Confidence is 1 minus the mean radial deviation relative
 * to the fitted radius, floored at 0.
 *
 * @param {Array<{x: number, y: number}>} contour
 * @returns {{x: number, y: number, radius: number, confidence: number} | null}
 */
function detectCircle(contour) {
  if (contour.length < 8) return null;

  const n = contour.length;

  // Accumulate the moment sums needed by the normal equations.
  let sx = 0, sy = 0, sxx = 0, syy = 0, sxy = 0;
  let sxxx = 0, syyy = 0, sxxy = 0, sxyy = 0;
  for (const { x, y } of contour) {
    sx += x;
    sy += y;
    sxx += x * x;
    syy += y * y;
    sxy += x * y;
    sxxx += x * x * x;
    syyy += y * y * y;
    sxxy += x * x * y;
    sxyy += x * y * y;
  }

  const A = n * sxx - sx * sx;
  const B = n * sxy - sx * sy;
  const C = n * syy - sy * sy;
  const D = 0.5 * (n * (sxxx + sxyy) - sx * (sxx + syy));
  const E = 0.5 * (n * (sxxy + syyy) - sy * (sxx + syy));

  // Singular system => points are (nearly) collinear; no circle.
  const denom = A * C - B * B;
  if (Math.abs(denom) < 1e-10) return null;

  const cx = (D * C - B * E) / denom;
  const cy = (A * E - B * D) / denom;

  // Mean distance from the fitted center is the radius estimate.
  let radiusSum = 0;
  for (const p of contour) {
    radiusSum += Math.sqrt((p.x - cx) ** 2 + (p.y - cy) ** 2);
  }
  const radius = radiusSum / n;

  // Confidence: how tightly the points hug the fitted radius.
  let deviation = 0;
  for (const p of contour) {
    const r = Math.sqrt((p.x - cx) ** 2 + (p.y - cy) ** 2);
    deviation += Math.abs(r - radius);
  }
  const avgDeviation = deviation / n;
  const confidence = Math.max(0, 1 - (avgDeviation / radius));

  return { x: cx, y: cy, radius, confidence };
}
544
+
545
+ /**
546
+ * Detect rectangle in contour
547
+ * @param {Array<{x, y}>} contour
548
+ * @returns {{x, y, width, height, angle, confidence} | null}
549
+ */
550
/**
 * Heuristic rectangle detection: takes the first four convex-hull points
 * as corner candidates and scores how close their interior angles are to
 * 90°. Returns an axis-aligned bounding box of those corners when the
 * score clears 0.7, otherwise null. NOTE(review): the first four hull
 * points are not guaranteed to be the true rectangle corners.
 *
 * @param {Array<{x: number, y: number}>} contour
 * @returns {{x, y, width, height, angle, confidence} | null}
 */
function detectRectangle(contour) {
  if (contour.length < 8) return null;

  const hull = convexHull(contour);
  if (hull.length < 4) return null;

  const corners = hull.slice(0, Math.min(4, hull.length));
  if (corners.length !== 4) return null;

  // Sum each corner angle's deviation from a right angle.
  let totalDeviation = 0;
  for (let i = 0; i < 4; i++) {
    const p0 = corners[i];
    const p1 = corners[(i + 1) % 4];
    const p2 = corners[(i + 2) % 4];

    const ux = p1.x - p0.x;
    const uy = p1.y - p0.y;
    const vx = p2.x - p1.x;
    const vy = p2.y - p1.y;
    const cosine = (ux * vx + uy * vy) / (Math.hypot(ux, uy) * Math.hypot(vx, vy));
    totalDeviation += Math.abs(Math.acos(cosine) - Math.PI / 2);
  }

  const confidence = Math.max(0, 1 - totalDeviation / 4);
  if (confidence <= 0.7) return null;

  const xs = corners.map((p) => p.x);
  const ys = corners.map((p) => p.y);
  const minX = Math.min(...xs);
  const minY = Math.min(...ys);

  return {
    x: minX,
    y: minY,
    width: Math.max(...xs) - minX,
    height: Math.max(...ys) - minY,
    angle: 0,
    confidence
  };
}
596
+
597
+ /**
598
+ * Detect line segments in contour
599
+ * @param {Array<{x, y}>} contour
600
+ * @returns {Array<{x1, y1, x2, y2}>}
601
+ */
602
/**
 * Fit a single least-squares line (y on x) through a contour and return
 * it as a segment spanning the contour's first/last x positions.
 *
 * FIX: for (near-)vertical contours every x is equal, so the regression
 * denominator is ~0 and the old code produced Infinity/NaN coordinates.
 * Those cases now fall back to a segment joining the raw endpoints.
 *
 * @param {Array<{x: number, y: number}>} contour
 * @returns {Array<{x1: number, y1: number, x2: number, y2: number}>}
 *   Zero (too few points) or one fitted segment.
 */
function detectLineSegments(contour) {
  const lines = [];
  if (contour.length < 4) return lines;

  const first = contour[0];
  const last = contour[contour.length - 1];

  // Accumulate regression sums.
  let sumX = 0, sumY = 0, sumX2 = 0, sumXY = 0;
  contour.forEach(p => {
    sumX += p.x;
    sumY += p.y;
    sumX2 += p.x * p.x;
    sumXY += p.x * p.y;
  });

  const n = contour.length;
  const denom = n * sumX2 - sumX * sumX;

  // Degenerate (vertical) case: use the endpoints directly.
  if (Math.abs(denom) < 1e-10) {
    lines.push({ x1: first.x, y1: first.y, x2: last.x, y2: last.y });
    return lines;
  }

  const slope = (n * sumXY - sumX * sumY) / denom;
  const intercept = (sumY - slope * sumX) / n;

  lines.push({
    x1: first.x,
    y1: slope * first.x + intercept,
    x2: last.x,
    y2: slope * last.x + intercept
  });

  return lines;
}
632
+
633
+ /**
634
+ * Simple convex hull using Graham scan
635
+ * @param {Array<{x, y}>} points
636
+ * @returns {Array<{x, y}>} Hull points
637
+ */
638
/**
 * Convex hull via Graham scan: points are sorted by polar angle around
 * the lowest-y (then lowest-x) anchor, then swept with a turn test.
 *
 * FIX: the turn test was inverted — it popped hull points on strict
 * left (counter-clockwise) turns and kept them otherwise, so the result
 * was a concave chain that could include interior points. The scan now
 * pops while the last two hull points and the candidate do NOT make a
 * strict left turn (cross <= 0 also removes collinear points).
 *
 * @param {Array<{x: number, y: number}>} points
 * @returns {Array<{x: number, y: number}>} Hull vertices in sweep order.
 */
function convexHull(points) {
  if (points.length <= 3) return points;

  // Anchor: lowest y, ties broken by lowest x.
  let start = 0;
  for (let i = 1; i < points.length; i++) {
    if (points[i].y < points[start].y ||
        (points[i].y === points[start].y && points[i].x < points[start].x)) {
      start = i;
    }
  }

  // Sort by polar angle around the anchor (stable sort keeps ties sane).
  const sorted = points.slice().sort((a, b) => {
    const angle1 = Math.atan2(a.y - points[start].y, a.x - points[start].x);
    const angle2 = Math.atan2(b.y - points[start].y, b.x - points[start].x);
    return angle1 - angle2;
  });

  const hull = [];
  for (const p of sorted) {
    while (hull.length >= 2) {
      const o = hull[hull.length - 2];
      const a = hull[hull.length - 1];
      const cross = (a.x - o.x) * (p.y - o.y) - (a.y - o.y) * (p.x - o.x);
      if (cross > 0) break; // strict left turn: keep `a`
      hull.pop();           // right turn or collinear: discard `a`
    }
    hull.push(p);
  }

  return hull;
}
670
+
671
+ /**
672
+ * Display detected edges on canvas
673
+ * @param {Uint8Array} binary
674
+ * @param {number} width
675
+ * @param {number} height
676
+ */
677
/**
 * Paint the binary edge map onto the overlay canvas (edge pixels render
 * white on black) and draw detected primitives on top: circles in green,
 * rectangles in red, line segments in blue.
 * @param {Uint8Array} binary - Binary edge image (0 or 255 per pixel).
 * @param {number} width
 * @param {number} height
 */
function displayDetectedEdges(binary, width, height) {
  const canvas = document.getElementById('photo-cad-edges-canvas');
  if (!canvas) return;

  canvas.width = width;
  canvas.height = height;
  const ctx = canvas.getContext('2d');

  // Copy the binary map into an opaque grayscale RGBA buffer.
  const imageData = ctx.createImageData(width, height);
  const rgba = imageData.data;
  for (let i = 0; i < binary.length; i++) {
    const o = i * 4;
    rgba[o] = rgba[o + 1] = rgba[o + 2] = binary[i];
    rgba[o + 3] = 255;
  }
  ctx.putImageData(imageData, 0, 0);

  ctx.strokeStyle = '#00FF00';
  ctx.lineWidth = 2;
  for (const circle of state.detectedFeatures.circles) {
    ctx.beginPath();
    ctx.arc(circle.x, circle.y, circle.radius, 0, Math.PI * 2);
    ctx.stroke();
  }

  ctx.strokeStyle = '#FF0000';
  for (const rect of state.detectedFeatures.rectangles) {
    ctx.strokeRect(rect.x, rect.y, rect.width, rect.height);
  }

  ctx.strokeStyle = '#0000FF';
  for (const line of state.detectedFeatures.lines) {
    ctx.beginPath();
    ctx.moveTo(line.x1, line.y1);
    ctx.lineTo(line.x2, line.y2);
    ctx.stroke();
  }
}
720
+
721
+ // ============================================================================
722
+ // SECTION 3: GEOMETRY RECONSTRUCTION (~400 lines)
723
+ // ============================================================================
724
+
725
+ /**
726
+ * Reconstruct 3D geometry from detected features
727
+ */
728
/**
 * Build a 3D solid from the user-selected features and show it in the
 * preview. Base solid selection: a selected rectangle becomes a box,
 * otherwise the first detected circle becomes a cylinder, otherwise a
 * default 100x100x20 plate. Selected circles are intended to become
 * holes, but the boolean operation is currently a stub.
 */
function reconstruct3D() {
  const selected = Array.from(state.selectedFeatures);
  if (selected.length === 0) {
    alert('Please select features to reconstruct');
    return;
  }

  let baseGeometry = null;
  const rectId = selected.find((id) => id.startsWith('rect-'));

  if (rectId) {
    const rect = state.detectedFeatures.rectangles.find((r) => r.id === rectId);
    if (rect) {
      baseGeometry = createBox(rect.width, rect.height, 20);
    }
  } else {
    const circle = state.detectedFeatures.circles[0];
    if (circle) {
      baseGeometry = createCylinder(circle.radius, circle.radius * 0.5, 20);
    }
  }

  // Fallback plate when nothing usable was selected/detected.
  if (!baseGeometry) {
    baseGeometry = createBox(100, 100, 20);
  }

  // Selected circle features become hole cylinders; the boolean
  // combine step is still a stub.
  for (const id of selected) {
    if (!id.startsWith('circle-')) continue;
    const circle = state.detectedFeatures.circles.find((c) => c.id === id);
    if (circle) {
      const hole = createCylinder(circle.radius, 30, 16);
      // TODO: boolean-combine `hole` with the base geometry.
    }
  }

  update3DPreview(baseGeometry);
}
770
+
771
+ /**
772
+ * Create box geometry
773
+ * @param {number} width
774
+ * @param {number} height
775
+ * @param {number} depth
776
+ * @returns {THREE.BufferGeometry}
777
+ */
778
/**
 * Build an axis-aligned box solid.
 * @param {number} width - X extent.
 * @param {number} height - Y extent.
 * @param {number} depth - Z extent.
 * @returns {THREE.BufferGeometry}
 */
function createBox(width, height, depth) {
  const box = new THREE.BoxGeometry(width, height, depth);
  return box;
}
781
+
782
+ /**
783
+ * Create cylinder geometry
784
+ * @param {number} radius
785
+ * @param {number} height
786
+ * @param {number} segments
787
+ * @returns {THREE.BufferGeometry}
788
+ */
789
/**
 * Build a straight (non-tapered) cylinder solid.
 * @param {number} radius - Radius of both caps.
 * @param {number} height
 * @param {number} [segments=16] - Radial segment count.
 * @returns {THREE.BufferGeometry}
 */
function createCylinder(radius, height, segments = 16) {
  // Equal top/bottom radii => straight cylinder.
  return new THREE.CylinderGeometry(radius, radius, height, segments);
}
792
+
793
+ /**
794
+ * Create revolved geometry from profile
795
+ * @param {Array<{x, y}>} profile - 2D profile curve
796
+ * @returns {THREE.BufferGeometry}
797
+ */
798
/**
 * Revolve a 2D profile curve around the Y axis (lathe).
 *
 * FIX: removed a dead, unused THREE.LineCurve3 construction, and the
 * profile is now mapped to THREE.Vector2 as the LatheGeometry API
 * specifies (x = radius from the axis, y = height). The previous
 * Vector3 points only worked because LatheGeometry happens to read
 * just .x/.y.
 *
 * @param {Array<{x: number, y: number}>} profile - 2D profile points.
 * @returns {THREE.BufferGeometry}
 */
function createRevolved(profile) {
  const points = profile.map(p => new THREE.Vector2(p.x, p.y));
  return new THREE.LatheGeometry(points, 16);
}
811
+
812
+ /**
813
+ * Update 3D preview
814
+ * @param {THREE.BufferGeometry} geometry
815
+ */
816
/**
 * Render a geometry in the 3D preview panel with a slow auto-rotate.
 *
 * FIX (resource leaks): each call previously left the old animation
 * loop running, stacked another window "resize" listener, and cleared
 * innerHTML without disposing the WebGL renderer. The RAF id and resize
 * handler are now stored on `state` and torn down before a new preview
 * is built, and the old renderer is disposed.
 *
 * @param {THREE.BufferGeometry} geometry
 */
function update3DPreview(geometry) {
  const preview = document.getElementById('photo-cad-3d-preview');
  if (!preview) return;

  // Tear down the previous preview completely.
  if (state.threeDAnimationId) {
    cancelAnimationFrame(state.threeDAnimationId);
    state.threeDAnimationId = null;
  }
  if (state.threeDResizeHandler) {
    window.removeEventListener('resize', state.threeDResizeHandler);
    state.threeDResizeHandler = null;
  }
  if (state.threeDRenderer) {
    state.threeDRenderer.dispose(); // free the GL context, not just the DOM node
    preview.innerHTML = '';
  }

  const width = preview.clientWidth;
  const height = preview.clientHeight;

  state.threeDScene = new THREE.Scene();
  state.threeDScene.background = new THREE.Color(0x1a1a1a);

  state.threeDRenderer = new THREE.WebGLRenderer({antialias: true});
  state.threeDRenderer.setSize(width, height);
  state.threeDRenderer.setPixelRatio(window.devicePixelRatio);
  preview.appendChild(state.threeDRenderer.domElement);

  // Camera looking at the origin from an isometric-ish vantage point.
  const camera = new THREE.PerspectiveCamera(75, width / height, 0.1, 1000);
  camera.position.set(150, 100, 150);
  camera.lookAt(0, 0, 0);

  // Key light + ambient fill.
  const light1 = new THREE.DirectionalLight(0xffffff, 0.8);
  light1.position.set(1, 1, 1);
  state.threeDScene.add(light1);
  state.threeDScene.add(new THREE.AmbientLight(0x666666));

  // Shaded mesh plus a white wireframe edge overlay.
  const material = new THREE.MeshPhongMaterial({color: 0x4a90e2});
  const mesh = new THREE.Mesh(geometry, material);
  state.threeDScene.add(mesh);

  const edges = new THREE.EdgesGeometry(geometry);
  const line = new THREE.LineSegments(edges, new THREE.LineBasicMaterial({color: 0xffffff}));
  state.threeDScene.add(line);

  // Slow tumble so the part can be inspected from all sides; the RAF id
  // is kept so the next update can cancel this loop.
  const animate = () => {
    state.threeDAnimationId = requestAnimationFrame(animate);
    mesh.rotation.x += 0.005;
    mesh.rotation.y += 0.008;
    state.threeDRenderer.render(state.threeDScene, camera);
  };
  animate();

  // Keep aspect/size in sync with the panel; stored for later removal.
  state.threeDResizeHandler = () => {
    const w = preview.clientWidth;
    const h = preview.clientHeight;
    camera.aspect = w / h;
    camera.updateProjectionMatrix();
    state.threeDRenderer.setSize(w, h);
  };
  window.addEventListener('resize', state.threeDResizeHandler);
}
878
+
879
+ /**
880
+ * Detect symmetry in contour for revolve operations
881
+ * @param {Array<{x, y}>} contour
882
+ * @returns {boolean} True if symmetric
883
+ */
884
/**
 * Heuristic mirror-symmetry check used to decide whether a contour is a
 * good candidate for a revolve (lathe) reconstruction. Walks the contour
 * from both ends simultaneously and counts pairs that land within
 * `tolerance` pixels of each other.
 *
 * Generalized: the previously hard-coded 10px distance and 0.7 ratio are
 * now defaulted parameters, so default-call behavior is unchanged.
 *
 * @param {Array<{x: number, y: number}>} contour
 * @param {number} [tolerance=10] - Max pixel distance for a pair to count.
 * @param {number} [minRatio=0.7] - Fraction of matching pairs required.
 * @returns {boolean} True if the contour looks mirror-symmetric.
 */
function detectSymmetry(contour, tolerance = 10, minRatio = 0.7) {
  if (contour.length < 4) return false;

  const mid = Math.floor(contour.length / 2);
  let symmetricCount = 0;

  for (let i = 0; i < mid; i++) {
    const p1 = contour[i];
    const p2 = contour[contour.length - 1 - i];
    const dist = Math.hypot(p1.x - p2.x, p1.y - p2.y);
    if (dist < tolerance) symmetricCount++;
  }

  return (symmetricCount / mid) > minRatio;
}
899
+
900
+ // ============================================================================
901
+ // SECTION 4: INTERACTIVE REFINEMENT (~300 lines)
902
+ // ============================================================================
903
+
904
+ /**
905
+ * Toggle feature selection
906
+ * @param {string} featureId
907
+ */
908
/**
 * Toggle a feature id in the selection set and refresh the feature list UI.
 * @param {string} featureId
 */
function toggleFeature(featureId) {
  const selected = state.selectedFeatures;
  // Set.delete returns false when the id was absent -> add it instead.
  if (!selected.delete(featureId)) {
    selected.add(featureId);
  }
  updateFeatureList();
}
916
+
917
+ /**
918
+ * Update feature list UI
919
+ */
920
/**
 * Rebuild the feature checkbox list from the current detection results,
 * preserving which ids are checked in state.selectedFeatures.
 */
function updateFeatureList() {
  const list = document.getElementById('photo-cad-feature-list');
  if (!list) return;

  list.innerHTML = '';

  // Flatten all primitive kinds into one tagged list.
  const allFeatures = [
    ...state.detectedFeatures.circles.map(c => ({ ...c, type: 'circle' })),
    ...state.detectedFeatures.rectangles.map(r => ({ ...r, type: 'rectangle' })),
    ...state.detectedFeatures.lines.map(l => ({ ...l, type: 'line' }))
  ];

  for (const feature of allFeatures) {
    const row = document.createElement('div');
    row.className = 'photo-cad-feature-item';
    row.innerHTML = `
      <input type="checkbox" id="feat-${feature.id}" ${state.selectedFeatures.has(feature.id) ? 'checked' : ''}>
      <label for="feat-${feature.id}" class="photo-cad-feature-label">
        <span class="photo-cad-feature-type">${feature.type}</span>
        ${feature.type === 'circle' ? `r=${Math.round(feature.radius)}px` : ''}
        ${feature.type === 'rectangle' ? `${Math.round(feature.width)}×${Math.round(feature.height)}px` : ''}
      </label>
    `;
    row.querySelector('input').addEventListener('change', () => toggleFeature(feature.id));
    list.appendChild(row);
  }
}
947
+
948
+ /**
949
+ * Set reference dimension
950
+ * Point 1 and point 2 are clicked in canvas, then real dimension entered
951
+ */
952
/**
 * Read the user's pixel/mm calibration inputs and store the px→mm scale
 * on state.referenceDimension. Invalid or non-positive values trigger an
 * alert and leave the previous calibration untouched.
 */
function setReferenceDimension() {
  const pixelInput = document.getElementById('photo-cad-ref-pixels');
  const mmInput = document.getElementById('photo-cad-ref-mm');
  if (!pixelInput || !mmInput) return;

  const pixels = parseFloat(pixelInput.value);
  const mm = parseFloat(mmInput.value);

  // Guard clause: both values must parse and be strictly positive.
  if (Number.isNaN(pixels) || Number.isNaN(mm) || pixels <= 0 || mm <= 0) {
    alert('Please enter valid pixel and mm values');
    return;
  }

  state.referenceDimension = { pixels, mm, scale: mm / pixels };
  alert(`Scale set: 1 pixel = ${state.referenceDimension.scale.toFixed(4)} mm`);
}
968
+
969
+ /**
970
+ * Adjust edge detection sensitivity
971
+ * @param {number} value 0-1
972
+ */
973
/**
 * Store a new edge-detection sensitivity and re-run the whole pipeline.
 * @param {number} value - Sensitivity in [0, 1]; higher keeps more edges.
 */
function setEdgeSensitivity(value) {
  state.edgeSensitivity = value;
  detectEdges(); // full re-detect with the new threshold
}
977
+
978
+ // ============================================================================
979
+ // SECTION 5: AI ENHANCEMENT (~200 lines)
980
+ // ============================================================================
981
+
982
+ /**
983
+ * Enhance detection with Gemini Flash Vision API
984
+ */
985
/**
 * Ask the Gemini Flash vision endpoint to describe the loaded photo
 * (part type, rough dimensions, features, material) and store the parsed
 * JSON on state.aiMetadata. Silently returns if no image or API key.
 *
 * FIXES: the response status is now checked, and the candidate lookup is
 * fully optional-chained — `data.candidates[0]` previously threw a
 * TypeError whenever an error response had no `candidates` array.
 */
async function enhanceWithAI() {
  if (!state.originalImage || !window.GEMINI_API_KEY) return;

  try {
    const response = await fetch('https://generativelanguage.googleapis.com/v1/models/gemini-2.0-flash:generateContent?key=' + window.GEMINI_API_KEY, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        contents: [{
          parts: [
            {
              text: 'Analyze this CAD part image. Identify: 1) Part type (e.g., "cylindrical shaft", "rectangular plate"), 2) Estimated dimensions (width, height, depth in relative units), 3) Key features (holes, fillets, threads, etc.), 4) Likely material (metal, plastic, composite). Respond in JSON format: {partType, estimatedDimensions: {w, h, d}, features: [], material}'
            },
            {
              inlineData: {
                mimeType: 'image/png',
                data: state.originalImage.split(',')[1]
              }
            }
          ]
        }]
      })
    });

    if (!response.ok) {
      throw new Error(`Gemini API request failed with status ${response.status}`);
    }

    const data = await response.json();
    // Fully optional-chained: error payloads may lack `candidates` entirely.
    const text = data.candidates?.[0]?.content?.parts?.[0]?.text || '';

    try {
      // The model may wrap the JSON in prose; grab the first {...} span.
      const jsonMatch = text.match(/\{[\s\S]*\}/);
      if (jsonMatch) {
        state.aiMetadata = JSON.parse(jsonMatch[0]);
        updateAIMetadata();
      }
    } catch (e) {
      console.log('AI metadata parsing failed:', e);
    }
  } catch (err) {
    console.error('AI enhancement error:', err);
  }
}
1027
+
1028
/**
 * Render state.aiMetadata into the #photo-cad-ai-metadata panel.
 *
 * The metadata originates from an external AI reply and is therefore
 * untrusted: every interpolated string is HTML-escaped before being
 * assigned via innerHTML, so a malicious reply cannot inject markup
 * (the previous version interpolated the raw strings — an XSS vector).
 */
function updateAIMetadata() {
  const panel = document.getElementById('photo-cad-ai-metadata');
  if (!panel || !state.aiMetadata) return;

  // Minimal HTML escaper for untrusted text nodes/attribute-free context.
  const esc = (s) => String(s)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;');

  panel.innerHTML = `
    <div class="photo-cad-ai-section">
      <strong>Part Type:</strong> ${esc(state.aiMetadata.partType || 'Unknown')}<br>
      <strong>Material:</strong> ${esc(state.aiMetadata.material || 'Unknown')}<br>
      <strong>Features:</strong> ${esc((state.aiMetadata.features || []).join(', ') || 'None detected')}
    </div>
  `;
}
1043
+
1044
+ // ============================================================================
1045
+ // SECTION 6: UI PANEL (~200 lines)
1046
+ // ============================================================================
1047
+
1048
/**
 * Initialize the Photo-to-CAD module.
 *
 * Wires up the image-input handlers (drag/drop, file picker) and the
 * panel control listeners. Intended to be called once at startup.
 */
function init() {
  initImageInput();
  setupUIEventListeners();
}
1055
+
1056
/**
 * Attach event handlers to the Photo-to-CAD panel controls.
 *
 * Every lookup is guarded, so calling this before the panel exists in
 * the DOM is a safe no-op for the missing elements.
 */
function setupUIEventListeners() {
  // Bind `handler` to `event` on the element with `id`, if it exists.
  const bind = (id, event, handler) => {
    const el = document.getElementById(id);
    if (el) el.addEventListener(event, handler);
  };

  bind('photo-cad-sensitivity', 'input', (e) => setEdgeSensitivity(parseFloat(e.target.value)));
  bind('photo-cad-set-ref-btn', 'click', setReferenceDimension);
  bind('photo-cad-reconstruct-btn', 'click', reconstruct3D);
  bind('photo-cad-export-btn', 'click', exportModel);
}
1080
+
1081
/**
 * Build and return the Photo-to-CAD side panel element.
 *
 * Creates the full panel markup (upload, edge detection, features,
 * reference dimension, AI analysis, 3D preview, action buttons) and
 * injects the module stylesheet into <head>. The stylesheet is injected
 * at most once — the previous version appended a duplicate <style> node
 * on every call.
 * @returns {HTMLElement} The detached panel element; caller inserts it.
 */
function getUI() {
  const panel = document.createElement('div');
  panel.className = 'photo-cad-panel';
  panel.innerHTML = `
    <div class="photo-cad-header">
      <h3>Photo-to-CAD</h3>
      <p style="font-size:0.85rem;color:var(--text-secondary);">Convert photos to 3D models</p>
    </div>

    <div class="photo-cad-section">
      <h4>1. Upload Image</h4>
      <div id="photo-cad-drop-zone" class="photo-cad-drop-zone">
        <p>Drag image or click to browse</p>
        <input id="photo-cad-file-input" type="file" accept="image/*" style="display:none;">
      </div>
      <button id="photo-cad-camera-btn" class="photo-cad-btn-secondary" style="width:100%;margin-top:10px;">
        📷 Camera Capture
      </button>
    </div>

    <div class="photo-cad-section">
      <h4>2. Edge Detection</h4>
      <label>Sensitivity:
        <input id="photo-cad-sensitivity" type="range" min="0" max="1" step="0.1" value="0.5" style="width:100%;">
      </label>
      <div id="photo-cad-image-preview" style="margin-top:10px;"></div>
      <canvas id="photo-cad-edges-canvas" style="width:100%;max-width:100%;margin-top:10px;border-radius:4px;display:none;"></canvas>
    </div>

    <div class="photo-cad-section">
      <h4>3. Detected Features</h4>
      <div id="photo-cad-feature-list" style="max-height:200px;overflow-y:auto;border:1px solid var(--border);padding:8px;border-radius:4px;"></div>
    </div>

    <div class="photo-cad-section">
      <h4>4. Reference Dimension</h4>
      <div style="display:grid;grid-template-columns:1fr 1fr;gap:8px;">
        <div>
          <label>Pixels:</label>
          <input id="photo-cad-ref-pixels" type="number" placeholder="0" style="width:100%;padding:6px;border:1px solid var(--border);border-radius:4px;">
        </div>
        <div>
          <label>mm:</label>
          <input id="photo-cad-ref-mm" type="number" placeholder="0" style="width:100%;padding:6px;border:1px solid var(--border);border-radius:4px;">
        </div>
      </div>
      <button id="photo-cad-set-ref-btn" class="photo-cad-btn-secondary" style="width:100%;margin-top:8px;">Set Scale</button>
    </div>

    <div class="photo-cad-section">
      <h4>5. AI Analysis</h4>
      <div id="photo-cad-ai-metadata" style="font-size:0.9rem;padding:8px;background:var(--bg-secondary);border-radius:4px;"></div>
    </div>

    <div class="photo-cad-section">
      <h4>3D Preview</h4>
      <div id="photo-cad-3d-preview" style="width:100%;height:300px;border:1px solid var(--border);border-radius:4px;background:var(--bg-secondary);"></div>
    </div>

    <div class="photo-cad-section">
      <button id="photo-cad-reconstruct-btn" class="photo-cad-btn-primary" style="width:100%;">Reconstruct 3D</button>
      <button id="photo-cad-export-btn" class="photo-cad-btn-secondary" style="width:100%;margin-top:8px;">Export Model</button>
    </div>
  `;

  // Inject module styles exactly once; repeated getUI() calls previously
  // accumulated identical <style> nodes in <head>.
  if (!document.getElementById('photo-cad-styles')) {
    const style = document.createElement('style');
    style.id = 'photo-cad-styles';
    style.textContent = `
      .photo-cad-panel {
        padding: 15px;
        font-size: 0.9rem;
      }

      .photo-cad-header {
        border-bottom: 1px solid var(--border);
        padding-bottom: 10px;
        margin-bottom: 15px;
      }

      .photo-cad-header h3 {
        margin: 0 0 5px 0;
        font-size: 1.1rem;
      }

      .photo-cad-section {
        margin-bottom: 15px;
      }

      .photo-cad-section h4 {
        margin: 0 0 10px 0;
        font-size: 0.95rem;
        font-weight: 600;
      }

      .photo-cad-drop-zone {
        border: 2px dashed var(--border);
        border-radius: 8px;
        padding: 30px;
        text-align: center;
        cursor: pointer;
        transition: all 0.2s;
        background: var(--bg-secondary);
      }

      .photo-cad-drop-zone:hover {
        border-color: var(--accent);
        background: var(--bg-secondary);
      }

      .photo-cad-drop-zone.highlight {
        border-color: var(--accent);
        background: rgba(74, 144, 226, 0.1);
      }

      .photo-cad-feature-item {
        display: flex;
        align-items: center;
        padding: 6px 0;
        border-bottom: 1px solid var(--border);
      }

      .photo-cad-feature-item:last-child {
        border-bottom: none;
      }

      .photo-cad-feature-item input {
        margin-right: 8px;
      }

      .photo-cad-feature-label {
        display: flex;
        align-items: center;
        gap: 8px;
        flex: 1;
        cursor: pointer;
      }

      .photo-cad-feature-type {
        display: inline-block;
        padding: 2px 6px;
        background: var(--accent);
        color: white;
        border-radius: 3px;
        font-size: 0.75rem;
        font-weight: 600;
      }

      .photo-cad-btn-primary {
        width: 100%;
        padding: 10px;
        background: var(--accent);
        color: white;
        border: none;
        border-radius: 4px;
        cursor: pointer;
        font-weight: 600;
        transition: all 0.2s;
      }

      .photo-cad-btn-primary:hover {
        opacity: 0.9;
      }

      .photo-cad-btn-secondary {
        width: 100%;
        padding: 8px;
        background: var(--bg-secondary);
        color: var(--text-primary);
        border: 1px solid var(--border);
        border-radius: 4px;
        cursor: pointer;
        transition: all 0.2s;
      }

      .photo-cad-btn-secondary:hover {
        border-color: var(--accent);
      }

      .photo-cad-ai-section {
        line-height: 1.5;
      }
    `;
    document.head.appendChild(style);
  }

  return panel;
}
1271
+
1272
/**
 * Dispatch a named module command to its handler.
 * Unknown commands are reported with console.warn and otherwise ignored.
 * @param {string} command
 * @param {*} params Command-specific payload (e.g. {dataUrl}, {featureId}).
 */
function execute(command, params) {
  // Command → handler table; each closure captures `params`.
  const handlers = {
    processImage: () => loadImage(params.dataUrl),
    detectEdges: () => detectEdges(),
    toggleFeature: () => toggleFeature(params.featureId),
    reconstruct: () => reconstruct3D(),
    setReference: () => { state.referenceDimension = params; },
    exportModel: () => exportModel(),
  };

  // Own-property check so inherited names (e.g. "toString") don't dispatch.
  if (Object.prototype.hasOwnProperty.call(handlers, command)) {
    handlers[command]();
  } else {
    console.warn('Unknown command:', command);
  }
}
1301
+
1302
/**
 * Export the reconstructed model's data as a downloadable JSON file.
 *
 * NOTE(review): despite the old "JSON or STL" / "glTF" comments, this
 * serializes only metadata and feature data to JSON — no mesh/geometry
 * export happens here. Requires a prior reconstruct3D() call (checked
 * via state.threeDScene); otherwise alerts and returns.
 */
function exportModel() {
  // Guard: nothing meaningful to export before 3D reconstruction.
  if (!state.threeDScene) {
    alert('Please reconstruct 3D model first');
    return;
  }

  // Serialize AI metadata, scale, and detected/selected features to JSON.
  const data = JSON.stringify({
    metadata: state.aiMetadata,
    referenceDimension: state.referenceDimension,
    detectedFeatures: state.detectedFeatures,
    selectedFeatures: Array.from(state.selectedFeatures)
  });

  // Trigger a browser download via a temporary object URL + anchor click.
  const blob = new Blob([data], {type: 'application/json'});
  const url = URL.createObjectURL(blob);
  const a = document.createElement('a');
  a.href = url;
  a.download = 'model-data.json';
  a.click();
  URL.revokeObjectURL(url);
}
1327
+
1328
// ============================================================================
// PUBLIC API
// ============================================================================

// Attach the module under the shared CycleCAD namespace without clobbering
// any sibling modules that may have created the namespace object first.
window.CycleCAD = window.CycleCAD || {};
window.CycleCAD.PhotoToCAD = {
  init,
  getUI,
  execute,
  processImage: loadImage, // public alias for loadImage (presumably takes a data URL — see execute's 'processImage' case)
  detectEdges,
  reconstruct3D,
  state: () => state // accessor function; note the returned object is still mutable by callers
};

console.log('Photo-to-CAD module loaded');
})();