design-clone 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. package/.env.example +14 -0
  2. package/LICENSE +21 -0
  3. package/README.md +166 -0
  4. package/SKILL.md +239 -0
  5. package/bin/cli.js +45 -0
  6. package/bin/commands/help.js +29 -0
  7. package/bin/commands/init.js +126 -0
  8. package/bin/commands/verify.js +99 -0
  9. package/bin/utils/copy.js +65 -0
  10. package/bin/utils/validate.js +122 -0
  11. package/docs/basic-clone.md +63 -0
  12. package/docs/cli-reference.md +94 -0
  13. package/docs/design-clone-architecture.md +247 -0
  14. package/docs/pixel-perfect.md +86 -0
  15. package/docs/troubleshooting.md +97 -0
  16. package/package.json +57 -0
  17. package/requirements.txt +5 -0
  18. package/src/ai/analyze-structure.py +305 -0
  19. package/src/ai/extract-design-tokens.py +439 -0
  20. package/src/ai/prompts/__init__.py +2 -0
  21. package/src/ai/prompts/design_tokens.py +183 -0
  22. package/src/ai/prompts/structure_analysis.py +273 -0
  23. package/src/core/cookie-handler.js +76 -0
  24. package/src/core/css-extractor.js +107 -0
  25. package/src/core/dimension-extractor.js +366 -0
  26. package/src/core/dimension-output.js +208 -0
  27. package/src/core/extract-assets.js +468 -0
  28. package/src/core/filter-css.js +499 -0
  29. package/src/core/html-extractor.js +102 -0
  30. package/src/core/lazy-loader.js +188 -0
  31. package/src/core/page-readiness.js +161 -0
  32. package/src/core/screenshot.js +380 -0
  33. package/src/post-process/enhance-assets.js +157 -0
  34. package/src/post-process/fetch-images.js +398 -0
  35. package/src/post-process/inject-icons.js +311 -0
  36. package/src/utils/__init__.py +16 -0
  37. package/src/utils/__pycache__/__init__.cpython-313.pyc +0 -0
  38. package/src/utils/__pycache__/env.cpython-313.pyc +0 -0
  39. package/src/utils/browser.js +103 -0
  40. package/src/utils/env.js +153 -0
  41. package/src/utils/env.py +134 -0
  42. package/src/utils/helpers.js +71 -0
  43. package/src/utils/puppeteer.js +281 -0
  44. package/src/verification/verify-layout.js +424 -0
  45. package/src/verification/verify-menu.js +422 -0
  46. package/templates/base.css +705 -0
  47. package/templates/base.html +293 -0
@@ -0,0 +1,380 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * Multi-viewport screenshot capture for design cloning
4
+ *
5
+ * Usage:
6
+ * node screenshot.js --url https://example.com --output ./analysis
7
+ *
8
+ * Options:
9
+ * --url Target website URL (required)
10
+ * --output Output directory for screenshots (required)
11
+ * --viewports Comma-separated viewport names: desktop,tablet,mobile (default: all)
12
+ * --full-page Capture full page height (default: true)
13
+ * --max-size Max file size in MB before compression (default: 5)
14
+ * --headless Run in headless mode (default: false)
15
+ * --scroll-delay Pause time in ms between scroll steps (default: 1500)
16
+ * --close Close browser after capture (default: false)
17
+ * --extract-html Extract cleaned HTML (default: false)
18
+ * --extract-css Extract all CSS from page (default: false)
19
+ * --filter-unused Filter CSS to remove unused selectors (default: true)
20
+ */
21
+
22
+ import path from 'path';
23
+ import fs from 'fs/promises';
24
+
25
+ // Import modules
26
+ import { filterCssFile } from './filter-css.js';
27
+ import { getBrowser, getPage, closeBrowser, disconnectBrowser, parseArgs, outputJSON, outputError } from '../utils/browser.js';
28
+
29
+ // Import extracted modules
30
+ import { waitForDomStable, waitForFontsLoaded, waitForStylesStable, waitForPageReady } from './page-readiness.js';
31
+ import { dismissCookieBanner } from './cookie-handler.js';
32
+ import { forceLazyImages, forceAnimatedElementsVisible, triggerLazyLoad, waitForAllImages, LAZY_LOAD_MAX_ITERATIONS } from './lazy-loader.js';
33
+ import { extractCleanHtml, JS_FRAMEWORK_PATTERNS, MAX_HTML_SIZE } from './html-extractor.js';
34
+ import { extractAllCss, MAX_CSS_SIZE } from './css-extractor.js';
35
+ import { extractComponentDimensions } from './dimension-extractor.js';
36
+ import { buildDimensionsOutput, generateAISummary } from './dimension-output.js';
37
+
38
// Try to import Sharp for compression.
// Sharp is treated as an optional dependency: when the dynamic import
// fails, `sharp` stays null and compressIfNeeded() becomes a no-op
// (screenshots are written uncompressed). Top-level await is fine here
// because this file is an ES module.
let sharp = null;
try {
  sharp = (await import('sharp')).default;
} catch {
  // Sharp not available
}

// Constants
// Viewport presets used for multi-viewport capture. The mobile preset
// uses deviceScaleFactor 2 to emulate a high-DPI (retina) device; the
// others capture at 1x.
const VIEWPORTS = {
  desktop: { width: 1440, height: 900, deviceScaleFactor: 1 },
  tablet: { width: 768, height: 1024, deviceScaleFactor: 1 },
  mobile: { width: 375, height: 812, deviceScaleFactor: 2 }
};

// Pause after a viewport change so responsive layout can settle (ms).
const VIEWPORT_SETTLE_DELAY = 1500;
// Max time to wait for network quiescence before screenshotting (ms).
const NETWORK_IDLE_TIMEOUT = 8000;
// Default pause between lazy-load scroll steps (ms); overridable via
// the --scroll-delay CLI flag.
const DEFAULT_SCROLL_DELAY = 1500;
56
+
57
/**
 * Compress image if it exceeds max size.
 *
 * Rewrites a PNG in place when it is larger than `maxSizeMB`. First pass
 * resizes to 85% of the original width at quality 80; if the result is
 * still over budget, a second pass resizes to 70% of the original width
 * at quality 70. Requires the optional module-level `sharp`; when it is
 * unavailable (or the file is already small enough) the file is left
 * untouched.
 *
 * @param {string} filePath - PNG file to (possibly) rewrite in place
 * @param {number} [maxSizeMB=5] - size threshold in megabytes
 * @returns {Promise<{compressed: boolean, originalSize: number, finalSize: number, error?: string}>}
 */
async function compressIfNeeded(filePath, maxSizeMB = 5) {
  const { size: originalSize } = await fs.stat(filePath);
  const maxBytes = maxSizeMB * 1024 * 1024;

  // Nothing to do: file is within budget, or sharp is not installed.
  if (originalSize <= maxBytes || !sharp) {
    return { compressed: false, originalSize, finalSize: originalSize };
  }

  try {
    const source = await fs.readFile(filePath);
    const meta = await sharp(source).metadata();

    // First attempt: shrink to 85% width.
    let result = await sharp(source)
      .resize(Math.round(meta.width * 0.85))
      .png({ quality: 80, compressionLevel: 9 })
      .toBuffer();

    // Still over budget? Retry more aggressively at 70% width.
    if (result.length > maxBytes) {
      result = await sharp(source)
        .resize(Math.round(meta.width * 0.7))
        .png({ quality: 70, compressionLevel: 9 })
        .toBuffer();
    }

    await fs.writeFile(filePath, result);
    return { compressed: true, originalSize, finalSize: result.length };
  } catch (err) {
    // Best-effort: on any sharp failure keep the original file on disk.
    return { compressed: false, originalSize, finalSize: originalSize, error: err.message };
  }
}
93
+
94
/**
 * Capture screenshot for a single viewport.
 *
 * Applies the viewport, waits for layout/fonts/styles to settle, measures
 * component dimensions, forces lazy content to load, scrolls back to the
 * top, and finally writes a PNG (compressing it when oversized).
 *
 * @param {object} page - already-navigated Puppeteer page
 * @param {string} viewport - key into VIEWPORTS (desktop|tablet|mobile)
 * @param {string} outputPath - destination PNG path
 * @param {boolean} [fullPage=true] - capture the entire page height
 * @param {number} [maxSize=5] - compression threshold in MB
 * @param {number} [scrollDelay=DEFAULT_SCROLL_DELAY] - lazy-load scroll pause (ms)
 * @returns {Promise<object>} capture metadata (path, dimensions, stats, size)
 */
async function captureViewport(page, viewport, outputPath, fullPage = true, maxSize = 5, scrollDelay = DEFAULT_SCROLL_DELAY) {
  const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

  // Apply the viewport and give responsive layout time to settle.
  await page.setViewport(VIEWPORTS[viewport]);
  await sleep(VIEWPORT_SETTLE_DELAY);
  await waitForDomStable(page, 300, 5000);
  await waitForFontsLoaded(page, 3000);
  await waitForStylesStable(page, 200, 2000);

  // Measure components before lazy-loading mutates the page.
  const componentDimensions = await extractComponentDimensions(page, viewport);

  // Force lazy images in, scroll the page to trigger loaders, force
  // again for anything revealed by scrolling, then wait for decoding.
  const lazyStats = await forceLazyImages(page);
  const scrollInfo = await triggerLazyLoad(page, LAZY_LOAD_MAX_ITERATIONS, scrollDelay);
  await forceLazyImages(page);
  const imageStats = await waitForAllImages(page, 15000);

  try {
    await page.waitForNetworkIdle({ timeout: NETWORK_IDLE_TIMEOUT });
  } catch {
    // Timeout ok
  }

  // Final settle pass: DOM, fonts, and scroll-triggered animations.
  await sleep(2000);
  await waitForDomStable(page, 300, 3000);
  await waitForFontsLoaded(page, 2000);
  const animStats = await forceAnimatedElementsVisible(page);
  await sleep(300);

  // Return to the top so a fixed-viewport capture starts at y=0.
  await page.evaluate(() => {
    window.scrollTo(0, 0);
    document.documentElement.scrollTop = 0;
    document.body.scrollTop = 0;
  });
  await sleep(500);

  await page.screenshot({ path: outputPath, type: 'png', fullPage: fullPage });
  const compression = await compressIfNeeded(outputPath, maxSize);

  return {
    viewport,
    path: path.resolve(outputPath),
    dimensions: VIEWPORTS[viewport],
    componentDimensions,
    scrollInfo,
    imageStats,
    size: compression.finalSize,
    compressed: compression.compressed
  };
}
144
+
145
/**
 * Main capture function.
 *
 * CLI entry point: parses argv, navigates to --url, optionally extracts
 * HTML/CSS (with unused-CSS filtering), captures one screenshot per
 * requested viewport (restarting the browser when the headless mode must
 * change), writes component-dimension JSON files, and emits a single
 * JSON result on stdout. Exits the process itself (0 on success,
 * 1 on any error).
 */
async function captureMultiViewport() {
  const args = parseArgs(process.argv.slice(2));

  // --url and --output are mandatory; fail fast with a JSON error.
  if (!args.url) {
    outputError(new Error('--url is required'));
    process.exit(1);
  }
  if (!args.output) {
    outputError(new Error('--output directory is required'));
    process.exit(1);
  }

  // Defaults documented in the file header: all viewports, full page,
  // 5 MB compression budget, CSS filtering on, extraction off.
  const requestedViewports = args.viewports
    ? args.viewports.split(',').map(v => v.trim().toLowerCase())
    : ['desktop', 'tablet', 'mobile'];
  const fullPage = args['full-page'] !== 'false';
  const maxSize = args['max-size'] ? parseFloat(args['max-size']) : 5;
  const scrollDelay = args['scroll-delay'] ? parseInt(args['scroll-delay'], 10) : DEFAULT_SCROLL_DELAY;
  const extractHtml = args['extract-html'] === 'true';
  const extractCss = args['extract-css'] === 'true';
  const filterUnused = args['filter-unused'] !== 'false';

  // Validate viewport names before launching a browser.
  for (const vp of requestedViewports) {
    if (!VIEWPORTS[vp]) {
      outputError(new Error(`Invalid viewport: ${vp}. Valid: desktop, tablet, mobile`));
      process.exit(1);
    }
  }

  try {
    await fs.mkdir(args.output, { recursive: true });

    // Desktop is always captured headless; other viewports honor the
    // --headless flag. Mixed modes force a browser restart mid-run.
    const cliHeadless = args.headless === 'true';
    const getHeadlessForViewport = (viewport) => viewport === 'desktop' ? true : cliHeadless;

    // Browser lifecycle state shared by the initBrowser closure below.
    let currentHeadless = null;
    let browser = null;
    let page = null;
    let cookieResult = null;

    // (Re)creates the browser when none exists or the headless mode
    // changed; on first navigation also dismisses cookie banners and
    // waits for page readiness. Mutates the closure state above.
    const initBrowser = async (headless, navigateUrl = null) => {
      if (browser && currentHeadless !== headless) {
        await closeBrowser();
        browser = null;
        page = null;
      }

      if (!browser) {
        browser = await getBrowser({
          headless,
          args: headless ? [] : ['--start-maximized', '--window-position=0,0']
        });
        page = await getPage(browser);
        currentHeadless = headless;

        if (navigateUrl) {
          await page.setViewport(VIEWPORTS.desktop);
          await page.goto(navigateUrl, { waitUntil: ['load', 'networkidle0'], timeout: 60000 });
          await new Promise(r => setTimeout(r, 3000));
          cookieResult = await dismissCookieBanner(page);
          await waitForPageReady(page);
        }
      }
      return { browser, page };
    };

    const firstViewportHeadless = getHeadlessForViewport(requestedViewports[0]);
    await initBrowser(firstViewportHeadless, args.url);

    // Extract HTML/CSS (optional; both steps are best-effort and record
    // their failure in `extraction` instead of aborting the capture).
    let extraction = null;
    const extractionWarnings = [];

    if (extractHtml || extractCss) {
      extraction = { html: null, css: null, warnings: [] };

      if (extractHtml) {
        try {
          const htmlResult = await extractCleanHtml(page, JS_FRAMEWORK_PATTERNS);
          const html = htmlResult.html;
          const htmlSize = Buffer.byteLength(html, 'utf-8');

          // Hard cap to avoid writing pathological pages to disk.
          if (htmlSize > MAX_HTML_SIZE) {
            throw new Error(`HTML size exceeds ${MAX_HTML_SIZE / 1024 / 1024}MB limit`);
          }

          const htmlPath = path.join(args.output, 'source.html');
          await fs.writeFile(htmlPath, html, 'utf-8');
          extraction.html = { path: path.resolve(htmlPath), size: htmlSize, elementCount: htmlResult.elementCount };
          if (htmlResult.warnings.length > 0) extractionWarnings.push(...htmlResult.warnings);
        } catch (error) {
          extraction.html = { error: error.message, failed: true };
          extractionWarnings.push(`HTML extraction failed: ${error.message}`);
        }
      }

      if (extractCss) {
        try {
          const cssData = await extractAllCss(page, args.url);
          // Concatenate every stylesheet block, annotated with its origin.
          const rawCss = cssData.cssBlocks.map(b => `/* Source: ${b.source} */\n${b.css}`).join('\n\n');
          const cssSize = Buffer.byteLength(rawCss, 'utf-8');

          if (cssSize > MAX_CSS_SIZE) {
            throw new Error(`CSS size exceeds ${MAX_CSS_SIZE / 1024 / 1024}MB limit`);
          }

          const rawCssPath = path.join(args.output, 'source-raw.css');
          await fs.writeFile(rawCssPath, rawCss, 'utf-8');

          extraction.css = {
            path: path.resolve(rawCssPath),
            size: cssSize,
            blocks: cssData.cssBlocks.length,
            totalRules: cssData.totalRules,
            corsBlocked: cssData.corsBlocked,
            computedStyles: cssData.computedStyles
          };

          // Computed styles are written separately so downstream AI
          // analysis can consume them without parsing the raw CSS.
          if (Object.keys(cssData.computedStyles).length > 0) {
            const stylesPath = path.join(args.output, 'computed-styles.json');
            await fs.writeFile(stylesPath, JSON.stringify(cssData.computedStyles, null, 2));
          }

          if (cssData.warnings.length > 0) extractionWarnings.push(...cssData.warnings);
          if (cssData.corsBlocked.length > 0) extractionWarnings.push(`${cssData.corsBlocked.length} CORS-blocked stylesheets`);
        } catch (error) {
          extraction.css = { error: error.message, failed: true };
          extractionWarnings.push(`CSS extraction failed: ${error.message}`);
        }
      }

      // Filter CSS — only when both extractions succeeded, since the
      // filter needs the extracted HTML to decide which selectors match.
      if (filterUnused && extraction?.html?.path && extraction?.css?.path && !extraction.html.failed && !extraction.css.failed) {
        try {
          const filteredCssPath = path.join(args.output, 'source.css');
          const filterResult = await filterCssFile(extraction.html.path, extraction.css.path, filteredCssPath, false, args.output);
          extraction.filtered = {
            path: filterResult.output.path,
            size: filterResult.output.size,
            reduction: filterResult.stats.reduction,
            stats: { totalRules: filterResult.stats.totalRules, keptRules: filterResult.stats.keptRules, removedRules: filterResult.stats.removedRules }
          };
          // Progress lines go to stderr (and only on a TTY) so stdout
          // stays pure JSON for the caller.
          if (process.stderr.isTTY) console.error(`[INFO] CSS filtered: ${filterResult.stats.reduction} reduction`);
        } catch (error) {
          extraction.filtered = { error: error.message, failed: true };
          extractionWarnings.push(`CSS filtering failed: ${error.message}`);
        }
      }

      extraction.warnings = extractionWarnings;
      if (extractionWarnings.length > 0 && process.stderr.isTTY) {
        extractionWarnings.forEach(w => console.error(`[WARN] ${w}`));
      }
    }

    // Capture viewports, restarting the browser whenever the required
    // headless mode differs from the current one.
    const screenshots = [];
    const browserRestarts = [];
    for (const viewport of requestedViewports) {
      const viewportHeadless = getHeadlessForViewport(viewport);
      if (currentHeadless !== viewportHeadless) {
        browserRestarts.push({ viewport, from: currentHeadless ? 'headless' : 'headed', to: viewportHeadless ? 'headless' : 'headed' });
        if (process.stderr.isTTY) console.error(`[INFO] Switching to ${viewportHeadless ? 'headless' : 'headed'} for ${viewport}`);
        await initBrowser(viewportHeadless, args.url);
      }

      const outputPath = path.join(args.output, `${viewport}.png`);
      const result = await captureViewport(page, viewport, outputPath, fullPage, maxSize, scrollDelay);
      screenshots.push(result);
    }

    // Build dimension output: collect per-viewport measurements taken
    // during each capture into one map keyed by viewport name.
    const allViewportDimensions = {};
    for (const screenshot of screenshots) {
      if (screenshot.componentDimensions) {
        allViewportDimensions[screenshot.viewport] = screenshot.componentDimensions;
      }
    }

    const dimensionsOutput = buildDimensionsOutput(allViewportDimensions, args.url);
    const dimensionsPath = path.join(args.output, 'component-dimensions.json');
    await fs.writeFile(dimensionsPath, JSON.stringify(dimensionsOutput, null, 2));

    const aiSummary = generateAISummary(dimensionsOutput);
    const summaryPath = path.join(args.output, 'dimensions-summary.json');
    await fs.writeFile(summaryPath, JSON.stringify(aiSummary, null, 2));

    // Aggregate per-viewport counts for the stderr progress line and
    // the final JSON stats.
    const totalContainers = Object.values(dimensionsOutput.viewports).reduce((sum, vp) => sum + (vp.containers?.length || 0), 0);
    const totalCards = Object.values(dimensionsOutput.viewports).reduce((sum, vp) => sum + (vp.cards?.length || 0), 0);
    const totalGrids = Object.values(dimensionsOutput.viewports).reduce((sum, vp) => sum + (vp.gridLayouts?.length || 0), 0);

    if (process.stderr.isTTY) {
      console.error(`[INFO] Extracted: ${totalContainers} containers, ${totalCards} card groups, ${totalGrids} grid layouts`);
    }

    // Single machine-readable result object written to stdout.
    const result = {
      success: true,
      url: args.url,
      outputDir: path.resolve(args.output),
      cookieHandling: cookieResult,
      extraction,
      componentDimensions: {
        full: path.resolve(dimensionsPath),
        summary: path.resolve(summaryPath),
        viewports: Object.keys(dimensionsOutput.viewports),
        stats: { containers: totalContainers, cards: totalCards, gridLayouts: totalGrids,
          typography: Object.values(dimensionsOutput.viewports).reduce((sum, vp) => sum + (vp.typography?.length || 0), 0) }
      },
      screenshots,
      browserRestarts: browserRestarts.length > 0 ? browserRestarts : undefined,
      scrollDelay,
      totalSize: screenshots.reduce((sum, s) => sum + s.size, 0),
      capturedAt: new Date().toISOString()
    };

    outputJSON(result);

    // --close=true tears the browser down; otherwise we only disconnect,
    // leaving it running (presumably for interactive follow-up — the
    // file header documents --close as defaulting to false).
    if (args.close === 'true') {
      await closeBrowser();
    } else {
      await disconnectBrowser();
    }

    process.exit(0);
  } catch (error) {
    outputError(error);
    process.exit(1);
  } finally {
    // NOTE(review): process.exit() above terminates the process before
    // this finally block runs, so this closeBrowser() appears to be dead
    // code. If the exits are ever removed, this would close the browser
    // even when --close=false asked to keep it alive — confirm intent.
    try { await closeBrowser(); } catch { /* ignore */ }
  }
}
379
+
380
// Entry point. Handle the returned promise explicitly so that a failure
// thrown before captureMultiViewport's internal try/catch engages (e.g.
// while parsing argv) still produces a JSON error and a non-zero exit
// instead of an unhandled promise rejection.
captureMultiViewport().catch((error) => {
  outputError(error);
  process.exit(1);
});
@@ -0,0 +1,157 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * Asset Enhancement Orchestrator for Design Clone
4
+ *
5
+ * Enhances generated HTML with:
6
+ * 1. Real images from Unsplash (replaces placeholders)
7
+ * 2. Japanese-style SVG icons
8
+ *
9
+ * Usage:
10
+ * node enhance-assets.js <output-dir> [--verbose]
11
+ *
12
+ * Environment:
13
+ * UNSPLASH_ACCESS_KEY - Optional, for image fetching
14
+ *
15
+ * This script is called automatically by /design:clone after HTML generation.
16
+ */
17
+
18
+ import fs from 'fs/promises';
19
+ import path from 'path';
20
+ import { fetchImages } from './fetch-images.js';
21
+ import { injectIcons } from './inject-icons.js';
22
+
23
/**
 * Parse command line arguments.
 *
 * Recognized flags: --verbose/-v, --skip-images, --skip-icons. The first
 * non-flag token becomes the output directory (a later bare token
 * overwrites an earlier one). Unknown flag-like tokens are ignored.
 *
 * @returns {{outputDir: string|null, verbose: boolean, skipImages: boolean, skipIcons: boolean}}
 */
function parseArgs() {
  const options = {
    outputDir: null,
    verbose: false,
    skipImages: false,
    skipIcons: false
  };

  for (const token of process.argv.slice(2)) {
    switch (token) {
      case '--verbose':
      case '-v':
        options.verbose = true;
        break;
      case '--skip-images':
        options.skipImages = true;
        break;
      case '--skip-icons':
        options.skipIcons = true;
        break;
      default:
        // Bare tokens name the output directory; other dashed tokens
        // fall through unrecognized.
        if (!token.startsWith('-')) {
          options.outputDir = token;
        }
    }
  }

  return options;
}
50
+
51
/**
 * Check if file exists.
 *
 * @param {string} filePath - path to probe
 * @returns {Promise<boolean>} true when the path is accessible
 */
function fileExists(filePath) {
  // fs.access resolves when the path is reachable and rejects otherwise;
  // map both outcomes to a boolean instead of using try/catch.
  return fs.access(filePath).then(
    () => true,
    () => false
  );
}
62
+
63
/**
 * Main enhancement function.
 *
 * Enhances a generated clone in `outputDir` by (1) replacing placeholder
 * images with real Unsplash photos (only when UNSPLASH_ACCESS_KEY is set)
 * and (2) injecting Japanese-style SVG icons into <outputDir>/index.html.
 * Each step is best-effort: a failing step records its error in the
 * returned result instead of aborting the run, so `success` stays true
 * unless index.html itself is missing.
 *
 * @param {string} outputDir - directory containing the generated index.html
 * @param {object} [options]
 * @param {boolean} [options.verbose=false] - pass-through verbosity flag
 * @param {boolean} [options.skipImages=false] - skip Unsplash fetching
 * @param {boolean} [options.skipIcons=false] - skip icon injection
 * @returns {Promise<object>} { success, images, icons } on a normal run,
 *   or { success: false, error } when index.html is absent
 */
async function enhanceAssets(outputDir, options = {}) {
  const {
    verbose = false,
    skipImages = false,
    skipIcons = false
  } = options;

  // Fix: removed the unused `structurePath` local the original declared.
  const htmlPath = path.join(outputDir, 'index.html');

  console.log('🎨 Enhancing assets...');

  // Verify HTML exists before doing any work.
  if (!await fileExists(htmlPath)) {
    console.error(` ✗ HTML not found: ${htmlPath}`);
    return {
      success: false,
      error: 'HTML file not found'
    };
  }

  const results = {
    success: true,
    images: null,
    icons: null
  };

  // Step 1: Fetch and replace images (requires an Unsplash API key).
  if (!skipImages) {
    if (process.env.UNSPLASH_ACCESS_KEY) {
      console.log('📷 Fetching images from Unsplash...');
      try {
        results.images = await fetchImages(htmlPath, outputDir, verbose);
      } catch (error) {
        // Fix: warning glyph was mojibake ("âš ", UTF-8 "⚠" mis-decoded
        // as Latin-1) in the published source.
        console.warn(` ⚠ Image fetch failed: ${error.message}`);
        results.images = { success: false, error: error.message };
      }
    } else {
      console.log(' → Skipping images (set UNSPLASH_ACCESS_KEY to enable)');
      results.images = { skipped: true };
    }
  }

  // Step 2: Inject Japanese-style icons.
  if (!skipIcons) {
    console.log('🎌 Injecting Japanese-style icons...');
    try {
      results.icons = await injectIcons(htmlPath, verbose);
    } catch (error) {
      console.warn(` ⚠ Icon injection failed: ${error.message}`);
      results.icons = { success: false, error: error.message };
    }
  }

  console.log('✅ Asset enhancement complete');

  return results;
}
124
+
125
// CLI execution
// Parse argv once at module load; `args` is also read by the promise
// handlers below.
const args = parseArgs();

// An output directory is mandatory — print usage to stderr and bail out.
if (!args.outputDir) {
  console.error('Usage: node enhance-assets.js <output-dir> [--verbose] [--skip-images] [--skip-icons]');
  console.error('');
  console.error('Options:');
  console.error(' --verbose, -v Show detailed progress');
  console.error(' --skip-images Skip Unsplash image fetching');
  console.error(' --skip-icons Skip icon injection');
  console.error('');
  console.error('Environment:');
  console.error(' UNSPLASH_ACCESS_KEY Your Unsplash API key (optional)');
  process.exit(1);
}

// Run the enhancement and map the result to the process exit code:
// 0 when enhanceAssets reports success, 1 otherwise (including throws).
enhanceAssets(args.outputDir, {
  verbose: args.verbose,
  skipImages: args.skipImages,
  skipIcons: args.skipIcons
})
  .then(result => {
    if (args.verbose) {
      // Verbose mode dumps the full result object for inspection.
      console.log(JSON.stringify(result, null, 2));
    }
    process.exit(result.success ? 0 : 1);
  })
  .catch(error => {
    console.error('Error:', error.message);
    process.exit(1);
  });

// Also usable as a library: callers can import enhanceAssets directly
// (note the CLI block above still runs on import of this module).
export { enhanceAssets };