design-clone 1.2.0 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +32 -39
- package/SKILL.md +69 -45
- package/bin/cli.js +22 -4
- package/bin/commands/clone-site.js +31 -106
- package/bin/commands/help.js +19 -6
- package/bin/commands/init.js +11 -56
- package/bin/commands/uninstall.js +105 -0
- package/bin/commands/update.js +70 -0
- package/bin/commands/verify.js +11 -16
- package/bin/utils/paths.js +28 -0
- package/bin/utils/validate.js +24 -28
- package/bin/utils/version.js +23 -0
- package/docs/code-standards.md +789 -0
- package/docs/codebase-summary.md +556 -0
- package/docs/index.md +74 -0
- package/docs/project-overview-pdr.md +797 -0
- package/docs/system-architecture.md +718 -0
- package/package.json +20 -21
- package/src/ai/prompts/design-tokens/basic.md +80 -0
- package/src/ai/prompts/design-tokens/section-with-css.md +41 -0
- package/src/ai/prompts/design-tokens/section.md +48 -0
- package/src/ai/prompts/design-tokens/with-css.md +87 -0
- package/src/ai/prompts/structure-analysis/basic.md +55 -0
- package/src/ai/prompts/structure-analysis/with-context.md +59 -0
- package/src/ai/prompts/structure-analysis/with-dimensions.md +63 -0
- package/src/ai/prompts/structure-analysis/with-hierarchy.md +73 -0
- package/src/ai/prompts/ux-audit/aggregation.md +42 -0
- package/src/ai/prompts/ux-audit/desktop.md +92 -0
- package/src/ai/prompts/ux-audit/mobile.md +93 -0
- package/src/ai/prompts/ux-audit/tablet.md +92 -0
- package/src/core/animation/animation-extractor-ast.js +183 -0
- package/src/core/animation/animation-extractor-output.js +152 -0
- package/src/core/animation/animation-extractor.js +178 -0
- package/src/core/animation/state-capture-detection.js +200 -0
- package/src/core/animation/state-capture.js +193 -0
- package/src/core/capture/browser-context-pool.js +96 -0
- package/src/core/capture/multi-page-screenshot-page.js +110 -0
- package/src/core/capture/multi-page-screenshot.js +208 -0
- package/src/core/capture/screenshot-extraction.js +186 -0
- package/src/core/capture/screenshot-helpers.js +175 -0
- package/src/core/capture/screenshot-orchestrator.js +174 -0
- package/src/core/capture/screenshot-viewport.js +93 -0
- package/src/core/capture/screenshot.js +192 -0
- package/src/core/content/content-counter-dom.js +191 -0
- package/src/core/content/content-counter.js +76 -0
- package/src/core/css/breakpoint-detector.js +66 -0
- package/src/core/css/chromium-defaults.json +23 -0
- package/src/core/css/computed-style-extractor.js +102 -0
- package/src/core/css/css-chunker.js +103 -0
- package/src/core/{css-extractor.js → css/css-extractor.js} +4 -4
- package/src/core/css/filter-css-dead-code.js +120 -0
- package/src/core/css/filter-css-html-analyzer.js +110 -0
- package/src/core/css/filter-css-selector-matcher.js +172 -0
- package/src/core/css/filter-css.js +206 -0
- package/src/core/css/merge-css-atrule-processor.js +158 -0
- package/src/core/css/merge-css-file-io.js +68 -0
- package/src/core/css/merge-css.js +148 -0
- package/src/core/detection/framework-detector-routing.js +68 -0
- package/src/core/detection/framework-detector-signals.js +65 -0
- package/src/core/detection/framework-detector.js +198 -0
- package/src/core/dimension/dimension-extractor-card-detector.js +82 -0
- package/src/core/dimension/dimension-extractor.js +317 -0
- package/src/core/dimension/dimension-output-ai-summary.js +111 -0
- package/src/core/dimension/dimension-output.js +173 -0
- package/src/core/dimension/dom-tree-analyzer-tree-builders.js +95 -0
- package/src/core/dimension/dom-tree-analyzer.js +191 -0
- package/src/core/discovery/app-state-snapshot-capture.js +195 -0
- package/src/core/discovery/app-state-snapshot-utils.js +178 -0
- package/src/core/discovery/app-state-snapshot.js +131 -0
- package/src/core/discovery/discover-pages-routes.js +84 -0
- package/src/core/discovery/discover-pages-utils.js +177 -0
- package/src/core/discovery/discover-pages.js +191 -0
- package/src/core/html/html-extractor-inline-styler.js +70 -0
- package/src/core/html/html-extractor.js +147 -0
- package/src/core/html/semantic-enhancer-mappings.js +200 -0
- package/src/core/html/semantic-enhancer-page.js +148 -0
- package/src/core/html/semantic-enhancer.js +135 -0
- package/src/core/links/rewrite-links-css-rewriter.js +53 -0
- package/src/core/links/rewrite-links.js +173 -0
- package/src/core/media/asset-validator.js +118 -0
- package/src/core/media/extract-assets-downloader.js +187 -0
- package/src/core/media/extract-assets-page-scraper.js +115 -0
- package/src/core/media/extract-assets.js +159 -0
- package/src/core/media/video-capture-convert.js +200 -0
- package/src/core/media/video-capture.js +201 -0
- package/src/core/{cookie-handler.js → page-prep/cookie-handler.js} +1 -1
- package/src/core/{lazy-loader.js → page-prep/lazy-loader.js} +44 -46
- package/src/core/{page-readiness.js → page-prep/page-readiness.js} +8 -8
- package/src/core/section/section-cropper-helpers.js +43 -0
- package/src/core/section/section-cropper.js +132 -0
- package/src/core/section/section-detector-strategies.js +139 -0
- package/src/core/section/section-detector-utils.js +100 -0
- package/src/core/section/section-detector.js +88 -0
- package/src/core/tests/test-section-cropper.js +177 -0
- package/src/core/tests/test-section-detector.js +55 -0
- package/src/post-process/enhance-assets.js +29 -4
- package/src/post-process/fetch-images-unsplash-client.js +123 -0
- package/src/post-process/fetch-images.js +60 -263
- package/src/post-process/inject-gosnap.js +88 -0
- package/src/post-process/inject-icons-svg-replacer.js +76 -0
- package/src/post-process/inject-icons.js +47 -200
- package/src/route-discoverers/angular-discoverer.js +157 -0
- package/src/route-discoverers/astro-discoverer.js +123 -0
- package/src/route-discoverers/base-discoverer-utils.js +137 -0
- package/src/route-discoverers/base-discoverer.js +153 -0
- package/src/route-discoverers/index.js +106 -0
- package/src/route-discoverers/next-discoverer.js +130 -0
- package/src/route-discoverers/nuxt-discoverer.js +138 -0
- package/src/route-discoverers/react-discoverer.js +139 -0
- package/src/route-discoverers/svelte-discoverer.js +109 -0
- package/src/route-discoverers/universal-discoverer.js +227 -0
- package/src/route-discoverers/vue-discoverer.js +118 -0
- package/src/shared/config.js +38 -0
- package/src/shared/error-codes.js +31 -0
- package/src/shared/viewports.js +46 -0
- package/src/utils/browser.js +11 -44
- package/src/utils/helpers.js +4 -0
- package/src/utils/log.js +12 -0
- package/src/utils/playwright-loader.js +76 -0
- package/src/utils/playwright.js +147 -0
- package/src/utils/progress.js +32 -0
- package/src/verification/generate-audit-report-css-fixes.js +52 -0
- package/src/verification/generate-audit-report-sections.js +158 -0
- package/src/verification/generate-audit-report.js +122 -0
- package/src/verification/quality-scorer.js +92 -0
- package/src/verification/verify-footer-checks.js +103 -0
- package/src/verification/verify-footer-helpers.js +178 -0
- package/src/verification/verify-footer.js +135 -0
- package/src/verification/verify-header-checks.js +104 -0
- package/src/verification/verify-header-helpers.js +156 -0
- package/src/verification/verify-header.js +144 -0
- package/src/verification/verify-layout-report.js +101 -0
- package/src/verification/verify-layout.js +14 -260
- package/src/verification/verify-menu-checks.js +104 -0
- package/src/verification/verify-menu-helpers.js +112 -0
- package/src/verification/verify-menu.js +18 -302
- package/src/verification/verify-slider-checks.js +115 -0
- package/src/verification/verify-slider-constants.js +65 -0
- package/src/verification/verify-slider-helpers.js +164 -0
- package/src/verification/verify-slider.js +142 -0
- package/.env.example +0 -14
- package/docs/basic-clone.md +0 -63
- package/docs/cli-reference.md +0 -118
- package/docs/design-clone-architecture.md +0 -275
- package/docs/pixel-perfect.md +0 -86
- package/docs/troubleshooting.md +0 -169
- package/requirements.txt +0 -5
- package/src/ai/analyze-structure.py +0 -305
- package/src/ai/extract-design-tokens.py +0 -439
- package/src/ai/prompts/__init__.py +0 -2
- package/src/ai/prompts/__pycache__/__init__.cpython-313.pyc +0 -0
- package/src/ai/prompts/__pycache__/design_tokens.cpython-313.pyc +0 -0
- package/src/ai/prompts/__pycache__/structure_analysis.cpython-313.pyc +0 -0
- package/src/ai/prompts/design_tokens.py +0 -183
- package/src/ai/prompts/structure_analysis.py +0 -273
- package/src/core/animation-extractor.js +0 -526
- package/src/core/design-tokens.js +0 -103
- package/src/core/dimension-extractor.js +0 -366
- package/src/core/dimension-output.js +0 -208
- package/src/core/discover-pages.js +0 -314
- package/src/core/extract-assets.js +0 -468
- package/src/core/filter-css.js +0 -499
- package/src/core/html-extractor.js +0 -171
- package/src/core/merge-css.js +0 -407
- package/src/core/multi-page-screenshot.js +0 -377
- package/src/core/rewrite-links.js +0 -226
- package/src/core/screenshot.js +0 -572
- package/src/core/state-capture.js +0 -602
- package/src/core/video-capture.js +0 -540
- package/src/utils/__init__.py +0 -16
- package/src/utils/__pycache__/__init__.cpython-313.pyc +0 -0
- package/src/utils/__pycache__/env.cpython-313.pyc +0 -0
- package/src/utils/env.py +0 -134
- package/src/utils/puppeteer.js +0 -281
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Multi-Page Screenshot: Per-Page Capture
|
|
3
|
+
*
|
|
4
|
+
* Single-page capture logic: navigate and take viewport screenshots.
|
|
5
|
+
* Called once per page by captureMultiplePages.
|
|
6
|
+
*
|
|
7
|
+
* @module multi-page-screenshot-page
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import path from 'path';
|
|
11
|
+
import fs from 'fs/promises';
|
|
12
|
+
|
|
13
|
+
import { captureViewport } from './screenshot-viewport.js';
|
|
14
|
+
import { VIEWPORTS, DEFAULT_SCROLL_DELAY } from './screenshot-helpers.js';
|
|
15
|
+
import { waitForDomStable, waitForPageReady } from '../page-prep/page-readiness.js';
|
|
16
|
+
import { dismissCookieBanner } from '../page-prep/cookie-handler.js';
|
|
17
|
+
|
|
18
|
+
/** Default options for multi-page capture (shallow-merged with caller-supplied options). */
export const DEFAULT_OPTIONS = {
  viewports: ['desktop', 'tablet', 'mobile'], // viewport names; each must exist in VIEWPORTS or it is skipped with a warning
  fullPage: true,                     // capture the full scroll height, not just the visible fold
  maxSize: 5,                         // passed to captureViewport — presumably max image size in MB; TODO confirm
  scrollDelay: DEFAULT_SCROLL_DELAY,  // delay between scroll steps (from screenshot-helpers), forwarded to captureViewport
  timeout: 60000,                     // page.goto navigation timeout in ms
  onProgress: null                    // optional (current, total, info) callback invoked per page
};
|
|
26
|
+
|
|
27
|
+
/**
 * Convert a URL path to a kebab-case filename.
 * '/about' → 'about', '/blog/post' → 'blog-post', '/' → 'index'.
 *
 * @param {string} pagePath - URL pathname (may be empty, '/', or nested).
 * @returns {string} Lowercase, filesystem-safe, kebab-case name; never empty.
 */
export function pathToFilename(pagePath) {
  if (!pagePath || pagePath === '/') return 'index';
  const name = pagePath
    .replace(/^\//, '')            // drop leading slash
    .replace(/\/$/, '')            // drop trailing slash
    .replace(/\//g, '-')           // path separators → dashes
    .replace(/[^a-z0-9-]/gi, '-')  // any unsafe character → dash
    .replace(/-+/g, '-')           // collapse dash runs
    .toLowerCase();
  // Robustness fix: degenerate inputs like '//' reduced to '' in the original,
  // which would yield a bare '.png' filename downstream — fall back to 'index'.
  return name || 'index';
}
|
|
38
|
+
|
|
39
|
+
/**
 * Create the output directory plus one analysis/<viewport>/ subdirectory per
 * requested viewport. Existing directories are left untouched (recursive mkdir).
 *
 * @param {string} outputDir - Root output directory.
 * @param {string[]} viewports - Viewport names, e.g. ['desktop', 'mobile'].
 */
export async function createOutputStructure(outputDir, viewports) {
  await fs.mkdir(outputDir, { recursive: true });
  for (const viewport of viewports) {
    const analysisDir = path.join(outputDir, 'analysis', viewport);
    await fs.mkdir(analysisDir, { recursive: true });
  }
}
|
|
49
|
+
|
|
50
|
+
/**
 * Capture all configured viewport screenshots for one page.
 *
 * Invalid viewport names and per-viewport capture failures are recorded as
 * warnings (and `{failed: true}` entries) rather than thrown, so one bad
 * viewport never aborts the rest of the page's captures.
 *
 * @param {import('playwright').Page} page - Page already navigated to the target URL.
 * @param {string} outputDir - Root output directory.
 * @param {string} filename - Base name (from pathToFilename) for the .png files.
 * @param {Object} options - Merged options (viewports, fullPage, maxSize, scrollDelay).
 * @returns {Promise<{screenshots: Object, warnings: string[]}>}
 */
async function capturePageViewports(page, outputDir, filename, options) {
  const screenshots = {};
  const warnings = [];

  for (const viewport of options.viewports) {
    if (!VIEWPORTS[viewport]) {
      warnings.push(`Invalid viewport: ${viewport}`);
      continue;
    }
    try {
      // Bug fix: the screenshot name must use the page's derived filename; the
      // previous literal `$(unknown).png` ignored the `filename` parameter and
      // wrote every page's screenshot to the same file per viewport.
      const screenshotPath = path.join(outputDir, 'analysis', viewport, `${filename}.png`);
      const vpResult = await captureViewport({
        page,
        viewport,
        outputPath: screenshotPath,
        fullPage: options.fullPage,
        maxSize: options.maxSize,
        scrollDelay: options.scrollDelay
      });
      screenshots[viewport] = {
        path: vpResult.path,
        size: vpResult.size,
        compressed: vpResult.compressed
      };
    } catch (err) {
      warnings.push(`${viewport} capture failed: ${err.message}`);
      screenshots[viewport] = { error: err.message, failed: true };
    }
  }

  return { screenshots, warnings };
}
|
|
83
|
+
|
|
84
|
+
/**
 * Navigate to one page and take its viewport screenshots.
 *
 * Navigation/readiness failures are caught and reported through the returned
 * result object (`success: false`, `error`, warning entry) — never thrown.
 *
 * @param {import('playwright').Page} page
 * @param {{path, name, url}} pageInfo
 * @param {string} outputDir
 * @param {Object} options
 * @returns {Promise<{path, name, url, filename, screenshots, warnings, success, error?}>}
 */
export async function captureSinglePage(page, pageInfo, outputDir, options) {
  const filename = pathToFilename(pageInfo.path);
  const result = {
    path: pageInfo.path,
    name: pageInfo.name,
    url: pageInfo.url,
    filename,
    screenshots: {},
    warnings: []
  };

  try {
    await page.goto(pageInfo.url, { waitUntil: 'networkidle', timeout: options.timeout });
    await waitForPageReady(page);
    // Cookie-banner dismissal is best-effort; ignore any failure.
    await dismissCookieBanner(page).catch(() => {});
    await waitForDomStable(page, 300, 3000);

    const captured = await capturePageViewports(page, outputDir, filename, options);
    result.screenshots = captured.screenshots;
    result.warnings.push(...captured.warnings);
    result.success = true;
  } catch (err) {
    result.success = false;
    result.error = err.message;
    result.warnings.push(`Page capture failed: ${err.message}`);
  }

  return result;
}
|
|
@@ -0,0 +1,208 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Multi-page Screenshot Capture
|
|
3
|
+
*
|
|
4
|
+
* Capture multi-viewport screenshots for multiple pages
|
|
5
|
+
* using a shared browser session for efficiency.
|
|
6
|
+
*
|
|
7
|
+
* Usage:
|
|
8
|
+
* import { captureMultiplePages } from './multi-page-screenshot.js';
|
|
9
|
+
* const result = await captureMultiplePages(pages, { outputDir: './output' });
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
import path from 'path';
|
|
13
|
+
import fs from 'fs/promises';
|
|
14
|
+
|
|
15
|
+
import { getBrowser, getPage, disconnectBrowser } from '../../utils/browser.js';
|
|
16
|
+
import {
|
|
17
|
+
captureSinglePage,
|
|
18
|
+
createOutputStructure,
|
|
19
|
+
pathToFilename,
|
|
20
|
+
DEFAULT_OPTIONS
|
|
21
|
+
} from './multi-page-screenshot-page.js';
|
|
22
|
+
import { BrowserContextPool } from './browser-context-pool.js';
|
|
23
|
+
|
|
24
|
+
// ============================================================================
|
|
25
|
+
// Batch Capture
|
|
26
|
+
// ============================================================================
|
|
27
|
+
|
|
28
|
+
/**
 * Capture multiple pages with a shared browser session.
 *
 * When concurrency > 1 and there is more than one page, pages are captured in
 * parallel through a BrowserContextPool; if pool setup/teardown fails, the
 * whole batch is retried sequentially with a fresh page per URL. Results are
 * aggregated into stats and written to <outputDir>/capture-results.json.
 *
 * @param {Array<{path: string, name: string, url: string}>} pages - Pages to capture
 * @param {Object} options - Capture options (merged with DEFAULT_OPTIONS);
 *   `outputDir` is required, `concurrency` defaults to 3.
 * @returns {Promise<Object>} Complete capture result with per-page results and stats
 * @throws {Error} If options.outputDir is missing.
 */
export async function captureMultiplePages(pages, options = {}) {
  const opts = { ...DEFAULT_OPTIONS, ...options };
  const startTime = Date.now();

  if (!opts.outputDir) throw new Error('outputDir is required');

  await createOutputStructure(opts.outputDir, opts.viewports);

  let browser = null;
  const results = {
    success: true,
    baseUrl: pages[0]?.url ? new URL(pages[0].url).origin : null,
    outputDir: path.resolve(opts.outputDir),
    pages: [],
    stats: {
      totalPages: pages.length,
      successfulPages: 0,
      failedPages: 0,
      totalScreenshots: 0,
      totalWarnings: 0
    },
    capturedAt: new Date().toISOString()
  };

  // Fold one per-page result into the aggregate. Shared by both capture paths
  // (the original duplicated this logic and the sequential copy lacked the
  // `warnings?.` guard).
  const tally = (pageResult) => {
    if (!pageResult) { results.stats.failedPages++; return; }
    results.pages.push(pageResult);
    if (pageResult.success) {
      results.stats.successfulPages++;
      results.stats.totalScreenshots += Object.values(pageResult.screenshots)
        .filter(shot => !shot.failed).length;
    } else {
      results.stats.failedPages++;
    }
    results.stats.totalWarnings += pageResult.warnings?.length || 0;
  };

  try {
    browser = await getBrowser({ headless: true });
    const concurrency = opts.concurrency || 3;

    // Parallel capture via context pool only when there is real work to parallelize.
    let usePool = concurrency > 1 && pages.length > 1;
    if (usePool) {
      try {
        const pool = new BrowserContextPool(browser, { maxContexts: concurrency });
        const pageResults = new Array(pages.length);

        const pagePromises = pages.map(async (pageInfo, i) => {
          const { context, page } = await pool.acquire();
          try {
            if (opts.onProgress) {
              opts.onProgress(i + 1, pages.length, { path: pageInfo.path, name: pageInfo.name, status: 'capturing' });
            }
            pageResults[i] = await captureSinglePage(page, pageInfo, opts.outputDir, opts);
            if (opts.onProgress) {
              opts.onProgress(i + 1, pages.length, { path: pageInfo.path, name: pageInfo.name, status: 'done' });
            }
          } finally {
            await pool.release(context);
          }
        });

        // allSettled: a single page rejection leaves its slot undefined,
        // which tally() counts as a failed page.
        await Promise.allSettled(pagePromises);
        await pool.drain();

        for (const pageResult of pageResults) tally(pageResult);
      } catch {
        // Pool failed — discard any partially-aggregated results so the
        // sequential retry below does not double-count pages.
        usePool = false;
        results.pages = [];
        results.stats.successfulPages = 0;
        results.stats.failedPages = 0;
        results.stats.totalScreenshots = 0;
        results.stats.totalWarnings = 0;
      }
    }

    // Sequential fallback (single page, concurrency <= 1, or pool failure).
    if (!usePool) {
      for (let i = 0; i < pages.length; i++) {
        const pageInfo = pages[i];
        if (opts.onProgress) {
          opts.onProgress(i + 1, pages.length, { path: pageInfo.path, name: pageInfo.name, status: 'capturing' });
        }
        const page = await getPage(browser);
        try {
          tally(await captureSinglePage(page, pageInfo, opts.outputDir, opts));
          if (opts.onProgress) {
            opts.onProgress(i + 1, pages.length, { path: pageInfo.path, name: pageInfo.name, status: 'done' });
          }
        } finally {
          await page.close().catch(() => {});
        }
      }
    }
  } catch (err) {
    results.success = false;
    results.error = err.message;
  } finally {
    if (browser) await disconnectBrowser().catch(() => {});
  }

  results.stats.totalTimeMs = Date.now() - startTime;

  const resultsPath = path.join(opts.outputDir, 'capture-results.json');
  await fs.writeFile(resultsPath, JSON.stringify(results, null, 2));
  results.resultsFile = resultsPath;

  return results;
}
|
|
147
|
+
|
|
148
|
+
// ============================================================================
// Re-exports (backward-compatible)
// ============================================================================

// Re-exported so consumers that imported pathToFilename from this module
// before the split into multi-page-screenshot-page.js keep working.
export { pathToFilename };

// ============================================================================
// CLI Entry Point
// ============================================================================

// Heuristic "run as script" check against argv[1]. NOTE(review): the loose
// `includes` fallback also matches any similarly-named wrapper script —
// an import.meta.url comparison would be stricter; confirm intent.
const isMainModule = process.argv[1] && (
  process.argv[1].endsWith('multi-page-screenshot.js') ||
  process.argv[1].includes('multi-page-screenshot')
);

if (isMainModule) {
  const url = process.argv[2];
  const outputDir = process.argv[3] || './multi-capture-output';

  if (!url) {
    console.error('Usage: node multi-page-screenshot.js <url> [outputDir]');
    process.exit(1);
  }

  // Dynamic import of the discovery module — presumably to avoid a static
  // circular dependency; TODO confirm against discover-pages.js imports.
  import('../discovery/discover-pages.js').then(async ({ discoverPages, estimateCapture }) => {
    console.error(`[INFO] Discovering pages from ${url}...`);
    const discovery = await discoverPages(url, { maxPages: 5 });

    if (!discovery.success) {
      console.error(`[ERROR] Discovery failed: ${discovery.error}`);
      process.exit(1);
    }

    console.error(`[INFO] Found ${discovery.pages.length} pages`);

    // Dry-run: show discovery + estimate, exit without capture
    if (process.argv.includes('--dry-run')) {
      const estimate = estimateCapture(discovery.pages);
      console.log(JSON.stringify({
        dryRun: true,
        discovery: { pages: discovery.pages, framework: discovery.framework },
        estimate,
        hint: 'Remove --dry-run to execute capture'
      }, null, 2));
      process.exit(0);
    }

    const result = await captureMultiplePages(discovery.pages, {
      outputDir,
      // Progress goes to stderr so stdout stays pure JSON for piping.
      onProgress: (current, total, info) => {
        console.error(`[${current}/${total}] ${info.status}: ${info.name} (${info.path})`);
      }
    });

    console.log(JSON.stringify(result, null, 2));
    process.exit(result.success ? 0 : 1);
  }).catch(err => {
    console.error(`[ERROR] ${err.message}`);
    process.exit(1);
  });
}
|
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Screenshot Extraction
|
|
3
|
+
*
|
|
4
|
+
* HTML, CSS, and animation extraction pipeline used during multi-viewport
|
|
5
|
+
* screenshot capture. Handles content counting, semantic enhancement,
|
|
6
|
+
* CSS filtering, and animation token generation.
|
|
7
|
+
*
|
|
8
|
+
* @module screenshot-extraction
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import path from 'path';
|
|
12
|
+
import fs from 'fs/promises';
|
|
13
|
+
|
|
14
|
+
import { filterCssFile } from '../css/filter-css.js';
|
|
15
|
+
import { extractCleanHtml, extractAndEnhanceHtml, JS_FRAMEWORK_PATTERNS, MAX_HTML_SIZE } from '../html/html-extractor.js';
|
|
16
|
+
import { extractContentCounts, generateContentSummary } from '../content/content-counter.js';
|
|
17
|
+
import { extractAllCss, MAX_CSS_SIZE } from '../css/css-extractor.js';
|
|
18
|
+
import { extractAnimations, generateAnimationsCss, generateAnimationTokens } from '../animation/animation-extractor.js';
|
|
19
|
+
import { logInfo, logWarn, isTTY } from '../../utils/log.js';
|
|
20
|
+
import { createProgress } from '../../utils/progress.js';
|
|
21
|
+
|
|
22
|
+
/**
 * Extract content counts (grids, repeated items) from the live page and write
 * <output>/content-counts.json plus a human-readable <output>/content-summary.md.
 *
 * @param {import('playwright').Page} page
 * @param {string} output - Output directory (must already exist).
 * @returns {Promise<{path: string, summaryPath: string, summary: *}>}
 */
export async function runContentCounting(page, output) {
  const counts = await extractContentCounts(page);

  const countsFile = path.join(output, 'content-counts.json');
  await fs.writeFile(countsFile, JSON.stringify(counts, null, 2), 'utf-8');

  const summaryFile = path.join(output, 'content-summary.md');
  await fs.writeFile(summaryFile, generateContentSummary(counts), 'utf-8');

  logInfo(`Content counts: ${counts.grids.total} grids, ${counts.repeatedItems.total} items`);

  return {
    path: path.resolve(countsFile),
    summaryPath: path.resolve(summaryFile),
    summary: counts.summary
  };
}
|
|
33
|
+
|
|
34
|
+
/**
 * Extract the page's HTML (optionally with semantic enhancement) and write it
 * to <output>/source.html.
 *
 * @param {import('playwright').Page} page
 * @param {string} output - Output directory.
 * @param {boolean} enhanceSemantic - Use the semantic enhancer instead of plain extraction.
 * @returns {Promise<Object>} Path, byte size, element count, enhancement stats, warnings.
 * @throws {Error} If the extracted HTML exceeds MAX_HTML_SIZE.
 */
export async function runHtmlExtraction(page, output, enhanceSemantic) {
  let extracted;
  if (enhanceSemantic) {
    extracted = await extractAndEnhanceHtml(page, { enhanceSemantic: true });
  } else {
    extracted = await extractCleanHtml(page, JS_FRAMEWORK_PATTERNS);
  }

  const html = extracted.html;
  const byteSize = Buffer.byteLength(html, 'utf-8');
  if (byteSize > MAX_HTML_SIZE) throw new Error(`HTML size exceeds ${MAX_HTML_SIZE / 1024 / 1024}MB limit`);

  const htmlPath = path.join(output, 'source.html');
  await fs.writeFile(htmlPath, html, 'utf-8');

  return {
    path: path.resolve(htmlPath),
    size: byteSize,
    elementCount: extracted.elementCount,
    semanticEnhanced: enhanceSemantic,
    semanticStats: extracted.semanticStats || null,
    warnings: extracted.warnings || []
  };
}
|
|
46
|
+
|
|
47
|
+
/**
 * Extract all page CSS (inline + linked stylesheets) and write it to
 * <output>/source-raw.css; computed styles, when any were collected, are
 * written to <output>/computed-styles.json.
 *
 * @param {import('playwright').Page} page
 * @param {string} url - Page URL (passed through to the extractor).
 * @param {string} output - Output directory.
 * @returns {Promise<Object>} Path, size, block/rule counts, CORS blocks, warnings.
 * @throws {Error} If the combined CSS exceeds MAX_CSS_SIZE.
 */
export async function runCssExtraction(page, url, output) {
  const cssData = await extractAllCss(page, url);

  // Concatenate every block, labelling each with its origin stylesheet.
  const combined = cssData.cssBlocks
    .map(b => `/* Source: ${b.source} */\n${b.css}`)
    .join('\n\n');
  const byteSize = Buffer.byteLength(combined, 'utf-8');

  if (byteSize > MAX_CSS_SIZE) {
    throw new Error(`CSS size exceeds ${MAX_CSS_SIZE / 1024 / 1024}MB limit`);
  }

  const rawCssPath = path.join(output, 'source-raw.css');
  await fs.writeFile(rawCssPath, combined, 'utf-8');

  if (Object.keys(cssData.computedStyles).length > 0) {
    await fs.writeFile(path.join(output, 'computed-styles.json'), JSON.stringify(cssData.computedStyles, null, 2));
  }

  return {
    path: path.resolve(rawCssPath),
    size: byteSize,
    blocks: cssData.cssBlocks.length,
    totalRules: cssData.totalRules,
    corsBlocked: cssData.corsBlocked,
    computedStyles: cssData.computedStyles,
    warnings: cssData.warnings || []
  };
}
|
|
64
|
+
|
|
65
|
+
/**
 * Filter the extracted CSS against the extracted HTML to drop unused
 * selectors, writing the result to <output>/source.css.
 *
 * @param {string} htmlPath - Path to the extracted HTML file.
 * @param {string} cssPath - Path to the raw CSS file.
 * @param {string} output - Output directory.
 * @param {boolean} [aggressiveFilter=false] - Enable the aggressive filter mode.
 * @returns {Promise<Object>} Filtered file path, size, reduction, and rule stats.
 */
export async function runCssFiltering(htmlPath, cssPath, output, aggressiveFilter = false) {
  const targetPath = path.join(output, 'source.css');
  const filtered = await filterCssFile(htmlPath, cssPath, targetPath, false, output, aggressiveFilter);
  logInfo(`CSS filtered: ${filtered.stats.reduction} reduction`);
  return {
    path: filtered.output.path,
    size: filtered.output.size,
    reduction: filtered.stats.reduction,
    stats: {
      totalRules: filtered.stats.totalRules,
      keptRules: filtered.stats.keptRules,
      removedRules: filtered.stats.removedRules
    }
  };
}
|
|
72
|
+
|
|
73
|
+
/**
 * Extract animation keyframes, transitions, and animated-element data from a
 * CSS file, writing <output>/animations.css and <output>/animation-tokens.json.
 *
 * @param {string} cssFilePath - CSS file to analyze.
 * @param {string} output - Output directory.
 * @returns {Promise<Object>} File paths plus keyframe/transition/element counts and tokens.
 * @throws {Error} If the animation extractor reports an error.
 */
export async function runAnimationExtraction(cssFilePath, output) {
  const css = await fs.readFile(cssFilePath, 'utf-8');
  const animations = await extractAnimations(css);
  if (animations.error) throw new Error(animations.error);

  const cssOutPath = path.join(output, 'animations.css');
  await fs.writeFile(cssOutPath, generateAnimationsCss(animations), 'utf-8');

  const tokens = generateAnimationTokens(animations);
  const tokensPath = path.join(output, 'animation-tokens.json');
  const tokensPayload = {
    keyframes: animations.keyframes,
    transitions: animations.transitions,
    animatedElements: animations.animatedElements,
    summary: tokens
  };
  await fs.writeFile(tokensPath, JSON.stringify(tokensPayload, null, 2), 'utf-8');

  logInfo(`Animations: ${tokens.keyframeCount} keyframes, ${tokens.transitions} transitions`);

  return {
    path: path.resolve(cssOutPath),
    tokensPath: path.resolve(tokensPath),
    keyframeCount: tokens.keyframeCount,
    transitionCount: tokens.transitions,
    animatedElementCount: tokens.animatedElements,
    tokens
  };
}
|
|
87
|
+
|
|
88
|
+
/**
 * Run the full extraction pipeline: content counts, HTML, CSS, CSS filtering,
 * computed-style gap-fill (opt-in), and animations. Each step is independently
 * guarded so a failure becomes a warning (and a `{failed: true}` marker on its
 * result slot) instead of aborting the pipeline.
 *
 * @param {import('playwright').Page} page
 * @param {string} url - Page URL
 * @param {string} output - Output directory
 * @param {{extractHtml, extractCss, filterUnused, enhanceSemantic, extractAnimations: boolean}} opts
 *   Also honors opts.aggressiveFilter (CSS filtering) and opts.extractComputed (gap-fill).
 * @returns {Promise<Object>} extraction object with html/css/filtered/animations/warnings
 */
export async function runExtractionPipeline(page, url, output, opts) {
  // `extractAnimations` is renamed on destructure to avoid shadowing the
  // imported extractAnimations() function.
  const { extractHtml, extractCss, filterUnused, enhanceSemantic, extractAnimations: extractAnimsFlag } = opts;
  const extraction = { html: null, css: null, warnings: [] };
  const extractionWarnings = [];

  const progress = createProgress();
  // Count the enabled steps up front so progress reporting has a fixed total.
  // Note: the computed-style gap-fill step is intentionally excluded (it emits
  // no progress.step either).
  const stepCount = [extractHtml && 'content', extractHtml && 'html', extractCss && 'css', filterUnused && 'filter', extractAnimsFlag && 'animations'].filter(Boolean).length;
  progress.start(stepCount, 'Extraction pipeline');

  // Content counts (before HTML cleanup)
  if (extractHtml) {
    progress.step('Content counting');
    try {
      extraction.contentCounts = await runContentCounting(page, output);
    } catch (error) {
      // Content counting is auxiliary: no failed marker, warning only.
      extractionWarnings.push(`Content counting failed: ${error.message}`);
    }
  }

  if (extractHtml) {
    progress.step('HTML extraction');
    try {
      const r = await runHtmlExtraction(page, output, enhanceSemantic);
      // Warnings are hoisted into the pipeline-level list; the rest of the
      // result is stored as extraction.html.
      const { warnings, ...htmlData } = r;
      extraction.html = htmlData;
      if (warnings.length > 0) extractionWarnings.push(...warnings);
    } catch (error) {
      extraction.html = { error: error.message, failed: true };
      extractionWarnings.push(`HTML extraction failed: ${error.message}`);
    }
  }

  if (extractCss) {
    progress.step('CSS extraction');
    try {
      const r = await runCssExtraction(page, url, output);
      const { warnings, ...cssData } = r;
      extraction.css = cssData;
      if (warnings.length > 0) extractionWarnings.push(...warnings);
      if (r.corsBlocked.length > 0) extractionWarnings.push(`${r.corsBlocked.length} CORS-blocked stylesheets`);
    } catch (error) {
      extraction.css = { error: error.message, failed: true };
      extractionWarnings.push(`CSS extraction failed: ${error.message}`);
    }
  }

  // Filtering requires both prior steps to have produced real file paths.
  if (filterUnused && extraction?.html?.path && extraction?.css?.path &&
      !extraction.html.failed && !extraction.css.failed) {
    progress.step('CSS filtering');
    try {
      extraction.filtered = await runCssFiltering(extraction.html.path, extraction.css.path, output, opts.aggressiveFilter);
    } catch (error) {
      extraction.filtered = { error: error.message, failed: true };
      extractionWarnings.push(`CSS filtering failed: ${error.message}`);
    }
  }

  // Computed style gap-fill (opt-in via --extract-computed)
  if (opts.extractComputed && extraction?.filtered?.path && !extraction.filtered.failed) {
    try {
      // Lazily imported: only loaded when the opt-in flag is set.
      const { extractComputedGapFill } = await import('../css/computed-style-extractor.js');
      const filteredCss = await fs.readFile(extraction.filtered.path, 'utf-8');
      const computed = await extractComputedGapFill(page, filteredCss);
      const computedPath = path.join(output, 'computed-gap.css');
      await fs.writeFile(computedPath, computed.css, 'utf-8');
      extraction.computedGap = { path: path.resolve(computedPath), ...computed.stats };
      logInfo(`Computed gap-fill: ${computed.rules} rules for ${computed.stats.elementsAnalyzed} elements`);
    } catch (error) {
      extraction.computedGap = { error: error.message, failed: true };
      extractionWarnings.push(`Computed style extraction failed: ${error.message}`);
    }
  }

  // Animations are parsed from the RAW extracted CSS (pre-filter), so they
  // only require the CSS step to have succeeded.
  if (extractCss && extractAnimsFlag && extraction?.css?.path && !extraction.css.failed) {
    progress.step('Animation extraction');
    try {
      extraction.animations = await runAnimationExtraction(extraction.css.path, output);
    } catch (error) {
      extraction.animations = { error: error.message, failed: true };
      extractionWarnings.push(`Animation extraction failed: ${error.message}`);
    }
  }

  progress.complete(`${stepCount} steps completed`);

  extraction.warnings = extractionWarnings;
  if (extractionWarnings.length > 0) {
    extractionWarnings.forEach(w => logWarn(w));
  }
  return extraction;
}
|
|
@@ -0,0 +1,175 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Screenshot Helpers
|
|
3
|
+
*
|
|
4
|
+
* CLI argument parsing, browser lifecycle management, and image compression
|
|
5
|
+
* utilities used by the screenshot capture pipeline.
|
|
6
|
+
*
|
|
7
|
+
* @module screenshot-helpers
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import fs from 'fs/promises';
|
|
11
|
+
|
|
12
|
+
import { filterCssFile } from '../css/filter-css.js';
|
|
13
|
+
import { getBrowser, getPage, closeBrowser } from '../../utils/browser.js';
|
|
14
|
+
import { parseArgs, outputError } from '../../utils/helpers.js';
|
|
15
|
+
import { waitForPageReady } from '../page-prep/page-readiness.js';
|
|
16
|
+
import { dismissCookieBanner } from '../page-prep/cookie-handler.js';
|
|
17
|
+
import { VIEWPORTS } from '../../shared/viewports.js';
|
|
18
|
+
import { TIMING } from '../../shared/config.js';
|
|
19
|
+
|
|
20
|
+
// Re-exported so screenshot pipeline modules can import everything from here.
export { filterCssFile, VIEWPORTS };

// Timing constants (milliseconds). The first two come from shared config;
// the scroll delay default is specific to the screenshot capture flow.
export const VIEWPORT_SETTLE_DELAY = TIMING.VIEWPORT_SETTLE_DELAY;
export const NETWORK_IDLE_TIMEOUT = TIMING.NETWORK_IDLE_TIMEOUT;
export const DEFAULT_SCROLL_DELAY = 1500;
|
|
25
|
+
|
|
26
|
+
// Sharp is treated as an optional dependency: when it is not installed,
// `sharp` stays null and compressIfNeeded() below becomes a no-op.
let sharp = null;
try {
  // Top-level await: this is an ES module, so the import resolves before
  // any consumer of this module runs.
  sharp = (await import('sharp')).default;
} catch {
  // Sharp not available — image compression is silently disabled.
}
|
|
33
|
+
|
|
34
|
+
/**
 * Parse and validate CLI arguments for the screenshot command.
 *
 * Exits the process (code 1) through outputError() when --url or --output
 * is missing, or when --viewports names an unknown viewport.
 *
 * @param {string[]} argv - Raw CLI arguments
 * @returns {Object} Normalized option bag consumed by the capture pipeline
 */
export function parseScreenshotArgs(argv) {
  const args = parseArgs(argv);

  // Report a fatal usage error and terminate.
  const fail = (message) => {
    outputError(new Error(message));
    process.exit(1);
  };

  if (!args.url) fail('--url is required');
  if (!args.output) fail('--output directory is required');

  // Normalize the viewport list; an absent/empty flag means all three.
  const viewports = (args.viewports || 'desktop,tablet,mobile')
    .split(',')
    .map((name) => name.trim().toLowerCase());

  const unknown = viewports.find((name) => !VIEWPORTS[name]);
  if (unknown) fail(`Invalid viewport: ${unknown}. Valid: desktop, tablet, mobile`);

  // Boolean flag helpers: opt-in flags default to false, opt-out to true.
  const optIn = (key) => args[key] === 'true';
  const optOut = (key) => args[key] !== 'false';

  return {
    url: args.url,
    output: args.output,
    viewports,
    fullPage: optOut('full-page'),
    maxSize: args['max-size'] ? parseFloat(args['max-size']) : 5,
    scrollDelay: args['scroll-delay'] ? parseInt(args['scroll-delay'], 10) : DEFAULT_SCROLL_DELAY,
    extractHtml: optIn('extract-html'),
    extractCss: optIn('extract-css'),
    filterUnused: optOut('filter-unused'),
    captureHover: optIn('capture-hover'),
    captureVideo: optIn('video'),
    videoFormat: args['video-format'] || 'webm',
    videoDuration: args['video-duration'] ? parseInt(args['video-duration'], 10) : 12000,
    sectionMode: optIn('section-mode'),
    // Note the inverted flag: --no-semantic=true disables semantic enhancement.
    enhanceSemantic: args['no-semantic'] !== 'true',
    extractAnimations: optOut('extract-animations'),
    detectBreakpoints: optIn('detect-breakpoints'),
    extractComputed: optIn('extract-computed'),
    aggressiveFilter: args['aggressive-filter'],
    headless: optIn('headless'),
    close: optIn('close')
  };
}
|
|
87
|
+
|
|
88
|
+
/**
 * Create a manager that owns the shared browser/page pair for the screenshot
 * pipeline, re-launching the browser whenever the requested headless mode
 * differs from the one it was launched with.
 *
 * @param {boolean} cliHeadless - Headless flag supplied on the CLI
 * @returns {Object} Manager exposing init(), headless policy, and state getters
 */
export function createBrowserManager(cliHeadless) {
  // Closure-held state shared across init() calls.
  let browser = null;
  let page = null;
  let currentHeadless = null;
  let cookieResult = null;

  // Desktop captures always run headless; other viewports follow the CLI flag.
  const getHeadlessForViewport = (viewport) =>
    viewport === 'desktop' ? true : cliHeadless;

  const init = async (headless, navigateUrl = null) => {
    // Headless mode is fixed at launch time, so a mode switch requires a
    // full browser restart.
    const mustRelaunch = browser !== null && currentHeadless !== headless;
    if (mustRelaunch) {
      await closeBrowser();
      browser = null;
      page = null;
    }

    if (browser === null) {
      const launchArgs = headless ? [] : ['--start-maximized', '--window-position=0,0'];
      browser = await getBrowser({ headless, args: launchArgs });
      page = await getPage(browser);
      currentHeadless = headless;

      // Only a freshly launched browser navigates: load the page, give it a
      // fixed settle period, clear any cookie banner, then wait for readiness.
      if (navigateUrl) {
        await page.setViewportSize(VIEWPORTS.desktop);
        await page.goto(navigateUrl, { waitUntil: 'domcontentloaded', timeout: 90000 });
        await new Promise((resolve) => setTimeout(resolve, 3000));
        cookieResult = await dismissCookieBanner(page);
        await waitForPageReady(page);
      }
    }
    return { browser, page };
  };

  return {
    init,
    getHeadlessForViewport,
    getPage: () => page,
    getCookieResult: () => cookieResult,
    getCurrentHeadless: () => currentHeadless,
    setCurrentHeadless: (val) => { currentHeadless = val; }
  };
}
|
|
136
|
+
|
|
137
|
+
/**
 * Compress a PNG screenshot in place when it exceeds a size budget.
 *
 * Uses Sharp (optional dependency) to progressively downscale: first to 85%
 * of the original width, then to 70% if the result is still over budget.
 * If Sharp is unavailable, the re-encode fails, or the re-encoded image is
 * not actually smaller than the original, the file is left untouched.
 *
 * @param {string} filePath - Path to the image file (overwritten on success)
 * @param {number} [maxSizeMB=5] - Size budget in megabytes
 * @returns {Promise<Object>} { compressed, originalSize, finalSize [, error] }
 */
export async function compressIfNeeded(filePath, maxSizeMB = 5) {
  const stats = await fs.stat(filePath);
  const originalSize = stats.size;
  const maxBytes = maxSizeMB * 1024 * 1024;

  // Nothing to do when already under budget, or when Sharp is missing.
  if (originalSize <= maxBytes || !sharp) {
    return { compressed: false, originalSize, finalSize: originalSize };
  }

  try {
    const buffer = await fs.readFile(filePath);
    const meta = await sharp(buffer).metadata();

    // First pass: mild downscale.
    const newWidth = Math.round(meta.width * 0.85);
    let output = await sharp(buffer)
      .resize(newWidth)
      .png({ quality: 80, compressionLevel: 9 })
      .toBuffer();

    // Second pass: more aggressive downscale if still over budget.
    if (output.length > maxBytes) {
      const smallerWidth = Math.round(meta.width * 0.7);
      output = await sharp(buffer)
        .resize(smallerWidth)
        .png({ quality: 70, compressionLevel: 9 })
        .toBuffer();
    }

    // Bug fix: never replace the file with a *larger* re-encode. For an
    // already well-optimized PNG both passes can produce bigger output;
    // in that case keep the original bytes.
    if (output.length >= originalSize) {
      return { compressed: false, originalSize, finalSize: originalSize };
    }

    await fs.writeFile(filePath, output);
    return { compressed: true, originalSize, finalSize: output.length };
  } catch (err) {
    // Best-effort: report the failure but leave the original file intact.
    return { compressed: false, originalSize, finalSize: originalSize, error: err.message };
  }
}
|