bertui 1.1.6 → 1.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +164 -292
- package/index.js +53 -31
- package/package.json +49 -31
- package/src/build/image-optimizer.js +102 -51
- package/src/client/fast-refresh.js +72 -0
- package/src/client/hmr-runtime.js +59 -0
- package/src/compiler/index.js +25 -0
- package/src/compiler/router-generator-pure.js +104 -0
- package/src/compiler/transform.js +149 -0
- package/src/config/index.js +2 -0
- package/src/css/index.js +46 -0
- package/src/css/processor.js +127 -0
- package/src/image-optimizer/index.js +76 -0
- package/src/images/index.js +102 -0
- package/src/images/processor.js +169 -0
- package/src/router/index.js +3 -0
- package/src/server/dev-handler.js +254 -0
- package/src/server/dev-server-utils.js +289 -0
- package/src/server/dev-server.js +10 -456
- package/src/server/hmr-handler.js +148 -0
- package/src/server/index.js +3 -0
- package/src/server/request-handler.js +36 -0
- package/src/server-islands/extractor.js +198 -0
- package/src/server-islands/index.js +59 -0
- package/src/utils/index.js +11 -0
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
// bertui/src/compiler/transform.js - NEW FILE
|
|
2
|
+
// PURE JSX/TSX transformation function - NO FILE SYSTEM
|
|
3
|
+
|
|
4
|
+
/**
 * Transform JSX/TSX source into plain JavaScript via Bun's transpiler.
 *
 * @param {string} sourceCode - The source code to transform.
 * @param {Object} [options] - Transformation options.
 * @param {string} [options.loader='tsx'] - One of 'jsx', 'tsx', 'ts', 'js'.
 * @param {string} [options.env='development'] - Inlined as process.env.NODE_ENV.
 * @param {boolean} [options.addReactImport=true] - Prepend a React import when
 *   the output references React but never imports it.
 * @returns {Promise<string>} Transformed JavaScript code.
 * @throws {Error} When Bun's transpiler rejects the input.
 */
export async function transformJSX(sourceCode, options = {}) {
  const { loader = 'tsx', env = 'development', addReactImport = true } = options;

  // Plain JS with no JSX markers passes through untouched.
  const looksLikeJsx =
    sourceCode.includes('React.createElement') || /<[A-Z]/.test(sourceCode);
  if (loader === 'js' && !looksLikeJsx) {
    return sourceCode;
  }

  try {
    const transpiler = new Bun.Transpiler({
      loader,
      target: 'browser',
      define: {
        // Inline the env so dead-code elimination can strip dev branches.
        'process.env.NODE_ENV': JSON.stringify(env)
      },
      tsconfig: {
        compilerOptions: {
          jsx: 'react',
          jsxFactory: 'React.createElement',
          jsxFragmentFactory: 'React.Fragment',
          target: 'ES2020',
          module: 'ESNext'
        }
      }
    });

    let output = await transpiler.transform(sourceCode);

    // The classic JSX factory needs React in scope; inject an import when
    // the output uses it but never brought it in itself.
    const missingReactImport =
      !output.includes('import React') && !output.includes('import * as React');
    const referencesReact =
      output.includes('React.createElement') ||
      output.includes('jsx(') ||
      output.includes('jsxs(');
    if (addReactImport && missingReactImport && referencesReact) {
      output = `import React from 'react';\n${output}`;
    }

    // Production output should not reference the dev-only jsxDEV helper.
    if (env === 'production') {
      output = output.replace(/jsxDEV/g, 'jsx');
    }

    return output;
  } catch (error) {
    throw new Error(`JSX transformation failed: ${error.message}`);
  }
}
|
|
65
|
+
|
|
66
|
+
/**
 * Synchronous variant of transformJSX.
 * Use only when the input is small and synchronous execution is required.
 *
 * @param {string} sourceCode - The source code to transform.
 * @param {Object} [options] - Same options as transformJSX.
 * @returns {string} Transformed JavaScript code.
 * @throws {Error} When Bun's transpiler rejects the input.
 */
export function transformJSXSync(sourceCode, options = {}) {
  const { loader = 'tsx', env = 'development', addReactImport = true } = options;

  try {
    const transpiler = new Bun.Transpiler({
      loader,
      target: 'browser',
      define: { 'process.env.NODE_ENV': JSON.stringify(env) }
    });

    let output = transpiler.transformSync(sourceCode);

    // Inject a React import when the output references React without one.
    const referencesReact =
      output.includes('React.createElement') || output.includes('jsx(');
    if (
      addReactImport &&
      referencesReact &&
      !output.includes('import React') &&
      !output.includes('import * as React')
    ) {
      output = `import React from 'react';\n${output}`;
    }

    // Strip dev-only jsxDEV references from production output.
    if (env === 'production') {
      output = output.replace(/jsxDEV/g, 'jsx');
    }

    return output;
  } catch (error) {
    throw new Error(`JSX transformation failed: ${error.message}`);
  }
}
|
|
105
|
+
|
|
106
|
+
/**
 * Heuristically detect JSX syntax (or already-compiled JSX output) in code.
 *
 * @param {string} code - Source text to inspect.
 * @returns {boolean} True when any JSX marker is present.
 */
export function containsJSX(code) {
  // Compiled-output markers: classic factory calls or automatic-runtime calls.
  const markers = ['React.createElement', 'React.Fragment', 'jsx(', 'jsxs('];
  if (markers.some((marker) => code.includes(marker))) {
    return true;
  }
  // Raw JSX: <Component… (capitalized tag) or <div … (lowercase tag + space).
  return /<[A-Z]/.test(code) || /<[a-z][a-z0-9]*\s/.test(code);
}
|
|
117
|
+
|
|
118
|
+
/**
 * Strip CSS side-effect imports from JavaScript source (CSS is bundled
 * separately in production builds).
 *
 * @param {string} code - JavaScript source.
 * @returns {string} Source with CSS imports removed.
 */
export function removeCSSImports(code) {
  const patterns = [
    /import\s+['"][^'"]*\.css['"];?\s*/g,        // import './foo.css';
    /import\s+['"]bertui\/styles['"]\s*;?\s*/g   // import 'bertui/styles';
  ];
  return patterns.reduce((result, pattern) => result.replace(pattern, ''), code);
}
|
|
126
|
+
|
|
127
|
+
/**
 * Strip dotenv imports and their matching `.config()` calls so the code
 * can run in the browser.
 *
 * Fix over the previous version: the old regex `\w+\.config\(\)` removed
 * EVERY `<ident>.config()` call in the file (e.g. `app.config()`), not just
 * dotenv's. We now only remove `.config()` calls on identifiers that were
 * actually imported from 'dotenv' (plus the conventional `dotenv` name).
 *
 * @param {string} code - JavaScript source.
 * @returns {string} Source with dotenv usage removed.
 */
export function removeDotenvImports(code) {
  // Identifiers whose `.config()` calls should be stripped.
  const dotenvNames = new Set(['dotenv']);

  let result = code
    // `import dotenv from 'dotenv';` — remember the local name, drop the line.
    .replace(/import\s+(\w+)\s+from\s+['"]dotenv['"]\s*;?\s*/g, (_match, name) => {
      dotenvNames.add(name);
      return '';
    })
    // `import { config as c } from 'dotenv';` — remember each local binding.
    .replace(/import\s+\{([^}]+)\}\s+from\s+['"]dotenv['"]\s*;?\s*/g, (_match, bindings) => {
      for (const binding of bindings.split(',')) {
        const local = binding.split(/\s+as\s+/).pop().trim();
        if (local) dotenvNames.add(local);
      }
      return '';
    });

  // Identifiers come from \w+ captures, so they are regex-safe as-is.
  for (const name of dotenvNames) {
    result = result.replace(new RegExp(`\\b${name}\\.config\\(\\s*\\)\\s*;?\\s*`, 'g'), '');
  }

  return result;
}
|
|
136
|
+
|
|
137
|
+
/**
 * Append a `.js` extension to extensionless relative import specifiers so
 * the output is resolvable by native browser ESM.
 *
 * @param {string} code - JavaScript source.
 * @returns {string} Source with extensions added where needed.
 */
export function fixRelativeImports(code) {
  // Relative specifiers not already ending in .js/.jsx/.ts/.tsx/.json.
  const importRegex = /from\s+['"](\.\.?\/[^'"]+?)(?<!\.js|\.jsx|\.ts|\.tsx|\.json)['"]/g;

  return code.replace(importRegex, (original, specifier) =>
    // Keep directory imports and specifiers that carry some other extension.
    specifier.endsWith('/') || /\.\w+$/.test(specifier)
      ? original
      : `from '${specifier}.js'`
  );
}
|
package/src/css/index.js
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
// bertui/src/css/index.js - NEW FILE
|
|
2
|
+
// PURE CSS processing - no server
|
|
3
|
+
|
|
4
|
+
import { transform } from 'lightningcss';
|
|
5
|
+
import logger from '../logger/logger.js';
|
|
6
|
+
|
|
7
|
+
/**
 * Minify CSS with Lightning CSS; on any failure, fall back to a simple
 * regex-based minifier so the build never hard-fails on CSS.
 *
 * @param {string} css - Raw CSS text.
 * @param {Object} [options]
 * @param {string} [options.filename='style.css'] - Used for error reporting.
 * @returns {Promise<string>} Minified CSS.
 */
export async function minifyCSS(css, options = {}) {
  try {
    const result = transform({
      filename: options.filename || 'style.css',
      code: Buffer.from(css),
      minify: true,
      sourceMap: false,
      // Browser baselines, encoded as (major version << 16).
      targets: {
        chrome: 90 << 16,
        firefox: 88 << 16,
        safari: 14 << 16,
        edge: 90 << 16
      },
      drafts: {
        nesting: true
      }
    });
    return result.code.toString();
  } catch (error) {
    logger.warn(`CSS minification failed: ${error.message}`);
    // Fallback: strip comments, collapse whitespace, drop redundant semicolons.
    let minified = css.replace(/\/\*[\s\S]*?\*\//g, '');
    minified = minified.replace(/\s+/g, ' ');
    minified = minified.replace(/\s*([{}:;,])\s*/g, '$1');
    minified = minified.replace(/;}/g, '}');
    return minified.trim();
  }
}
|
|
36
|
+
|
|
37
|
+
/**
 * Concatenate CSS files, labeling each chunk with its source filename so
 * the combined bundle can be traced during debugging.
 *
 * Fix: the header comment previously emitted the literal text `$(unknown)`
 * — the destructured `filename` was never interpolated.
 *
 * @param {Array<{filename: string, content: string}>} files
 * @returns {string} Combined CSS.
 */
export function combineCSS(files) {
  return files.map(({ filename, content }) =>
    `/* ${filename} */\n${content}`
  ).join('\n\n');
}
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
// Re-export only the processor helpers that do NOT collide with the local
// declarations above: `minifyCSS` and `combineCSS` are already exported by
// this module, and re-exporting them from processor.js as well is a
// duplicate-export SyntaxError in ES modules.
export { minifyCSSSync, extractCSSImports, isCSSFile } from './processor.js';
export { buildAllCSS } from '../build/processors/css-builder.js';
export { buildCSS, copyCSS } from '../build/css-builder.js';
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
// bertui/src/css/processor.js - PURE CSS PROCESSING
|
|
2
|
+
import { transform } from 'lightningcss';
|
|
3
|
+
import logger from '../logger/logger.js';
|
|
4
|
+
|
|
5
|
+
/**
 * Minify CSS using Lightning CSS, falling back to a regex minifier when
 * Lightning CSS rejects the input.
 *
 * @param {string} css - Raw CSS text.
 * @param {Object} [options]
 * @param {string} [options.filename='style.css'] - Reported in errors.
 * @param {boolean} [options.minify=true]
 * @param {boolean} [options.sourceMap=false]
 * @param {Object} [options.targets] - Browser targets (major version << 16).
 * @returns {Promise<string>} Minified CSS, or '/* Empty CSS *​/' for blank input.
 */
export async function minifyCSS(css, options = {}) {
  // Blank input gets a placeholder instead of an empty transform round-trip.
  if (!css || css.trim() === '') {
    return '/* Empty CSS */';
  }

  const {
    filename = 'style.css',
    minify = true,
    sourceMap = false,
    targets = {
      chrome: 90 << 16,
      firefox: 88 << 16,
      safari: 14 << 16,
      edge: 90 << 16
    }
  } = options;

  try {
    const transformed = transform({
      filename,
      code: Buffer.from(css),
      minify,
      sourceMap,
      targets,
      drafts: {
        nesting: true
      }
    });
    return transformed.code.toString();
  } catch (error) {
    logger.warn(`Lightning CSS failed: ${error.message}, using fallback minifier`);
    return fallbackMinifyCSS(css);
  }
}
|
|
44
|
+
|
|
45
|
+
/**
 * Synchronous CSS minifier for build scripts.
 *
 * Consistency fix: the catch block previously swallowed the Lightning CSS
 * error silently; it now logs a warning before falling back, matching the
 * async minifyCSS in this module.
 *
 * @param {string} css - Raw CSS text.
 * @param {Object} [options]
 * @param {string} [options.filename='style.css']
 * @param {boolean} [options.minify=true]
 * @param {boolean} [options.sourceMap=false]
 * @returns {string} Minified CSS, or '/* Empty CSS *​/' for blank input.
 */
export function minifyCSSSync(css, options = {}) {
  const {
    filename = 'style.css',
    minify = true,
    sourceMap = false
  } = options;

  if (!css || css.trim() === '') {
    return '/* Empty CSS */';
  }

  try {
    const { code } = transform({
      filename,
      code: Buffer.from(css),
      minify,
      sourceMap,
      drafts: {
        nesting: true
      }
    });
    return code.toString();
  } catch (error) {
    logger.warn(`Lightning CSS failed: ${error.message}, using fallback minifier`);
    return fallbackMinifyCSS(css);
  }
}
|
|
74
|
+
|
|
75
|
+
/**
 * Combine multiple CSS files into one stylesheet, prefixing each chunk with
 * a comment naming its source file (useful when debugging bundles).
 *
 * Fix: the header previously emitted the literal text `$(unknown)` instead
 * of interpolating `filename`.
 *
 * @param {Array<{filename: string, content: string}>} files
 * @returns {string} Combined CSS.
 * @throws {Error} When `files` is not an array.
 */
export function combineCSS(files) {
  if (!Array.isArray(files)) {
    throw new Error('combineCSS expects an array of {filename, content}');
  }

  return files.map(({ filename, content }) => {
    // File comment makes the combined bundle traceable while debugging.
    const header = `/* ${filename} */\n`;
    return header + content;
  }).join('\n\n');
}
|
|
89
|
+
|
|
90
|
+
/**
 * Minimal regex-based CSS minifier used when Lightning CSS is unavailable
 * or fails. Note: it operates on raw text, so whitespace inside quoted
 * string values is collapsed too — acceptable for a last-resort fallback.
 *
 * @param {string} css - Raw CSS text.
 * @returns {string} Roughly minified CSS.
 */
function fallbackMinifyCSS(css) {
  const passes = [
    [/\/\*[\s\S]*?\*\//g, ''],     // strip comments
    [/\s+/g, ' '],                 // collapse whitespace runs
    [/\s*([{}:;,])\s*/g, '$1'],    // trim space around punctuation
    [/;}/g, '}']                   // drop the last semicolon before }
  ];
  return passes
    .reduce((out, [pattern, replacement]) => out.replace(pattern, replacement), css)
    .trim();
}
|
|
106
|
+
|
|
107
|
+
/**
 * Collect the specifiers of CSS side-effect imports in JavaScript source.
 *
 * @param {string} code - JavaScript source.
 * @returns {string[]} Import specifiers ending in .css (e.g. ['./app.css']).
 */
export function extractCSSImports(code) {
  const importPattern = /import\s+['"]([^'"]*\.css)['"];?/g;
  return [...code.matchAll(importPattern)].map(([, specifier]) => specifier);
}
|
|
121
|
+
|
|
122
|
+
/**
 * Check whether a filename refers to a CSS file (case-insensitive).
 *
 * Robustness fix: non-string/missing input now returns false instead of
 * throwing a TypeError, matching isImageFile's guard style elsewhere in
 * the package.
 *
 * @param {string} filename
 * @returns {boolean}
 */
export function isCSSFile(filename) {
  return typeof filename === 'string' && filename.toLowerCase().endsWith('.css');
}
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
// 3. src/image-optimizer/index.js - USE OXIPNG BINARY
|
|
2
|
+
import { exec } from 'child_process';
|
|
3
|
+
import { promisify } from 'util';
|
|
4
|
+
import { join } from 'path';
|
|
5
|
+
import { tmpdir } from 'os';
|
|
6
|
+
import { writeFile, readFile, unlink } from 'fs/promises';
|
|
7
|
+
|
|
8
|
+
const execAsync = promisify(exec);
|
|
9
|
+
|
|
10
|
+
/**
 * Optimize an image buffer. Currently only PNG is actually optimized (via
 * the external oxipng binary); every other format is returned unchanged.
 *
 * @param {Uint8Array|ArrayBuffer} buffer - Raw image bytes.
 * @param {Object} [options]
 * @param {string} [options.format='auto'] - Explicit format, or 'auto' to sniff.
 * @param {number} [options.quality=3] - oxipng optimization level.
 * @returns {Promise<{data: *, original_size: number, optimized_size: number, format: string, savings_percent: number}>}
 */
export async function optimizeImage(buffer, options = {}) {
  const { format = 'auto', quality = 3 } = options;

  // Sniff the real format from magic bytes unless the caller pinned one.
  const resolvedFormat = format === 'auto' ? detectFormat(buffer) : format;

  // Only PNG has a real optimizer wired up today.
  if (resolvedFormat !== 'png') {
    return fallbackOptimize(buffer, resolvedFormat);
  }
  return optimizePNG(buffer, quality);
}
|
|
24
|
+
|
|
25
|
+
/**
 * Optimize a PNG by round-tripping it through the external `oxipng` binary
 * via temp files.
 *
 * Fixes over the previous version:
 *  - the command passed `-o` twice (`-o <level> ... -o <outPath>`); in
 *    oxipng `-o` is the optimization level, so the second `-o` clobbered
 *    the level with a path. The output-file flag is `--out`.
 *  - temp filenames used Date.now() alone, which collides when two
 *    optimizations start within the same millisecond; a random suffix
 *    is added.
 *  - the returned ArrayBuffer is sliced to exactly this file's bytes, since
 *    `optimized.buffer` may be a larger shared allocation the Buffer views.
 *
 * @param {ArrayBuffer|Uint8Array} buffer - PNG bytes.
 * @param {number} [level=3] - oxipng optimization level.
 * @returns {Promise<{data: ArrayBuffer, original_size: number, optimized_size: number, format: string, savings_percent: number}>}
 */
async function optimizePNG(buffer, level = 3) {
  const original_size = buffer.byteLength;

  // Unique per-call temp names (timestamp + random suffix).
  const id = `${Date.now()}-${Math.random().toString(36).slice(2, 10)}`;
  const tmpPath = join(tmpdir(), `bertui-${id}.png`);
  const outPath = join(tmpdir(), `bertui-${id}-opt.png`);

  try {
    await writeFile(tmpPath, Buffer.from(buffer));

    // -o <level>: optimization effort; -s: strip safe metadata;
    // --out <file>: destination (NOT -o, which is the level flag).
    await execAsync(`oxipng -o ${level} -s --out "${outPath}" "${tmpPath}"`);

    const optimized = await readFile(outPath);
    const optimized_size = optimized.length;
    const savings_percent = ((original_size - optimized_size) / original_size * 100).toFixed(1);

    return {
      data: optimized.buffer.slice(optimized.byteOffset, optimized.byteOffset + optimized.byteLength),
      original_size,
      optimized_size,
      format: 'png',
      savings_percent: parseFloat(savings_percent)
    };
  } finally {
    // Best-effort cleanup of both temp files.
    await unlink(tmpPath).catch(() => {});
    await unlink(outPath).catch(() => {});
  }
}
|
|
56
|
+
|
|
57
|
+
/**
 * Pass-through result for formats we cannot optimize: returns the input
 * bytes unchanged and reports zero savings.
 *
 * @param {Uint8Array|ArrayBuffer} buffer - Raw image bytes.
 * @param {string} format - Format label to echo back.
 */
function fallbackOptimize(buffer, format) {
  const size = buffer.byteLength;
  return {
    data: buffer,
    original_size: size,
    optimized_size: size,
    format,
    savings_percent: 0
  };
}
|
|
66
|
+
|
|
67
|
+
/**
 * Sniff the image format from magic bytes. Only the 8-byte PNG signature
 * is recognized; anything else reports 'unknown'.
 *
 * @param {Uint8Array|Buffer} buffer - Raw image bytes.
 * @returns {string} 'png' or 'unknown'.
 */
function detectFormat(buffer) {
  const PNG_SIGNATURE = [0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A];
  if (buffer.length < PNG_SIGNATURE.length) {
    return 'unknown';
  }
  for (let i = 0; i < PNG_SIGNATURE.length; i += 1) {
    if (buffer[i] !== PNG_SIGNATURE[i]) {
      return 'unknown';
    }
  }
  return 'png';
}
|
|
74
|
+
|
|
75
|
+
/** WASM-backed image optimization is not available in this build. */
export function hasWasm() {
  return false;
}

/** Package version this optimizer module ships with. */
export const version = '1.1.7';
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
// bertui/src/images/index.js
|
|
2
|
+
// PURE image handling - NO WASM, NO SHARP, just copy (FALLBACK)
|
|
3
|
+
|
|
4
|
+
import { join, extname } from 'path';
|
|
5
|
+
import { cpSync, mkdirSync, existsSync, readdirSync } from 'fs';
|
|
6
|
+
import logger from '../logger/logger.js';
|
|
7
|
+
|
|
8
|
+
// Extensions treated as images throughout the dev server and build
// (lowercase, dot-prefixed).
export const IMAGE_EXTENSIONS = [
  '.png', '.jpg', '.jpeg', '.webp', '.gif', '.svg',
  '.avif', '.ico', '.bmp', '.tiff', '.tif'
];

/**
 * True when the filename carries one of the known image extensions
 * (case-insensitive).
 *
 * @param {string} filename
 * @returns {boolean}
 */
export function isImageFile(filename) {
  return IMAGE_EXTENSIONS.includes(extname(filename).toLowerCase());
}
|
|
17
|
+
|
|
18
|
+
/**
 * Recursively copy image files from `srcDir` to `destDir`, mirroring the
 * directory structure. Non-image files are counted as skipped.
 *
 * @param {string} srcDir - Source directory (missing dir yields zero counts).
 * @param {string} destDir - Destination directory (created if needed).
 * @returns {{copied: number, skipped: number}}
 */
export function copyImagesSync(srcDir, destDir) {
  let copied = 0;
  let skipped = 0;

  if (!existsSync(srcDir)) return { copied, skipped };

  mkdirSync(destDir, { recursive: true });

  // Named `walk` (not `process`) to avoid shadowing the Node global.
  const walk = (fromDir, toDir) => {
    for (const entry of readdirSync(fromDir, { withFileTypes: true })) {
      const source = join(fromDir, entry.name);

      if (entry.isDirectory()) {
        const nested = join(toDir, entry.name);
        mkdirSync(nested, { recursive: true });
        walk(source, nested);
      } else if (entry.isFile() && isImageFile(entry.name)) {
        try {
          cpSync(source, join(toDir, entry.name));
          copied += 1;
        } catch (error) {
          logger.warn(`Failed to copy ${entry.name}: ${error.message}`);
          skipped += 1;
        }
      } else {
        // Non-image file (or other entry type): count it, do not copy.
        skipped += 1;
      }
    }
  };

  walk(srcDir, destDir);
  return { copied, skipped };
}
|
|
54
|
+
|
|
55
|
+
/**
 * Map an image file extension to its MIME type for HTTP responses.
 * Unknown extensions fall back to application/octet-stream.
 *
 * @param {string} ext - Dot-prefixed extension, e.g. '.png'.
 * @returns {string} MIME type.
 */
export function getImageContentType(ext) {
  const MIME_TYPES = {
    '.jpg': 'image/jpeg',
    '.jpeg': 'image/jpeg',
    '.png': 'image/png',
    '.gif': 'image/gif',
    '.svg': 'image/svg+xml',
    '.webp': 'image/webp',
    '.avif': 'image/avif',
    '.ico': 'image/x-icon'
  };
  return MIME_TYPES[ext.toLowerCase()] || 'application/octet-stream';
}
|
|
68
|
+
|
|
69
|
+
/**
 * Recursively list image files under `dir`.
 *
 * Fix: relative paths were previously computed with
 * `fullPath.replace(baseDir, '')`, which removes the FIRST occurrence of
 * `baseDir` anywhere in the path — mangling names when baseDir is '.' or
 * repeats mid-path. The prefix is now stripped only when it truly is one.
 *
 * @param {string} dir - Directory to scan.
 * @param {string} [baseDir=dir] - Prefix stripped from reported paths.
 * @returns {Array<{path: string, relativePath: string, filename: string}>}
 */
export function getImageFiles(dir, baseDir = dir) {
  const images = [];
  if (!existsSync(dir)) return images;

  const toRelative = (fullPath) =>
    fullPath.startsWith(baseDir)
      ? fullPath.slice(baseDir.length).replace(/^[\/\\]/, '')
      : fullPath;

  function scan(directory) {
    for (const entry of readdirSync(directory, { withFileTypes: true })) {
      const fullPath = join(directory, entry.name);
      if (entry.isDirectory()) {
        scan(fullPath);
      } else if (entry.isFile() && isImageFile(entry.name)) {
        images.push({
          path: fullPath,
          relativePath: toRelative(fullPath),
          filename: entry.name
        });
      }
    }
  }
  scan(dir);
  return images;
}
|
|
88
|
+
|
|
89
|
+
/**
 * Sum the byte sizes of a list of image records.
 *
 * Fix: this module's getImageFiles does not attach a `size` field, so the
 * previous `total + img.size` poisoned the sum to NaN. Missing sizes now
 * count as 0.
 *
 * @param {Array<{size?: number}>} images
 * @returns {number} Total bytes.
 */
export function getTotalImageSize(images) {
  return images.reduce((total, img) => total + (img.size ?? 0), 0);
}
|
|
92
|
+
|
|
93
|
+
/**
 * Render a byte count as a human-readable string (base-1024 units).
 *
 * @param {number} bytes - Non-negative byte count.
 * @returns {string} e.g. '1.5 KB'.
 */
export function formatBytes(bytes) {
  if (bytes === 0) return '0 B';
  const KILO = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB'];
  const exponent = Math.floor(Math.log(bytes) / Math.log(KILO));
  return `${parseFloat((bytes / KILO ** exponent).toFixed(2))} ${sizes[exponent]}`;
}
|
|
100
|
+
|
|
101
|
+
// Legacy exports
|
|
102
|
+
export { optimizeImages, copyImages } from '../build/image-optimizer.js';
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
// bertui/src/images/processor.js - PURE IMAGE PROCESSING
|
|
2
|
+
import { join, extname } from 'path';
|
|
3
|
+
import { cpSync, mkdirSync, existsSync, readdirSync, statSync } from 'fs';
|
|
4
|
+
import logger from '../logger/logger.js';
|
|
5
|
+
|
|
6
|
+
// Every extension the processor recognizes as an image (lowercase,
// dot-prefixed).
export const IMAGE_EXTENSIONS = [
  '.png', '.jpg', '.jpeg', '.webp', '.gif', '.svg',
  '.avif', '.ico', '.bmp', '.tiff', '.tif'
];

/**
 * Check whether a filename has a known image extension (case-insensitive).
 * Falsy input (null/undefined/'') is treated as "not an image".
 *
 * @param {string} filename
 * @returns {boolean}
 */
export function isImageFile(filename) {
  if (!filename) {
    return false;
  }
  const lowered = extname(filename).toLowerCase();
  return IMAGE_EXTENSIONS.includes(lowered);
}
|
|
20
|
+
|
|
21
|
+
/**
 * Map an image extension to the Content-Type header value. Unknown
 * extensions fall back to application/octet-stream.
 *
 * @param {string} ext - Dot-prefixed extension, e.g. '.png'.
 * @returns {string} MIME type.
 */
export function getImageContentType(ext) {
  const MIME_BY_EXTENSION = {
    '.jpg': 'image/jpeg',
    '.jpeg': 'image/jpeg',
    '.png': 'image/png',
    '.gif': 'image/gif',
    '.svg': 'image/svg+xml',
    '.webp': 'image/webp',
    '.avif': 'image/avif',
    '.ico': 'image/x-icon',
    '.bmp': 'image/bmp',
    '.tiff': 'image/tiff',
    '.tif': 'image/tiff'
  };
  return MIME_BY_EXTENSION[ext.toLowerCase()] || 'application/octet-stream';
}
|
|
41
|
+
|
|
42
|
+
/**
 * Copy image files from `srcDir` into `destDir`, recursively, mirroring
 * the directory structure.
 *
 * Cleanup: the directory branch previously recomputed `subDest` with the
 * same `join(targetDir, entry.name)` already held in `destPath`; the
 * duplicate computation is removed.
 *
 * @param {string} srcDir - Source directory (missing dir → zero counts).
 * @param {string} destDir - Destination directory (created if needed).
 * @param {Object} [options]
 * @param {boolean} [options.verbose=false] - Log each copy/skip via logger.debug.
 * @param {boolean} [options.overwrite=true] - When false, keep existing files.
 * @param {(path: string) => boolean} [options.filter] - Return false to skip
 *   an entry; rejecting a directory skips its whole subtree (counted as one skip).
 * @returns {{copied: number, skipped: number, failed: number}}
 */
export function copyImagesSync(srcDir, destDir, options = {}) {
  const {
    verbose = false,
    overwrite = true,
    filter = null
  } = options;

  let copied = 0;
  let skipped = 0;
  let failed = 0;

  if (!existsSync(srcDir)) {
    logger.warn(`Source directory not found: ${srcDir}`);
    return { copied, skipped, failed };
  }

  mkdirSync(destDir, { recursive: true });

  function copyDir(dir, targetDir) {
    try {
      for (const entry of readdirSync(dir, { withFileTypes: true })) {
        const srcPath = join(dir, entry.name);
        const destPath = join(targetDir, entry.name);

        // Caller-supplied filter applies to files AND directories.
        if (filter && !filter(srcPath)) {
          skipped++;
          continue;
        }

        if (entry.isDirectory()) {
          mkdirSync(destPath, { recursive: true });
          copyDir(srcPath, destPath);
        } else if (entry.isFile() && isImageFile(entry.name)) {
          try {
            if (!overwrite && existsSync(destPath)) {
              skipped++;
              if (verbose) logger.debug(`Skipped existing: ${entry.name}`);
              continue;
            }

            cpSync(srcPath, destPath);
            copied++;

            if (verbose) {
              const srcSize = statSync(srcPath).size;
              logger.debug(`Copied: ${entry.name} (${(srcSize / 1024).toFixed(2)}KB)`);
            }
          } catch (error) {
            failed++;
            logger.warn(`Failed to copy ${entry.name}: ${error.message}`);
          }
        } else {
          // Non-image file (or other entry type).
          skipped++;
        }
      }
    } catch (error) {
      // A failure reading one directory should not abort the whole copy.
      logger.error(`Error processing ${dir}: ${error.message}`);
    }
  }

  copyDir(srcDir, destDir);

  return { copied, skipped, failed };
}
|
|
117
|
+
|
|
118
|
+
/**
 * Get all image files from a directory, recursively, with size metadata.
 *
 * Fix: relative paths were previously computed with
 * `fullPath.replace(baseDir, '')`, which removes the FIRST occurrence of
 * `baseDir` anywhere in the path — mangling names when baseDir is '.' or
 * repeats mid-path. The prefix is now stripped only when it truly is one.
 *
 * @param {string} dir - Directory to scan.
 * @param {string} [baseDir=dir] - Prefix stripped from reported paths.
 * @returns {Array<{path: string, relativePath: string, filename: string, size: number, ext: string}>}
 */
export function getImageFiles(dir, baseDir = dir) {
  const images = [];

  if (!existsSync(dir)) {
    return images;
  }

  const toRelative = (fullPath) =>
    fullPath.startsWith(baseDir)
      ? fullPath.slice(baseDir.length).replace(/^[\/\\]/, '')
      : fullPath;

  function scan(directory) {
    for (const entry of readdirSync(directory, { withFileTypes: true })) {
      const fullPath = join(directory, entry.name);

      if (entry.isDirectory()) {
        scan(fullPath);
      } else if (entry.isFile() && isImageFile(entry.name)) {
        images.push({
          path: fullPath,
          relativePath: toRelative(fullPath),
          filename: entry.name,
          size: statSync(fullPath).size,
          ext: extname(entry.name).toLowerCase()
        });
      }
    }
  }

  scan(dir);
  return images;
}
|
|
152
|
+
|
|
153
|
+
/**
 * Sum the `size` field (bytes) of image records produced by getImageFiles.
 *
 * @param {Array<{size: number}>} images
 * @returns {number} Total bytes.
 */
export function getTotalImageSize(images) {
  let total = 0;
  for (const image of images) {
    total += image.size;
  }
  return total;
}
|
|
159
|
+
|
|
160
|
+
/**
 * Render a byte count as a human-readable string (B/KB/MB/GB, base 1024).
 *
 * @param {number} bytes - Non-negative byte count.
 * @returns {string} e.g. '1.5 KB'.
 */
export function formatBytes(bytes) {
  if (bytes === 0) {
    return '0 B';
  }
  const UNIT_NAMES = ['B', 'KB', 'MB', 'GB'];
  const order = Math.floor(Math.log(bytes) / Math.log(1024));
  const scaled = parseFloat((bytes / Math.pow(1024, order)).toFixed(2));
  return `${scaled} ${UNIT_NAMES[order]}`;
}
|