rn-remove-image-bg 0.0.15 → 0.0.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/ImageProcessing.web.d.ts +3 -6
- package/lib/ImageProcessing.web.js +187 -144
- package/package.json +1 -1
- package/src/ImageProcessing.web.ts +193 -164
package/lib/ImageProcessing.web.d.ts CHANGED
@@ -1,7 +1,7 @@
 /**
- * Web implementation using
- *
- * Loads from CDN to bypass Metro bundler issues.
+ * Web implementation using Inline Web Worker & WebGPU.
+ * Moves all heavy processing to a background thread to prevent UI freezing.
+ * Loads @huggingface/transformers from CDN to bypass Metro bundler issues.
  */
 export type OutputFormat = 'PNG' | 'WEBP';
 export interface RemoveBgImageOptions {
@@ -10,9 +10,6 @@ export interface RemoveBgImageOptions {
     onProgress?: (progress: number) => void;
     debug?: boolean;
 }
-/**
- * Remove background from image
- */
 export declare function removeBgImage(uri: string, options?: RemoveBgImageOptions): Promise<string>;
 export declare const removeBackground: typeof removeBgImage;
 export interface CompressImageOptions {
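For reference, the declaration above keeps the same public surface (removeBgImage, removeBackground, RemoveBgImageOptions, OutputFormat); only the implementation behind it moves into an inline worker in the files below. A minimal usage sketch against this declared API follows — the import path, sample option values, and helper name are illustrative assumptions, not taken from the diff (quality and format are assumed to be part of RemoveBgImageOptions because both implementations destructure them from options):

import { removeBgImage, type OutputFormat } from 'rn-remove-image-bg'; // assumed entry point

// Hypothetical web caller: strip the background and report coarse progress.
async function cutOutSubject(uri: string): Promise<string> {
  const format: OutputFormat = 'PNG';
  const dataUrl = await removeBgImage(uri, {
    format,
    quality: 90,
    onProgress: (p: number) => console.log(`rmbg: ${Math.round(p)}%`),
  });
  // On web, the new implementation resolves with a data URL produced by the
  // worker's FileReader.readAsDataURL step shown in the diffs below.
  return dataUrl;
}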
package/lib/ImageProcessing.web.js CHANGED
@@ -1,152 +1,194 @@
 /**
- * Web implementation using
- *
- * Loads from CDN to bypass Metro bundler issues.
+ * Web implementation using Inline Web Worker & WebGPU.
+ * Moves all heavy processing to a background thread to prevent UI freezing.
+ * Loads @huggingface/transformers from CDN to bypass Metro bundler issues.
  */
-//
-
-
-
-let
-
-
-
-
-
-const
-
-
-
-async
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// ==========================================
+// INLINE WORKER CODE (Run in background)
+// ==========================================
+const WORKER_CODE = `
+let pipeline = null;
+let env = null;
+
+// Helper to load image bitmap
+async function loadImageBitmapFromUrl(url) {
+  const response = await fetch(url);
+  const blob = await response.blob();
+  return await createImageBitmap(blob);
+}
+
+self.onmessage = async (e) => {
+  const { id, type, payload } = e.data;
+
+  try {
+    if (type === 'init') {
+      if (!pipeline) {
+        // Dynamic import from CDN
+        const transformers = await import('https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.3.0/+esm');
+        env = transformers.env;
+
+        // Configure environment
+        env.allowLocalModels = false;
+        env.useBrowserCache = true;
+        // Try WebGPU, fallback to WASM
+        // env.backends.onnx.wasm.numThreads = 1; // Limit threads if needed
+
+        pipeline = await transformers.pipeline('image-segmentation', 'briaai/RMBG-1.4', {
+          device: 'webgpu', // Attempt WebGPU first
+          dtype: 'q8', // Quantized for speed
           progress_callback: (info) => {
-
-
-
-
+            self.postMessage({ id, type: 'progress', payload: info });
+          }
+        });
+      }
+      self.postMessage({ id, type: 'success', payload: true });
+      return;
+    }
+
+    if (type === 'removeBg') {
+      const { uri, format, quality } = payload;
+
+      if (!pipeline) throw new Error('Pipeline not initialized');
+
+      // 1. Run Inference
+      const results = await pipeline(uri);
+      const result = Array.isArray(results) ? results[0] : results;
+      if (!result || !result.mask) throw new Error('No mask generated');
+
+      // 2. Apply Mask (Pixel Manipulation)
+      // We use OffscreenCanvas if available, or just pixel math
+      // Since we are in a worker, we can't use DOM Image, but we can use ImageBitmap
+
+      const originalBitmap = await loadImageBitmapFromUrl(uri);
+      const { width, height } = originalBitmap;
+
+      const offscreen = new OffscreenCanvas(width, height);
+      const ctx = offscreen.getContext('2d');
+      ctx.drawImage(originalBitmap, 0, 0);
+
+      const imageData = ctx.getImageData(0, 0, width, height);
+      const pixelData = imageData.data;
+
+      // Handle Mask
+      // mask.data is usually 1-channel or 3-channel
+      const mask = result.mask;
+      const maskData = mask.data;
+
+      // Simple resizing logic if dimensions differ (Nearest Neighbor)
+      // (Preprocessing usually resizes input, so output mask matches input size typically?
+      // Actually RMBG-1.4 output is fixed size 1024x1024 usually, need resize)
+
+      const maskW = mask.width;
+      const maskH = mask.height;
+
+      for (let i = 0; i < pixelData.length; i += 4) {
+        const pixelIndex = i / 4;
+        const x = pixelIndex % width;
+        const y = Math.floor(pixelIndex / width);
+
+        // Map to mask coordinates
+        const mx = Math.floor(x * (maskW / width));
+        const my = Math.floor(y * (maskH / height));
+        const maskIdx = (my * maskW + mx);
+
+        // Get Alpha
+        let alpha = 255;
+        if (maskData.length === maskW * maskH) {
+          alpha = maskData[maskIdx];
+        } else {
+          alpha = maskData[maskIdx * mask.channels]; // Assuming channels property or stride
+          // Fallback if channels undefined:
+          if (!mask.channels) alpha = maskData[maskIdx * 3]; // RGB assumption
+        }
+
+        pixelData[i + 3] = alpha;
+      }
+
+      ctx.putImageData(imageData, 0, 0);
+
+      // 3. Convert to Blob/DataURL
+      const blob = await offscreen.convertToBlob({
+        type: format === 'WEBP' ? 'image/webp' : 'image/png',
+        quality: quality / 100
       });
-
-
-
-
-
-}
-
-
-
-
-    const { format = 'PNG', quality = 100, onProgress, debug = false } = options;
-    if (debug)
-        console.log('[rmbg] Processing:', uri);
-    onProgress?.(5);
-    const segmenter = await ensureLoaded(onProgress, debug);
-    onProgress?.(60);
-    const results = await segmenter(uri);
-    onProgress?.(90);
-    const result = Array.isArray(results) ? results[0] : results;
-    if (!result?.mask)
-        throw new Error('No mask returned');
-    // Apply mask to original image
-    const original = await loadImage(uri);
-    const dataUrl = await applyMask(original, result.mask, format, quality);
-    if (debug)
-        console.log('[rmbg] Done');
-    onProgress?.(100);
-    return dataUrl;
-}
-async function applyMask(image, mask, format, quality) {
-    const canvas = document.createElement('canvas');
-    // Use original image dimensions
-    canvas.width = image.width;
-    canvas.height = image.height;
-    const ctx = canvas.getContext('2d');
-    if (!ctx)
-        throw new Error('Could not get canvas context');
-    // Draw original image
-    ctx.drawImage(image, 0, 0);
-    // Get image data to manipulate pixels
-    const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
-    const pixelData = imageData.data;
-    // Process mask
-    let maskData;
-    let maskWidth;
-    let maskHeight;
-    if (typeof mask === 'string') {
-        // If mask is a URL, load it
-        const maskImg = await loadImage(mask);
-        const maskCanvas = document.createElement('canvas');
-        maskCanvas.width = canvas.width;
-        maskCanvas.height = canvas.height;
-        const maskCtx = maskCanvas.getContext('2d');
-        if (!maskCtx)
-            throw new Error('Could not get mask context');
-        // Draw and resize mask to match image
-        maskCtx.drawImage(maskImg, 0, 0, canvas.width, canvas.height);
-        const maskImageData = maskCtx.getImageData(0, 0, canvas.width, canvas.height);
-        maskData = maskImageData.data;
-        maskWidth = canvas.width;
-        maskHeight = canvas.height;
-    }
-    else {
-        // @ts-ignore - Transformers.js types are loose
-        maskData = mask.data;
-        maskWidth = mask.width;
-        maskHeight = mask.height;
+
+      // Convert blob to DataURL for return
+      const reader = new FileReader();
+      reader.onloadend = () => {
+        self.postMessage({ id, type: 'success', payload: reader.result });
+      };
+      reader.readAsDataURL(blob);
+    }
+  } catch (err) {
+    self.postMessage({ id, type: 'error', payload: err.message });
   }
-
-
-
-
-
-
-
+};
+`;
+// ==========================================
+// MAIN THREAD BRIDGE
+// ==========================================
+let worker = null;
+const pendingMessages = new Map();
+function getWorker() {
+    if (!worker) {
+        const blob = new Blob([WORKER_CODE], { type: 'application/javascript' });
+        const url = URL.createObjectURL(blob);
+        worker = new Worker(url);
+        worker.onmessage = (e) => {
+            const { id, type, payload } = e.data;
+            const deferred = pendingMessages.get(id);
+            if (!deferred)
+                return;
+            if (type === 'progress') {
+                if (deferred.onProgress && payload.progress) {
+                    // Map 0-100 progress
+                    deferred.onProgress(payload.progress);
+                }
             }
-            else if (
-
+            else if (type === 'success') {
+                deferred.resolve(payload);
+                pendingMessages.delete(id);
             }
-            else {
-
+            else if (type === 'error') {
+                deferred.reject(new Error(payload));
+                pendingMessages.delete(id);
             }
-        }
-
-
-
-
-
-    const
-
-
-
-
-
-
-
-
-    }
-    };
-    // Apply alpha
-    for (let i = 0; i < pixelData.length; i += 4) {
-        const alpha = getAlpha(i, maskData, maskWidth, maskHeight, canvas.width, canvas.height) ?? 255;
-        pixelData[i + 3] = alpha;
+        };
+    }
+    return worker;
+}
+function sendToWorker(type, payload, onProgress) {
+    return new Promise((resolve, reject) => {
+        const id = Math.random().toString(36).substring(7);
+        pendingMessages.set(id, { resolve, reject, onProgress });
+        getWorker().postMessage({ id, type, payload });
+    });
+}
+// Initialize model
+let initPromise = null;
+async function ensureInit() {
+    if (!initPromise) {
+        initPromise = sendToWorker('init', {});
     }
-
-
+    return initPromise;
+}
+export async function removeBgImage(uri, options = {}) {
+    const { format = 'PNG', quality = 100, onProgress } = options;
+    onProgress?.(1); // Start
+    await ensureInit();
+    // The worker handles the heavy calculation
+    const result = await sendToWorker('removeBg', { uri, format, quality }, (p) => {
+        // Transformers.js progress is model downloading mainly
+        // We can map it: 0-90% download/load, 90-100% inference
+        onProgress?.(p * 0.9);
+    });
+    onProgress?.(100);
+    return result;
 }
 export const removeBackground = removeBgImage;
+// ==========================================
+// UTILITIES (Main Thread - lightweight)
+// ==========================================
 // Helper to load image
 function loadImage(src) {
     return new Promise((resolve, reject) => {
@@ -200,7 +242,7 @@ export async function generateThumbhash(imageUri, options = {}) {
     ctx.drawImage(img, 0, 0, size, size);
     const imageData = ctx.getImageData(0, 0, size, size);
     // Load thumbhash from CDN
-    // @ts-
+    // @ts-ignore
     const { rgbaToThumbHash } = await import(/* webpackIgnore: true */ 'https://cdn.jsdelivr.net/npm/thumbhash@0.1/+esm');
     const hash = rgbaToThumbHash(size, size, imageData.data);
     return btoa(String.fromCharCode(...hash));
@@ -211,12 +253,13 @@ export async function generateThumbhash(imageUri, options = {}) {
     }
 }
 export async function clearCache() {
-
-
-
-
-
+    if (worker) {
+        worker.terminate();
+        worker = null;
+    }
+    initPromise = null;
 }
+export function getCacheSize() { return 0; }
 export async function onLowMemory() {
     await clearCache();
     return 0;
package/package.json CHANGED

package/src/ImageProcessing.web.ts CHANGED
@@ -1,7 +1,7 @@
 /**
- * Web implementation using
- *
- * Loads from CDN to bypass Metro bundler issues.
+ * Web implementation using Inline Web Worker & WebGPU.
+ * Moves all heavy processing to a background thread to prevent UI freezing.
+ * Loads @huggingface/transformers from CDN to bypass Metro bundler issues.
  */
 
 export type OutputFormat = 'PNG' | 'WEBP';
@@ -13,177 +13,207 @@ export interface RemoveBgImageOptions {
   debug?: boolean;
 }
 
-//
-
-
-
-let
-
-
-
-
-
-
-
-  transformersModule = await import(/* webpackIgnore: true */ cdnUrl);
-  return transformersModule;
-}
-
-async function ensureLoaded(onProgress?: (p: number) => void, debug?: boolean) {
-  if (pipeline) return pipeline;
-
-  if (loadPromise) {
-    await loadPromise;
-    return pipeline;
+// ==========================================
+// INLINE WORKER CODE (Run in background)
+// ==========================================
+const WORKER_CODE = `
+let pipeline = null;
+let env = null;
+
+// Helper to load image bitmap
+async function loadImageBitmapFromUrl(url) {
+  const response = await fetch(url);
+  const blob = await response.blob();
+  return await createImageBitmap(blob);
 }
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+self.onmessage = async (e) => {
+  const { id, type, payload } = e.data;
+
+  try {
+    if (type === 'init') {
+      if (!pipeline) {
+        // Dynamic import from CDN
+        const transformers = await import('https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.3.0/+esm');
+        env = transformers.env;
+
+        // Configure environment
+        env.allowLocalModels = false;
+        env.useBrowserCache = true;
+        // Try WebGPU, fallback to WASM
+        // env.backends.onnx.wasm.numThreads = 1; // Limit threads if needed
+
+        pipeline = await transformers.pipeline('image-segmentation', 'briaai/RMBG-1.4', {
+          device: 'webgpu', // Attempt WebGPU first
+          dtype: 'q8', // Quantized for speed
+          progress_callback: (info) => {
+            self.postMessage({ id, type: 'progress', payload: info });
+          }
+        });
       }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      self.postMessage({ id, type: 'success', payload: true });
+      return;
+    }
+
+    if (type === 'removeBg') {
+      const { uri, format, quality } = payload;
+
+      if (!pipeline) throw new Error('Pipeline not initialized');
+
+      // 1. Run Inference
+      const results = await pipeline(uri);
+      const result = Array.isArray(results) ? results[0] : results;
+      if (!result || !result.mask) throw new Error('No mask generated');
+
+      // 2. Apply Mask (Pixel Manipulation)
+      // We use OffscreenCanvas if available, or just pixel math
+      // Since we are in a worker, we can't use DOM Image, but we can use ImageBitmap
+
+      const originalBitmap = await loadImageBitmapFromUrl(uri);
+      const { width, height } = originalBitmap;
+
+      const offscreen = new OffscreenCanvas(width, height);
+      const ctx = offscreen.getContext('2d');
+      ctx.drawImage(originalBitmap, 0, 0);
+
+      const imageData = ctx.getImageData(0, 0, width, height);
+      const pixelData = imageData.data;
+
+      // Handle Mask
+      // mask.data is usually 1-channel or 3-channel
+      const mask = result.mask;
+      const maskData = mask.data;
+
+      // Simple resizing logic if dimensions differ (Nearest Neighbor)
+      // (Preprocessing usually resizes input, so output mask matches input size typically?
+      // Actually RMBG-1.4 output is fixed size 1024x1024 usually, need resize)
+
+      const maskW = mask.width;
+      const maskH = mask.height;
+
+      for (let i = 0; i < pixelData.length; i += 4) {
+        const pixelIndex = i / 4;
+        const x = pixelIndex % width;
+        const y = Math.floor(pixelIndex / width);
+
+        // Map to mask coordinates
+        const mx = Math.floor(x * (maskW / width));
+        const my = Math.floor(y * (maskH / height));
+        const maskIdx = (my * maskW + mx);
+
+        // Get Alpha
+        let alpha = 255;
+        if (maskData.length === maskW * maskH) {
+          alpha = maskData[maskIdx];
+        } else {
+          alpha = maskData[maskIdx * mask.channels]; // Assuming channels property or stride
+          // Fallback if channels undefined:
+          if (!mask.channels) alpha = maskData[maskIdx * 3]; // RGB assumption
+        }
+
+        pixelData[i + 3] = alpha;
+      }
+
+      ctx.putImageData(imageData, 0, 0);
+
+      // 3. Convert to Blob/DataURL
+      const blob = await offscreen.convertToBlob({
+        type: format === 'WEBP' ? 'image/webp' : 'image/png',
+        quality: quality / 100
+      });
+
+      // Convert blob to DataURL for return
+      const reader = new FileReader();
+      reader.onloadend = () => {
+        self.postMessage({ id, type: 'success', payload: reader.result });
+      };
+      reader.readAsDataURL(blob);
+    }
+  } catch (err) {
+    self.postMessage({ id, type: 'error', payload: err.message });
+  }
+};
+`;
 
-
-
+// ==========================================
+// MAIN THREAD BRIDGE
+// ==========================================
 
-
-
+let worker: Worker | null = null;
+const pendingMessages = new Map<string, { resolve: (v: any) => void; reject: (e: any) => void; onProgress?: (p: number) => void }>();
 
-
-if (!
+function getWorker() {
+  if (!worker) {
+    const blob = new Blob([WORKER_CODE], { type: 'application/javascript' });
+    const url = URL.createObjectURL(blob);
+    worker = new Worker(url);
+
+    worker.onmessage = (e) => {
+      const { id, type, payload } = e.data;
+      const deferred = pendingMessages.get(id);
+
+      if (!deferred) return;
+
+      if (type === 'progress') {
+        if (deferred.onProgress && payload.progress) {
+          // Map 0-100 progress
+          deferred.onProgress(payload.progress);
+        }
+      } else if (type === 'success') {
+        deferred.resolve(payload);
+        pendingMessages.delete(id);
+      } else if (type === 'error') {
+        deferred.reject(new Error(payload));
+        pendingMessages.delete(id);
+      }
+    };
+  }
+  return worker;
+}
 
-
-
-
+function sendToWorker(type: string, payload: any, onProgress?: (p: number) => void): Promise<any> {
+  return new Promise((resolve, reject) => {
+    const id = Math.random().toString(36).substring(7);
+    pendingMessages.set(id, { resolve, reject, onProgress });
+    getWorker().postMessage({ id, type, payload });
+  });
+}
 
-
-
-
-
+// Initialize model
+let initPromise: Promise<void> | null = null;
+async function ensureInit() {
+  if (!initPromise) {
+    initPromise = sendToWorker('init', {});
+  }
+  return initPromise;
 }
 
-async function
-
-
-  format: OutputFormat,
-  quality: number
+export async function removeBgImage(
+  uri: string,
+  options: RemoveBgImageOptions = {}
 ): Promise<string> {
-
-  // Use original image dimensions
-  canvas.width = image.width;
-  canvas.height = image.height;
-  const ctx = canvas.getContext('2d');
-  if (!ctx) throw new Error('Could not get canvas context');
-
-  // Draw original image
-  ctx.drawImage(image, 0, 0);
-
-  // Get image data to manipulate pixels
-  const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
-  const pixelData = imageData.data;
-
-  // Process mask
-  let maskData: Uint8ClampedArray | Uint8Array;
-  let maskWidth: number;
-  let maskHeight: number;
-
-  if (typeof mask === 'string') {
-    // If mask is a URL, load it
-    const maskImg = await loadImage(mask);
-    const maskCanvas = document.createElement('canvas');
-    maskCanvas.width = canvas.width;
-    maskCanvas.height = canvas.height;
-    const maskCtx = maskCanvas.getContext('2d');
-    if (!maskCtx) throw new Error('Could not get mask context');
+  const { format = 'PNG', quality = 100, onProgress } = options;
 
-    //
-
-    const maskImageData = maskCtx.getImageData(0, 0, canvas.width, canvas.height);
-    maskData = maskImageData.data;
-    maskWidth = canvas.width;
-    maskHeight = canvas.height;
-  } else {
-    // @ts-ignore - Transformers.js types are loose
-    maskData = mask.data;
-    maskWidth = mask.width;
-    maskHeight = mask.height;
-  }
-
-  // Helper to get alpha value from mask data
-  const getAlpha = (index: number, data: Uint8ClampedArray | Uint8Array, width: number, height: number, targetWidth: number, targetHeight: number) => {
-    // If dimensions match
-    if (width === targetWidth && height === targetHeight) {
-      // Check if mask is single channel (grayscale) or RGBA
-      if (data.length === width * height) {
-        return data[index / 4];
-      } else if (data.length === width * height * 3) {
-        return data[Math.floor(index / 4) * 3];
-      } else {
-        return data[index]; // Assume RGBA red channel or alpha channel usage
-      }
-    }
+  onProgress?.(1); // Start
+  await ensureInit();
 
-  //
-  const
-
+  // The worker handles the heavy calculation
+  const result = await sendToWorker('removeBg', { uri, format, quality }, (p) => {
+    // Transformers.js progress is model downloading mainly
+    // We can map it: 0-90% download/load, 90-100% inference
+    onProgress?.(p * 0.9);
+  });
 
-
-
-    const maskIndex = (maskY * width + maskX);
-
-    if (data.length === width * height) {
-      return data[maskIndex];
-    } else if (data.length === width * height * 3) {
-      return data[maskIndex * 3];
-    } else {
-      return data[maskIndex * 4];
-    }
-  };
-
-  // Apply alpha
-  for (let i = 0; i < pixelData.length; i += 4) {
-    const alpha = getAlpha(i, maskData, maskWidth, maskHeight, canvas.width, canvas.height) ?? 255;
-    pixelData[i + 3] = alpha;
-  }
-
-  ctx.putImageData(imageData, 0, 0);
-  return canvas.toDataURL(format === 'WEBP' ? 'image/webp' : 'image/png', quality / 100);
+  onProgress?.(100);
+  return result as string;
 }
 
 export const removeBackground = removeBgImage;
 
+// ==========================================
+// UTILITIES (Main Thread - lightweight)
+// ==========================================
+
 // Helper to load image
 function loadImage(src: string): Promise<HTMLImageElement> {
   return new Promise((resolve, reject) => {
@@ -269,7 +299,7 @@ export async function generateThumbhash(
   const imageData = ctx.getImageData(0, 0, size, size);
 
   // Load thumbhash from CDN
-  // @ts-
+  // @ts-ignore
  const { rgbaToThumbHash } = await import(/* webpackIgnore: true */ 'https://cdn.jsdelivr.net/npm/thumbhash@0.1/+esm');
  const hash = rgbaToThumbHash(size, size, imageData.data);
  return btoa(String.fromCharCode(...hash));
@@ -280,18 +310,17 @@ export async function generateThumbhash(
 }
 
 export async function clearCache(): Promise<void> {
-
-
-
-
-
-  return 0;
+  if (worker) {
+    worker.terminate();
+    worker = null;
+  }
+  initPromise = null;
 }
 
+export function getCacheSize(): number { return 0; }
 export async function onLowMemory(): Promise<number> {
   await clearCache();
   return 0;
 }
-
 export function configureCache(_config: { maxEntries?: number }): void {}
 export function getCacheDirectory(): string { return ''; }
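The worker path introduced in this release relies on browser capabilities that are not universally available: Worker, OffscreenCanvas with a 2D context, createImageBitmap, FileReader, and optionally WebGPU for the device: 'webgpu' attempt. Below is a small feature-detection sketch a consumer might run before calling the web implementation; this helper is not part of the package, and the WebGPU check only indicates that the attempt could succeed (the diff's comments suggest a WASM fallback otherwise):

// Hypothetical pre-flight check (not in the package): report which browser
// features the inline-worker implementation can rely on.
export function describeWebSupport() {
  const hasWorker = typeof Worker !== 'undefined';
  const hasOffscreenCanvas = typeof OffscreenCanvas !== 'undefined';
  const hasImageBitmap = typeof createImageBitmap === 'function';
  // navigator.gpu only signals that the 'webgpu' device attempt might work;
  // its absence does not necessarily mean background removal will fail.
  const hasWebGPU = typeof navigator !== 'undefined' && 'gpu' in navigator;
  return { hasWorker, hasOffscreenCanvas, hasImageBitmap, hasWebGPU };
}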