@gyeonghokim/fisheye.js 0.0.0 → 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,16 +1,16 @@
  # fisheye.js

- > Modern fisheye dewarping library for the web, using **General Purpose GPU**
+ > Modern fisheye dewarping library for the web using **WebGPU** (general-purpose GPU compute)

- fisheye.js is a javascript library for drawing VideoFrame to the canvas with [simple radial lens distortion](<https://en.wikipedia.org/wiki/Distortion_(optics)>) using **GPGPU** WebGPU(WebGL if your browser does not support WebGPU).
+ fisheye.js processes [VideoFrame](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame)s with **WebGPU compute shaders**—no canvas 2D—and corrects fisheye lens distortion using the **OpenCV fisheye model** (Kannala–Brandt–style polynomial in angle θ with coefficients k1–k4). This is the same model as in [OpenCV’s fisheye module](https://docs.opencv.org/4.x/db/d58/group__calib3d__fisheye.html), not UCM (Unified Camera Model) or a simple radial model.

  ## Features

- - ESM support: You can just `import { Fisheye } from @gyeonghokim/fisheye.js;` in your WebAPP
- - TypeGPU: WebGPU backend with type-safe shader programing(with [typegpu](https://www.npmjs.com/package/typegpu))
- - GPGPU: we do not use canvas element, read from GPU buffer directly(efficient more than other libraries)
- - WebCodecs API: Modern Video processing with WebCodecs' [VideoFrame](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame)
- - Installation from modern package managers(npm)
+ - **WebGPU GPGPU**: Compute-shader pipeline via [TypeGPU](https://www.npmjs.com/package/typegpu); input/output as textures and readback to VideoFrame—no canvas element for dewarping
+ - **OpenCV fisheye (Kannala–Brandt) model**: Distortion model `θ_d = θ × (1 + k1·θ² + k2·θ⁴ + k3·θ⁶ + k4·θ⁸)` for accurate calibration
+ - **WebCodecs**: Built on the [VideoFrame](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame) API
+ - **ESM**: `import { Fisheye } from "@gyeonghokim/fisheye.js"`
+ - **npm**: Install via npm or other package managers

  ## Getting Started(Typescript Example)
 
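The updated feature list implies a short end-to-end flow: import the ESM entry point, hand VideoFrames to the WebGPU-backed dewarper, and get corrected VideoFrames back. Below is a minimal sketch of that flow against the 1.0.1 API shown in this diff; the coefficient values are illustrative, and the frame source (Chromium's `MediaStreamTrackProcessor`, which may need an ambient type declaration in TypeScript) is an assumption, not part of the package.

```ts
import { Fisheye } from "@gyeonghokim/fisheye.js";

// Illustrative coefficients; real values come from fisheye calibration of the camera.
const dewarper = new Fisheye({ k1: 0.08, k2: -0.01, width: 1280, height: 720, fov: 180 });

// Assumed frame source: a camera track read through MediaStreamTrackProcessor (Chromium only).
const stream = await navigator.mediaDevices.getUserMedia({ video: true });
const processor = new MediaStreamTrackProcessor({ track: stream.getVideoTracks()[0] });
const reader = processor.readable.getReader();

for (;;) {
  const { value: frame, done } = await reader.read();
  if (done || !frame) break;
  const dewarped = await dewarper.dewarp(frame); // WebGPU compute pass + readback
  frame.close();
  // ...render or encode `dewarped` here, then release it
  dewarped.close();
}

dewarper.destroy(); // free GPU buffers and textures when finished
```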
@@ -68,20 +68,19 @@ Creates a new Fisheye dewarper instance.
  **Options:**

- - `k1` (number, optional): Fisheye distortion coefficient k1. Typical range: -1.0 to 1.0. Default: `0.5`.
+ - `k1` (number, optional): Fisheye distortion coefficient k1. Typical range: -1.0 to 1.0. Default: `0`.
  - `k2` (number, optional): Fisheye distortion coefficient k2. Default: `0`.
  - `k3` (number, optional): Fisheye distortion coefficient k3. Default: `0`.
  - `k4` (number, optional): Fisheye distortion coefficient k4. Default: `0`.
- - `width` (number, optional): Output canvas width. Default: `640`
- - `height` (number, optional): Output canvas height. Default: `480`
+ - `width` (number, optional): Output frame width. Default: `300`
+ - `height` (number, optional): Output frame height. Default: `150`
  - `fov` (number, optional): Field of view in degrees. Default: `180`
  - `centerX` (number, optional): X offset of the lens center (normalized, -1.0 to 1.0). Default: `0`
  - `centerY` (number, optional): Y offset of the lens center (normalized, -1.0 to 1.0). Default: `0`
  - `zoom` (number, optional): Zoom factor. Default: `1.0`

- **Fisheye model (OpenCV):**
- We follow the OpenCV fisheye camera model described here:
- https://docs.opencv.org/4.x/db/d58/group__calib3d__fisheye.html
+ **Fisheye model (OpenCV fisheye / Kannala–Brandt):**
+ We use the same model as OpenCV’s [fisheye module](https://docs.opencv.org/4.x/db/d58/group__calib3d__fisheye.html) (cited there as the “generic camera model” from Kannala & Brandt, 2006). It is a polynomial-in-θ model, not UCM:

  ```
  theta = atan(r)
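For intuition about what the `k1`–`k4` options above actually control, the documented polynomial can be evaluated directly. The sketch below is a plain TypeScript illustration of that model, assuming normalized pinhole coordinates; the package itself evaluates this on the GPU inside a compute shader, so this is for reference only.

```ts
// OpenCV fisheye / Kannala–Brandt forward model: theta is the angle (radians)
// between the incoming ray and the optical axis; theta_d is the distorted angle.
function distortTheta(theta: number, k1: number, k2: number, k3 = 0, k4 = 0): number {
  const t2 = theta * theta;
  return theta * (1 + k1 * t2 + k2 * t2 ** 2 + k3 * t2 ** 3 + k4 * t2 ** 4);
}

// For an undistorted pinhole radius r = tan(theta) (normalized coordinates),
// the fisheye image samples at radius r_d = theta_d, so dewarping maps each
// output pixel's r back to the corresponding r_d in the source frame.
const r = 0.5;                               // illustrative normalized radius
const theta = Math.atan(r);
const rD = distortTheta(theta, 0.08, -0.01); // illustrative k1, k2
console.log({ r, theta, rD });
```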
@@ -114,7 +113,7 @@ If you receive raw YUV binary data from a camera or server, you can use the `cre
  ```ts
  import { Fisheye, createVideoFrameFromYUV } from "@gyeonghokim/fisheye.js";

- const dewarper = new Fisheye({ distortion: 0.5, width: 1920, height: 1080 });
+ const dewarper = new Fisheye({ k1: 0.5, width: 1920, height: 1080 });

  // Example: Receiving NV12 data from a server
  const response = await fetch("/api/camera/frame");
package/dist/index.d.ts CHANGED
@@ -1,266 +1,269 @@
- /**
- * Options for configuring the Fisheye dewarper
- */
- interface FisheyeOptions {
- /**
- * Fisheye distortion coefficient k1.
- */
- k1?: number;
- /**
- * Fisheye distortion coefficient k2.
- */
- k2?: number;
- /**
- * Fisheye distortion coefficient k3.
- */
- k3?: number;
- /**
- * Fisheye distortion coefficient k4.
- */
- k4?: number;
- /**
- * Canvas width for output
- * @default 300
- */
- width?: number;
- /**
- * Canvas height for output
- * @default 150
- */
- height?: number;
- /**
- * Field of view in degrees
- * @default 180
- */
- fov?: number;
- /**
- * X offset of the lens center (normalized, -1.0 to 1.0)
- * @default 0
- */
- centerX?: number;
- /**
- * Y offset of the lens center (normalized, -1.0 to 1.0)
- * @default 0
- */
- centerY?: number;
- /**
- * Zoom factor
- * @default 1.0
- */
- zoom?: number;
- }
- /**
- * Internal configuration after applying defaults
- */
- interface FisheyeConfig extends Required<FisheyeOptions> {
- }
-
- /**
- * Fisheye dewarper using WebGPU via TypeGPU (Pure GPGPU)
- *
- * @example
- * ```ts
- * const dewarper = new Fisheye({
- * distortion: 0.5,
- * width: 1920,
- * height: 1080,
- * });
- *
- * const dewarpedFrame = await dewarper.dewarp(videoFrame);
- * ```
- */
- declare class Fisheye {
- private config;
- private root;
- private uniformBuffer;
- private inputTexture;
- private outputTexture;
- private inputView;
- private outputView;
- private bindGroup;
- private dewarpPipeline;
- private readbackBuffers;
- private readbackIndex;
- private readbackHasData;
- private readbackBytesPerRow;
- private readbackActualBytesPerRow;
- private pixelBuffer;
- private inputTextureSize;
- private outputTextureSize;
- private static createInputView;
- private static createOutputView;
- constructor(options?: FisheyeOptions);
- /**
- * Apply default values to options
- */
- private applyDefaults;
- /**
- * Initialize TypeGPU root and resources
- */
- private initialize;
- /**
- * Get uniform data from current configuration
- */
- private getUniformData;
- /**
- * Update uniform buffer with current configuration
- */
- private updateUniforms;
- private readbackToVideoFrame;
- /**
- * Create input texture with proper typing
- */
- private createInputTexture;
- /**
- * Create output texture with proper typing (storage only, no render needed for GPGPU)
- */
- private createOutputTexture;
- /**
- * Calculate bytes per row with proper alignment (256-byte alignment for WebGPU)
- */
- private calculateBytesPerRow;
- /**
- * Create or recreate readback buffer for GPU to CPU data transfer
- */
- private createReadbackBuffer;
- /**
- * Dewarp a VideoFrame
- *
- * @param frame - Input VideoFrame with fisheye distortion
- * @returns Dewarped VideoFrame
- */
- dewarp(frame: VideoFrame): Promise<VideoFrame>;
- /**
- * Update configuration
- */
- updateConfig(options: Partial<FisheyeOptions>): void;
- /**
- * Clean up GPU resources
- */
- destroy(): void;
- }
-
- /**
- * Supported YUV pixel formats for VideoFrame creation
- */
- type YUVFormat = "I420" | "I420A" | "I422" | "I444" | "NV12";
- /**
- * Options for creating a VideoFrame from YUV data
- */
- interface CreateVideoFrameOptions {
- /**
- * YUV pixel format
- * - I420: YUV 4:2:0 planar (Y plane, U plane, V plane)
- * - I420A: YUV 4:2:0 planar with alpha
- * - I422: YUV 4:2:2 planar
- * - I444: YUV 4:4:4 planar
- * - NV12: YUV 4:2:0 semi-planar (Y plane, interleaved UV plane)
- */
- format: YUVFormat;
- /**
- * Width of the video frame in pixels
- */
- width: number;
- /**
- * Height of the video frame in pixels
- */
- height: number;
- /**
- * Timestamp in microseconds
- */
- timestamp: number;
- /**
- * Duration in microseconds (optional)
- */
- duration?: number;
- /**
- * Display width (optional, defaults to width)
- */
- displayWidth?: number;
- /**
- * Display height (optional, defaults to height)
- */
- displayHeight?: number;
- /**
- * Color space configuration (optional)
- */
- colorSpace?: VideoColorSpaceInit;
- /**
- * Transfer ownership of the buffer for zero-copy (optional)
- * If true, the input buffer will be detached after VideoFrame creation
- */
- transfer?: boolean;
- }
- /**
- * Create a VideoFrame from YUV binary data
- *
- * @param data - YUV binary data (ArrayBuffer, TypedArray, or DataView)
- * @param options - Configuration options including format, dimensions, and timestamp
- * @returns A new VideoFrame object
- *
- * @example
- * ```ts
- * // Create VideoFrame from I420 (YUV 4:2:0) data
- * const yuvData = new Uint8Array(width * height * 1.5); // I420 size
- * const frame = createVideoFrameFromYUV(yuvData, {
- * format: "I420",
- * width: 1920,
- * height: 1080,
- * timestamp: 0,
- * });
- * ```
- *
- * @example
- * ```ts
- * // Create VideoFrame from NV12 data with zero-copy transfer
- * const nv12Data = new Uint8Array(width * height * 1.5);
- * const frame = createVideoFrameFromYUV(nv12Data, {
- * format: "NV12",
- * width: 1920,
- * height: 1080,
- * timestamp: 0,
- * transfer: true, // Transfer buffer ownership for better performance
- * });
- * ```
- */
- declare function createVideoFrameFromYUV(data: BufferSource, options: CreateVideoFrameOptions): VideoFrame;
- /**
- * Convert RGBA image data to YUV format (I420 by default)
- *
- * Uses ITU-R BT.601 color space conversion:
- * - Y = 0.299*R + 0.587*G + 0.114*B
- * - U = -0.169*R - 0.331*G + 0.5*B + 128
- * - V = 0.5*R - 0.419*G - 0.081*B + 128
- *
- * For I420 format:
- * - Y plane: full resolution (width * height)
- * - U plane: quarter resolution ((width/2) * (height/2))
- * - V plane: quarter resolution ((width/2) * (height/2))
- *
- * @param rgbaData - RGBA pixel data (Uint8ClampedArray from ImageData)
- * @param width - Image width in pixels
- * @param height - Image height in pixels
- * @param format - YUV format to convert to (default: "I420")
- * @returns YUV data as Uint8Array
- *
- * @example
- * ```ts
- * const canvas = document.createElement('canvas');
- * const ctx = canvas.getContext('2d');
- * ctx.drawImage(image, 0, 0);
- * const imageData = ctx.getImageData(0, 0, width, height);
- * const yuvData = convertRGBAtoYUV(imageData.data, width, height);
- * ```
- */
- declare function convertRGBAtoYUV(rgbaData: Uint8ClampedArray, width: number, height: number, format?: YUVFormat): Uint8Array;
- /**
- * Calculate the expected byte size for YUV data based on format and dimensions
- *
- * @param format - YUV pixel format
- * @param width - Frame width in pixels
- * @param height - Frame height in pixels
- * @returns Expected byte size
- */
- declare function calculateYUVDataSize(format: YUVFormat, width: number, height: number): number;
-
- export { type CreateVideoFrameOptions, Fisheye, type FisheyeConfig, type FisheyeOptions, type YUVFormat, calculateYUVDataSize, convertRGBAtoYUV, createVideoFrameFromYUV };
+ /**
+ * Calculate the expected byte size for YUV data based on format and dimensions
+ *
+ * @param format - YUV pixel format
+ * @param width - Frame width in pixels
+ * @param height - Frame height in pixels
+ * @returns Expected byte size
+ */
+ export declare function calculateYUVDataSize(format: YUVFormat, width: number, height: number): number;
+
+ /**
+ * Convert RGBA image data to YUV format (I420 by default)
+ *
+ * Uses ITU-R BT.601 color space conversion:
+ * - Y = 0.299*R + 0.587*G + 0.114*B
+ * - U = -0.169*R - 0.331*G + 0.5*B + 128
+ * - V = 0.5*R - 0.419*G - 0.081*B + 128
+ *
+ * For I420 format:
+ * - Y plane: full resolution (width * height)
+ * - U plane: quarter resolution ((width/2) * (height/2))
+ * - V plane: quarter resolution ((width/2) * (height/2))
+ *
+ * @param rgbaData - RGBA pixel data (Uint8ClampedArray from ImageData)
+ * @param width - Image width in pixels
+ * @param height - Image height in pixels
+ * @param format - YUV format to convert to (default: "I420")
+ * @returns YUV data as Uint8Array
+ *
+ * @example
+ * ```ts
+ * const canvas = document.createElement('canvas');
+ * const ctx = canvas.getContext('2d');
+ * ctx.drawImage(image, 0, 0);
+ * const imageData = ctx.getImageData(0, 0, width, height);
+ * const yuvData = convertRGBAtoYUV(imageData.data, width, height);
+ * ```
+ */
+ export declare function convertRGBAtoYUV(rgbaData: Uint8ClampedArray, width: number, height: number, format?: YUVFormat): Uint8Array;
+
+ /**
+ * Create a VideoFrame from YUV binary data
+ *
+ * @param data - YUV binary data (ArrayBuffer, TypedArray, or DataView)
+ * @param options - Configuration options including format, dimensions, and timestamp
+ * @returns A new VideoFrame object
+ *
+ * @example
+ * ```ts
+ * // Create VideoFrame from I420 (YUV 4:2:0) data
+ * const yuvData = new Uint8Array(width * height * 1.5); // I420 size
+ * const frame = createVideoFrameFromYUV(yuvData, {
+ * format: "I420",
+ * width: 1920,
+ * height: 1080,
+ * timestamp: 0,
+ * });
+ * ```
+ *
+ * @example
+ * ```ts
+ * // Create VideoFrame from NV12 data with zero-copy transfer
+ * const nv12Data = new Uint8Array(width * height * 1.5);
+ * const frame = createVideoFrameFromYUV(nv12Data, {
+ * format: "NV12",
+ * width: 1920,
+ * height: 1080,
+ * timestamp: 0,
+ * transfer: true, // Transfer buffer ownership for better performance
+ * });
+ * ```
+ */
+ export declare function createVideoFrameFromYUV(data: BufferSource, options: CreateVideoFrameOptions): VideoFrame;
+
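Taken together, the three helpers declared above cover the CPU-side path from raw RGBA pixels to a dewarpable VideoFrame. Here is a hedged sketch of how they might be chained, assuming a 2D canvas as the pixel source; the dimensions and coefficient are illustrative, not taken from the package.

```ts
import {
  calculateYUVDataSize,
  convertRGBAtoYUV,
  createVideoFrameFromYUV,
  Fisheye,
} from "@gyeonghokim/fisheye.js";

// Illustrative source: RGBA pixels from an existing 2D canvas.
const width = 1280;
const height = 720;
const canvas = document.querySelector("canvas") as HTMLCanvasElement;
const rgba = canvas.getContext("2d")!.getImageData(0, 0, width, height).data;

// BT.601 RGBA -> I420 (full-res Y plane plus quarter-res U and V planes).
const yuv = convertRGBAtoYUV(rgba, width, height, "I420");
console.assert(yuv.byteLength === calculateYUVDataSize("I420", width, height)); // width * height * 1.5

// Wrap the planar data as a WebCodecs VideoFrame; transfer detaches the YUV buffer (zero-copy).
const frame = createVideoFrameFromYUV(yuv, {
  format: "I420",
  width,
  height,
  timestamp: 0,
  transfer: true,
});

const dewarper = new Fisheye({ k1: 0.1, width, height }); // illustrative coefficient
const dewarped = await dewarper.dewarp(frame);
frame.close();
```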
+ /**
+ * Options for creating a VideoFrame from YUV data
+ */
+ export declare interface CreateVideoFrameOptions {
+ /**
+ * YUV pixel format
+ * - I420: YUV 4:2:0 planar (Y plane, U plane, V plane)
+ * - I420A: YUV 4:2:0 planar with alpha
+ * - I422: YUV 4:2:2 planar
+ * - I444: YUV 4:4:4 planar
+ * - NV12: YUV 4:2:0 semi-planar (Y plane, interleaved UV plane)
+ */
+ format: YUVFormat;
+ /**
+ * Width of the video frame in pixels
+ */
+ width: number;
+ /**
+ * Height of the video frame in pixels
+ */
+ height: number;
+ /**
+ * Timestamp in microseconds
+ */
+ timestamp: number;
+ /**
+ * Duration in microseconds (optional)
+ */
+ duration?: number;
+ /**
+ * Display width (optional, defaults to width)
+ */
+ displayWidth?: number;
+ /**
+ * Display height (optional, defaults to height)
+ */
+ displayHeight?: number;
+ /**
+ * Color space configuration (optional)
+ */
+ colorSpace?: VideoColorSpaceInit;
+ /**
+ * Transfer ownership of the buffer for zero-copy (optional)
+ * If true, the input buffer will be detached after VideoFrame creation
+ */
+ transfer?: boolean;
+ }
+
+ /**
+ * Fisheye dewarper using WebGPU via TypeGPU (Pure GPGPU)
+ *
+ * @example
+ * ```ts
+ * const dewarper = new Fisheye({
+ * distortion: 0.5,
+ * width: 1920,
+ * height: 1080,
+ * });
+ *
+ * const dewarpedFrame = await dewarper.dewarp(videoFrame);
+ * ```
+ */
+ export declare class Fisheye {
+ private config;
+ private root;
+ private uniformBuffer;
+ private inputTexture;
+ private outputTexture;
+ private bindGroup;
+ private dewarpPipeline;
+ private readbackBuffers;
+ private readbackIndex;
+ private readbackHasData;
+ private readbackBytesPerRow;
+ private readbackActualBytesPerRow;
+ private pixelBuffer;
+ private inputTextureSize;
+ private outputTextureSize;
+ private uniformInputWidth;
+ private uniformInputHeight;
+ constructor(options?: FisheyeOptions);
+ /**
+ * Apply default values to options
+ */
+ private applyDefaults;
+ /**
+ * Initialize TypeGPU root and resources
+ */
+ private initialize;
+ /**
+ * Get uniform data from current configuration
+ */
+ private getUniformData;
+ /**
+ * Update uniform buffer with current configuration
+ */
+ private updateUniforms;
+ private readbackToVideoFrame;
+ /**
+ * Create input texture (TypeGPU; per official docs: sampled + render for .write(image/VideoFrame)).
+ */
+ private createInputTexture;
+ /**
+ * Create output storage texture (TypeGPU; type-safe with layout.$)
+ */
+ private createOutputTexture;
+ /**
+ * Calculate bytes per row with proper alignment (256-byte alignment for WebGPU)
+ */
+ private calculateBytesPerRow;
+ /**
+ * Create or recreate readback buffer for GPU to CPU data transfer
+ */
+ private createReadbackBuffer;
+ /**
+ * Dewarp a VideoFrame
+ *
+ * @param frame - Input VideoFrame with fisheye distortion
+ * @returns Dewarped VideoFrame
+ */
+ dewarp(frame: VideoFrame): Promise<VideoFrame>;
+ /**
+ * Update configuration
+ */
+ updateConfig(options: Partial<FisheyeOptions>): void;
+ /**
+ * Clean up GPU resources
+ */
+ destroy(): void;
+ }
+
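The public surface of the class above suggests a simple lifecycle: construct once, call `dewarp` per frame, retune with `updateConfig` (it accepts a `Partial<FisheyeOptions>`), and free GPU resources with `destroy`. A sketch of that lifecycle follows; the slider wiring and values are illustrative assumptions, not part of the package.

```ts
import { Fisheye } from "@gyeonghokim/fisheye.js";

const dewarper = new Fisheye({ width: 1920, height: 1080, fov: 180 });

// Retune at runtime: updateConfig takes Partial<FisheyeOptions>, so only the
// changed fields need to be supplied (here, k1 from an assumed slider element).
const slider = document.querySelector<HTMLInputElement>("#k1")!;
slider.addEventListener("input", () => {
  dewarper.updateConfig({ k1: Number(slider.value) });
});

// Per-frame work: dewarp resolves to a new, corrected VideoFrame.
async function onFrame(frame: VideoFrame): Promise<VideoFrame> {
  const out = await dewarper.dewarp(frame);
  frame.close(); // release WebCodecs frames promptly
  return out;
}

// On teardown, release the WebGPU buffers and textures.
function shutdown(): void {
  dewarper.destroy();
}

export { onFrame, shutdown };
```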
+ /**
+ * Internal configuration after applying defaults
+ */
+ export declare interface FisheyeConfig extends Required<FisheyeOptions> {
+ }
+
+ /**
+ * Options for configuring the Fisheye dewarper
+ */
+ export declare interface FisheyeOptions {
+ /**
+ * Fisheye distortion coefficient k1.
+ */
+ k1?: number;
+ /**
+ * Fisheye distortion coefficient k2.
+ */
+ k2?: number;
+ /**
+ * Fisheye distortion coefficient k3.
+ */
+ k3?: number;
+ /**
+ * Fisheye distortion coefficient k4.
+ */
+ k4?: number;
+ /**
+ * Canvas width for output
+ * @default 300
+ */
+ width?: number;
+ /**
+ * Canvas height for output
+ * @default 150
+ */
+ height?: number;
+ /**
+ * Field of view in degrees
+ * @default 180
+ */
+ fov?: number;
+ /**
+ * X offset of the lens center (normalized, -1.0 to 1.0)
+ * @default 0
+ */
+ centerX?: number;
+ /**
+ * Y offset of the lens center (normalized, -1.0 to 1.0)
+ * @default 0
+ */
+ centerY?: number;
+ /**
+ * Zoom factor
+ * @default 1.0
+ */
+ zoom?: number;
+ }
+
+ /**
+ * Supported YUV pixel formats for VideoFrame creation
+ */
+ export declare type YUVFormat = "I420" | "I420A" | "I422" | "I444" | "NV12";
+
+ export { }