@gyeonghokim/fisheye.js 0.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 GyeongHoKim
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,210 @@
1
+ # fisheye.js
2
+
3
+ > Modern fisheye dewarping library for the web, using **General-Purpose GPU computing (GPGPU)**
4
+
5
+ fisheye.js is a JavaScript library for drawing VideoFrames to a canvas with [simple radial lens distortion](<https://en.wikipedia.org/wiki/Distortion_(optics)>), using **GPGPU** via WebGPU (falling back to WebGL if your browser does not support WebGPU).
6
+
7
+ ## Features
8
+
9
+ - ESM support: You can just `import { Fisheye } from "@gyeonghokim/fisheye.js";` in your web app
10
+ - TypeGPU: WebGPU backend with type-safe shader programming (with [typegpu](https://www.npmjs.com/package/typegpu))
11
+ - GPGPU: we do not use a canvas element; we read from the GPU buffer directly (more efficient than other libraries)
12
+ - WebCodecs API: Modern Video processing with WebCodecs' [VideoFrame](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame)
13
+ - Installation from modern package managers (npm)
14
+
15
+ ## Getting Started (TypeScript Example)
16
+
17
+ ```bash
18
+ npm install @gyeonghokim/fisheye.js
19
+ # optional
20
+ npm install --save-dev @webgpu/types
21
+ ```
22
+
23
+ if you installed `@webgpu/types`,
24
+
25
+ ```json
26
+ {
27
+ "compilerOptions": {
28
+ "types": ["@webgpu/types"]
29
+ }
30
+ }
31
+ ```
32
+
33
+ > Why should I install webgpu types?
34
+ > This library does not render your binary; it just dewarps the VideoFrame.
35
+ > You should make **your own YUV renderer**, or you can install `@gyeonghokim/yuv-player`.
36
+
37
+ in your code,
38
+
39
+ ```ts
40
+ import { Fisheye } from "@gyeonghokim/fisheye.js";
41
+
42
+ const dewarper = new Fisheye({
43
+ k1: 0.5,
44
+ k2: 0.0,
45
+ k3: 0.0,
46
+ k4: 0.0,
47
+ width: 1920,
48
+ height: 1080,
49
+ fov: 180, // Field of view in degrees
50
+ centerX: 0, // X offset of lens center (-1.0 to 1.0)
51
+ centerY: 0, // Y offset of lens center (-1.0 to 1.0)
52
+ zoom: 1.0, // Zoom factor
53
+ });
54
+
55
+ const renderLoop = async (timestamp: DOMHighResTimeStamp) => {
56
+ // your render logic
57
+ const dewarpedVideoFrame: VideoFrame = await dewarper.dewarp(yourVideoFrame);
58
+ yourYUVPlayer.draw(dewarpedVideoFrame);
59
+ requestAnimationFrame(renderLoop);
60
+ };
61
+ ```
62
+
63
+ ## API
64
+
65
+ ### `new Fisheye(options?: FisheyeOptions)`
66
+
67
+ Creates a new Fisheye dewarper instance.
68
+
69
+ **Options:**
70
+
71
+ - `k1` (number, optional): Fisheye distortion coefficient k1. Typical range: -1.0 to 1.0. Default: `0`.
72
+ - `k2` (number, optional): Fisheye distortion coefficient k2. Default: `0`.
73
+ - `k3` (number, optional): Fisheye distortion coefficient k3. Default: `0`.
74
+ - `k4` (number, optional): Fisheye distortion coefficient k4. Default: `0`.
75
+ - `width` (number, optional): Output canvas width. Default: `300`
76
+ - `height` (number, optional): Output canvas height. Default: `150`
77
+ - `fov` (number, optional): Field of view in degrees. Default: `180`
78
+ - `centerX` (number, optional): X offset of the lens center (normalized, -1.0 to 1.0). Default: `0`
79
+ - `centerY` (number, optional): Y offset of the lens center (normalized, -1.0 to 1.0). Default: `0`
80
+ - `zoom` (number, optional): Zoom factor. Default: `1.0`
81
+
82
+ **Fisheye model (OpenCV):**
83
+ We follow the OpenCV fisheye camera model described here:
84
+ https://docs.opencv.org/4.x/db/d58/group__calib3d__fisheye.html
85
+
86
+ ```
87
+ theta = atan(r)
88
+ theta_d = theta * (1 + k1*theta^2 + k2*theta^4 + k3*theta^6 + k4*theta^8)
89
+ r_d = tan(theta_d)
90
+ ```
91
+
92
+ ### `dewarp(frame: VideoFrame): Promise<VideoFrame>`
93
+
94
+ Dewarps a VideoFrame with fisheye distortion.
95
+
96
+ **Parameters:**
97
+
98
+ - `frame`: Input VideoFrame with fisheye distortion
99
+
100
+ **Returns:** Promise that resolves to a dewarped VideoFrame
101
+
102
+ ### `updateConfig(options: Partial<FisheyeOptions>): void`
103
+
104
+ Updates the dewarper configuration. You can update any subset of the original options.
105
+
106
+ ### `destroy(): void`
107
+
108
+ Cleans up GPU resources. Call this when you're done using the dewarper.
109
+
110
+ ## Working with YUV Binary Data
111
+
112
+ If you receive raw YUV binary data from a camera or server, you can use the `createVideoFrameFromYUV` utility to create a VideoFrame:
113
+
114
+ ```ts
115
+ import { Fisheye, createVideoFrameFromYUV } from "@gyeonghokim/fisheye.js";
116
+
117
+ const dewarper = new Fisheye({ k1: 0.5, width: 1920, height: 1080 });
118
+
119
+ // Example: Receiving NV12 data from a server
120
+ const response = await fetch("/api/camera/frame");
121
+ const yuvBuffer = await response.arrayBuffer();
122
+
123
+ const frame = createVideoFrameFromYUV(new Uint8Array(yuvBuffer), {
124
+ format: "NV12", // YUV format
125
+ width: 1920,
126
+ height: 1080,
127
+ timestamp: performance.now() * 1000, // microseconds
128
+ });
129
+
130
+ const dewarpedFrame = await dewarper.dewarp(frame);
131
+ frame.close(); // Don't forget to close the original frame
132
+ ```
133
+
134
+ ### `createVideoFrameFromYUV(data, options)`
135
+
136
+ Creates a VideoFrame from YUV binary data.
137
+
138
+ **Parameters:**
139
+
140
+ - `data`: YUV binary data (`ArrayBuffer`, `TypedArray`, or `DataView`)
141
+ - `options`: Configuration object
142
+ - `format` (required): YUV pixel format
143
+ - `width` (required): Frame width in pixels
144
+ - `height` (required): Frame height in pixels
145
+ - `timestamp` (required): Timestamp in microseconds
146
+ - `duration` (optional): Duration in microseconds
147
+ - `displayWidth` (optional): Display width (defaults to width)
148
+ - `displayHeight` (optional): Display height (defaults to height)
149
+ - `colorSpace` (optional): Color space configuration
150
+ - `transfer` (optional): If `true`, transfers buffer ownership for zero-copy performance
151
+
152
+ **Supported YUV Formats:**
153
+
154
+ | Format | Description | Data Size |
155
+ |--------|-------------|-----------|
156
+ | `I420` | YUV 4:2:0 planar (Y, U, V planes) | width × height × 1.5 |
157
+ | `NV12` | YUV 4:2:0 semi-planar (Y plane, interleaved UV) | width × height × 1.5 |
158
+ | `I420A` | YUV 4:2:0 planar with alpha | width × height × 2.5 |
159
+ | `I422` | YUV 4:2:2 planar | width × height × 2 |
160
+ | `I444` | YUV 4:4:4 planar | width × height × 3 |
161
+
162
+ ### `calculateYUVDataSize(format, width, height)`
163
+
164
+ Calculates the expected byte size for YUV data.
165
+
166
+ ```ts
167
+ import { calculateYUVDataSize } from "@gyeonghokim/fisheye.js";
168
+
169
+ const size = calculateYUVDataSize("NV12", 1920, 1080); // 3110400 bytes
170
+ ```
171
+
172
+ ## Development
173
+
174
+ This project uses:
175
+
176
+ - **Biome** for linting and formatting
177
+ - **Husky** for git hooks
178
+ - **Commitlint** for conventional commit messages
179
+ - **Semantic Release** for automated versioning and publishing
180
+ - **TypeScript** for type safety
181
+ - **WebGPU** for GPU-accelerated processing
182
+
183
+ ### Scripts
184
+
185
+ ```bash
186
+ npm run build # Build the library
187
+ npm run dev # Build in watch mode
188
+ npm run lint # Run linter
189
+ npm run lint:fix # Fix linting issues
190
+ npm run format # Format code
191
+ npm run type-check # Check TypeScript types
192
+ ```
193
+
194
+ ### Commit Message Format
195
+
196
+ This project follows the [Conventional Commits](https://www.conventionalcommits.org/) specification:
197
+
198
+ ```
199
+ <type>: <description>
200
+
201
+ [optional body]
202
+
203
+ [optional footer]
204
+ ```
205
+
206
+ Types: `feat`, `fix`, `docs`, `style`, `refactor`, `perf`, `test`, `build`, `ci`, `chore`, `revert`
207
+
208
+ ## License
209
+
210
+ MIT
@@ -0,0 +1,266 @@
1
+ /**
2
+ * Options for configuring the Fisheye dewarper
3
+ */
4
+ interface FisheyeOptions {
5
+ /**
6
+ * Fisheye distortion coefficient k1.
7
+ */
8
+ k1?: number;
9
+ /**
10
+ * Fisheye distortion coefficient k2.
11
+ */
12
+ k2?: number;
13
+ /**
14
+ * Fisheye distortion coefficient k3.
15
+ */
16
+ k3?: number;
17
+ /**
18
+ * Fisheye distortion coefficient k4.
19
+ */
20
+ k4?: number;
21
+ /**
22
+ * Canvas width for output
23
+ * @default 300
24
+ */
25
+ width?: number;
26
+ /**
27
+ * Canvas height for output
28
+ * @default 150
29
+ */
30
+ height?: number;
31
+ /**
32
+ * Field of view in degrees
33
+ * @default 180
34
+ */
35
+ fov?: number;
36
+ /**
37
+ * X offset of the lens center (normalized, -1.0 to 1.0)
38
+ * @default 0
39
+ */
40
+ centerX?: number;
41
+ /**
42
+ * Y offset of the lens center (normalized, -1.0 to 1.0)
43
+ * @default 0
44
+ */
45
+ centerY?: number;
46
+ /**
47
+ * Zoom factor
48
+ * @default 1.0
49
+ */
50
+ zoom?: number;
51
+ }
52
+ /**
53
+ * Internal configuration after applying defaults
54
+ */
55
+ interface FisheyeConfig extends Required<FisheyeOptions> {
56
+ }
57
+
58
+ /**
59
+ * Fisheye dewarper using WebGPU via TypeGPU (Pure GPGPU)
60
+ *
61
+ * @example
62
+ * ```ts
63
+ * const dewarper = new Fisheye({
64
+ * k1: 0.5,
65
+ * width: 1920,
66
+ * height: 1080,
67
+ * });
68
+ *
69
+ * const dewarpedFrame = await dewarper.dewarp(videoFrame);
70
+ * ```
71
+ */
72
+ declare class Fisheye {
73
+ private config;
74
+ private root;
75
+ private uniformBuffer;
76
+ private inputTexture;
77
+ private outputTexture;
78
+ private inputView;
79
+ private outputView;
80
+ private bindGroup;
81
+ private dewarpPipeline;
82
+ private readbackBuffers;
83
+ private readbackIndex;
84
+ private readbackHasData;
85
+ private readbackBytesPerRow;
86
+ private readbackActualBytesPerRow;
87
+ private pixelBuffer;
88
+ private inputTextureSize;
89
+ private outputTextureSize;
90
+ private static createInputView;
91
+ private static createOutputView;
92
+ constructor(options?: FisheyeOptions);
93
+ /**
94
+ * Apply default values to options
95
+ */
96
+ private applyDefaults;
97
+ /**
98
+ * Initialize TypeGPU root and resources
99
+ */
100
+ private initialize;
101
+ /**
102
+ * Get uniform data from current configuration
103
+ */
104
+ private getUniformData;
105
+ /**
106
+ * Update uniform buffer with current configuration
107
+ */
108
+ private updateUniforms;
109
+ private readbackToVideoFrame;
110
+ /**
111
+ * Create input texture with proper typing
112
+ */
113
+ private createInputTexture;
114
+ /**
115
+ * Create output texture with proper typing (storage only, no render needed for GPGPU)
116
+ */
117
+ private createOutputTexture;
118
+ /**
119
+ * Calculate bytes per row with proper alignment (256-byte alignment for WebGPU)
120
+ */
121
+ private calculateBytesPerRow;
122
+ /**
123
+ * Create or recreate readback buffer for GPU to CPU data transfer
124
+ */
125
+ private createReadbackBuffer;
126
+ /**
127
+ * Dewarp a VideoFrame
128
+ *
129
+ * @param frame - Input VideoFrame with fisheye distortion
130
+ * @returns Dewarped VideoFrame
131
+ */
132
+ dewarp(frame: VideoFrame): Promise<VideoFrame>;
133
+ /**
134
+ * Update configuration
135
+ */
136
+ updateConfig(options: Partial<FisheyeOptions>): void;
137
+ /**
138
+ * Clean up GPU resources
139
+ */
140
+ destroy(): void;
141
+ }
142
+
143
+ /**
144
+ * Supported YUV pixel formats for VideoFrame creation
145
+ */
146
+ type YUVFormat = "I420" | "I420A" | "I422" | "I444" | "NV12";
147
+ /**
148
+ * Options for creating a VideoFrame from YUV data
149
+ */
150
+ interface CreateVideoFrameOptions {
151
+ /**
152
+ * YUV pixel format
153
+ * - I420: YUV 4:2:0 planar (Y plane, U plane, V plane)
154
+ * - I420A: YUV 4:2:0 planar with alpha
155
+ * - I422: YUV 4:2:2 planar
156
+ * - I444: YUV 4:4:4 planar
157
+ * - NV12: YUV 4:2:0 semi-planar (Y plane, interleaved UV plane)
158
+ */
159
+ format: YUVFormat;
160
+ /**
161
+ * Width of the video frame in pixels
162
+ */
163
+ width: number;
164
+ /**
165
+ * Height of the video frame in pixels
166
+ */
167
+ height: number;
168
+ /**
169
+ * Timestamp in microseconds
170
+ */
171
+ timestamp: number;
172
+ /**
173
+ * Duration in microseconds (optional)
174
+ */
175
+ duration?: number;
176
+ /**
177
+ * Display width (optional, defaults to width)
178
+ */
179
+ displayWidth?: number;
180
+ /**
181
+ * Display height (optional, defaults to height)
182
+ */
183
+ displayHeight?: number;
184
+ /**
185
+ * Color space configuration (optional)
186
+ */
187
+ colorSpace?: VideoColorSpaceInit;
188
+ /**
189
+ * Transfer ownership of the buffer for zero-copy (optional)
190
+ * If true, the input buffer will be detached after VideoFrame creation
191
+ */
192
+ transfer?: boolean;
193
+ }
194
+ /**
195
+ * Create a VideoFrame from YUV binary data
196
+ *
197
+ * @param data - YUV binary data (ArrayBuffer, TypedArray, or DataView)
198
+ * @param options - Configuration options including format, dimensions, and timestamp
199
+ * @returns A new VideoFrame object
200
+ *
201
+ * @example
202
+ * ```ts
203
+ * // Create VideoFrame from I420 (YUV 4:2:0) data
204
+ * const yuvData = new Uint8Array(width * height * 1.5); // I420 size
205
+ * const frame = createVideoFrameFromYUV(yuvData, {
206
+ * format: "I420",
207
+ * width: 1920,
208
+ * height: 1080,
209
+ * timestamp: 0,
210
+ * });
211
+ * ```
212
+ *
213
+ * @example
214
+ * ```ts
215
+ * // Create VideoFrame from NV12 data with zero-copy transfer
216
+ * const nv12Data = new Uint8Array(width * height * 1.5);
217
+ * const frame = createVideoFrameFromYUV(nv12Data, {
218
+ * format: "NV12",
219
+ * width: 1920,
220
+ * height: 1080,
221
+ * timestamp: 0,
222
+ * transfer: true, // Transfer buffer ownership for better performance
223
+ * });
224
+ * ```
225
+ */
226
+ declare function createVideoFrameFromYUV(data: BufferSource, options: CreateVideoFrameOptions): VideoFrame;
227
+ /**
228
+ * Convert RGBA image data to YUV format (I420 by default)
229
+ *
230
+ * Uses ITU-R BT.601 color space conversion:
231
+ * - Y = 0.299*R + 0.587*G + 0.114*B
232
+ * - U = -0.169*R - 0.331*G + 0.5*B + 128
233
+ * - V = 0.5*R - 0.419*G - 0.081*B + 128
234
+ *
235
+ * For I420 format:
236
+ * - Y plane: full resolution (width * height)
237
+ * - U plane: quarter resolution ((width/2) * (height/2))
238
+ * - V plane: quarter resolution ((width/2) * (height/2))
239
+ *
240
+ * @param rgbaData - RGBA pixel data (Uint8ClampedArray from ImageData)
241
+ * @param width - Image width in pixels
242
+ * @param height - Image height in pixels
243
+ * @param format - YUV format to convert to (default: "I420")
244
+ * @returns YUV data as Uint8Array
245
+ *
246
+ * @example
247
+ * ```ts
248
+ * const canvas = document.createElement('canvas');
249
+ * const ctx = canvas.getContext('2d');
250
+ * ctx.drawImage(image, 0, 0);
251
+ * const imageData = ctx.getImageData(0, 0, width, height);
252
+ * const yuvData = convertRGBAtoYUV(imageData.data, width, height);
253
+ * ```
254
+ */
255
+ declare function convertRGBAtoYUV(rgbaData: Uint8ClampedArray, width: number, height: number, format?: YUVFormat): Uint8Array;
256
+ /**
257
+ * Calculate the expected byte size for YUV data based on format and dimensions
258
+ *
259
+ * @param format - YUV pixel format
260
+ * @param width - Frame width in pixels
261
+ * @param height - Frame height in pixels
262
+ * @returns Expected byte size
263
+ */
264
+ declare function calculateYUVDataSize(format: YUVFormat, width: number, height: number): number;
265
+
266
+ export { type CreateVideoFrameOptions, Fisheye, type FisheyeConfig, type FisheyeOptions, type YUVFormat, calculateYUVDataSize, convertRGBAtoYUV, createVideoFrameFromYUV };
package/dist/index.js ADDED
@@ -0,0 +1,459 @@
1
+ // src/fisheye.ts
2
+ import tgpu from "typegpu";
3
+ import * as d from "typegpu/data";
4
+ import * as std from "typegpu/std";
5
+ var FisheyeUniforms = d.struct({
6
+ k1: d.f32,
7
+ k2: d.f32,
8
+ k3: d.f32,
9
+ k4: d.f32,
10
+ fov: d.f32,
11
+ centerX: d.f32,
12
+ centerY: d.f32,
13
+ zoom: d.f32,
14
+ width: d.f32,
15
+ height: d.f32,
16
+ padding: d.f32
17
+ });
18
+ var fisheyeLayout = tgpu.bindGroupLayout({
19
+ inputTexture: { texture: d.texture2d() },
20
+ outputTexture: { storageTexture: d.textureStorage2d("rgba8unorm") },
21
+ uniforms: { uniform: FisheyeUniforms }
22
+ });
23
+ var Fisheye = class _Fisheye {
24
+ config;
25
+ root = null;
26
+ uniformBuffer = null;
27
+ inputTexture = null;
28
+ outputTexture = null;
29
+ inputView = null;
30
+ outputView = null;
31
+ bindGroup = null;
32
+ dewarpPipeline = null;
33
+ readbackBuffers = null;
34
+ readbackIndex = 0;
35
+ readbackHasData = [false, false];
36
+ readbackBytesPerRow = 0;
37
+ readbackActualBytesPerRow = 0;
38
+ pixelBuffer = null;
39
+ inputTextureSize = [0, 0];
40
+ outputTextureSize = [0, 0];
41
+ static createInputView(texture) {
42
+ return texture.createView(d.texture2d());
43
+ }
44
+ static createOutputView(texture) {
45
+ return texture.createView(d.textureStorage2d("rgba8unorm"));
46
+ }
47
+ constructor(options = {}) {
48
+ this.config = this.applyDefaults(options);
49
+ }
50
+ /**
51
+ * Apply default values to options
52
+ */
53
+ applyDefaults(options) {
54
+ const k1 = options.k1 ?? 0;
55
+ return {
56
+ k1,
57
+ k2: options.k2 ?? 0,
58
+ k3: options.k3 ?? 0,
59
+ k4: options.k4 ?? 0,
60
+ width: options.width ?? 300,
61
+ height: options.height ?? 150,
62
+ fov: options.fov ?? 180,
63
+ centerX: options.centerX ?? 0,
64
+ centerY: options.centerY ?? 0,
65
+ zoom: options.zoom ?? 1
66
+ };
67
+ }
68
+ /**
69
+ * Initialize TypeGPU root and resources
70
+ */
71
+ async initialize() {
72
+ if (this.root) {
73
+ return;
74
+ }
75
+ this.root = await tgpu.init();
76
+ this.uniformBuffer = this.root.createBuffer(FisheyeUniforms, this.getUniformData()).$usage("uniform");
77
+ this.dewarpPipeline = this.root["~unstable"].createGuardedComputePipeline(
78
+ (x, y) => {
79
+ "use gpu";
80
+ const inputTex = fisheyeLayout.$.inputTexture;
81
+ const outputTex = fisheyeLayout.$.outputTexture;
82
+ const params = fisheyeLayout.$.uniforms;
83
+ const inputDims = std.textureDimensions(inputTex);
84
+ const outputDims = std.textureDimensions(outputTex);
85
+ const coord = d.vec2i(x, y);
86
+ if (x >= outputDims.x || y >= outputDims.y) {
87
+ return;
88
+ }
89
+ const uv = d.vec2f(
90
+ (d.f32(coord.x) / d.f32(outputDims.x) - 0.5) * 2,
91
+ (d.f32(coord.y) / d.f32(outputDims.y) - 0.5) * 2
92
+ );
93
+ const centered = uv.sub(d.vec2f(params.centerX, params.centerY));
94
+ const r = std.length(centered);
95
+ const theta = std.atan(r);
96
+ const theta2 = theta * theta;
97
+ const theta4 = theta2 * theta2;
98
+ const theta6 = theta4 * theta2;
99
+ const theta8 = theta4 * theta4;
100
+ const thetaDistorted = theta * (1 + params.k1 * theta2 + params.k2 * theta4 + params.k3 * theta6 + params.k4 * theta8);
101
+ const rDistorted = std.tan(thetaDistorted);
102
+ const rScaled = rDistorted / params.zoom;
103
+ let distortedUv = centered;
104
+ if (r > 1e-4) {
105
+ distortedUv = centered.mul(rScaled / r);
106
+ }
107
+ const finalUv = distortedUv.add(d.vec2f(params.centerX, params.centerY)).mul(0.5).add(0.5);
108
+ if (finalUv.x >= 0 && finalUv.x <= 1 && finalUv.y >= 0 && finalUv.y <= 1) {
109
+ const sampleCoord = d.vec2i(
110
+ d.i32(finalUv.x * d.f32(inputDims.x)),
111
+ d.i32(finalUv.y * d.f32(inputDims.y))
112
+ );
113
+ const color = std.textureLoad(inputTex, sampleCoord, 0);
114
+ std.textureStore(outputTex, coord, color);
115
+ } else {
116
+ std.textureStore(outputTex, coord, d.vec4f(0, 0, 0, 1));
117
+ }
118
+ }
119
+ );
120
+ }
121
+ /**
122
+ * Get uniform data from current configuration
123
+ */
124
+ getUniformData() {
125
+ return {
126
+ k1: this.config.k1,
127
+ k2: this.config.k2,
128
+ k3: this.config.k3,
129
+ k4: this.config.k4,
130
+ fov: this.config.fov,
131
+ centerX: this.config.centerX,
132
+ centerY: this.config.centerY,
133
+ zoom: this.config.zoom,
134
+ width: this.config.width,
135
+ height: this.config.height,
136
+ padding: 0
137
+ };
138
+ }
139
+ /**
140
+ * Update uniform buffer with current configuration
141
+ */
142
+ updateUniforms() {
143
+ if (!this.uniformBuffer) {
144
+ return;
145
+ }
146
+ this.uniformBuffer.write(this.getUniformData());
147
+ }
148
+ async readbackToVideoFrame(device, root, outputTexture, timestamp) {
149
+ const readbackBuffers = this.readbackBuffers;
150
+ if (!readbackBuffers) {
151
+ throw new Error("Readback buffer not initialized");
152
+ }
153
+ const outputGpuTexture = root.unwrap(outputTexture);
154
+ const commandEncoder = device.createCommandEncoder();
155
+ const writeIndex = this.readbackIndex;
156
+ const readIndex = 1 - writeIndex;
157
+ const writeBuffer = readbackBuffers[writeIndex];
158
+ const readBuffer = readbackBuffers[readIndex];
159
+ if (!writeBuffer || !readBuffer) {
160
+ throw new Error("Readback buffer not initialized");
161
+ }
162
+ commandEncoder.copyTextureToBuffer(
163
+ { texture: outputGpuTexture },
164
+ { buffer: writeBuffer, bytesPerRow: this.readbackBytesPerRow },
165
+ [this.config.width, this.config.height]
166
+ );
167
+ device.queue.submit([commandEncoder.finish()]);
168
+ this.readbackHasData[writeIndex] = true;
169
+ this.readbackIndex = readIndex;
170
+ const bufferToRead = this.readbackHasData[readIndex] ? readBuffer : writeBuffer;
171
+ await bufferToRead.mapAsync(GPUMapMode.READ);
172
+ const mappedData = bufferToRead.getMappedRange();
173
+ const pixelData = this.pixelBuffer ?? new Uint8Array(this.config.width * this.config.height * 4);
174
+ const srcView = new Uint8Array(mappedData);
175
+ for (let row = 0; row < this.config.height; row++) {
176
+ const srcOffset = row * this.readbackBytesPerRow;
177
+ const dstOffset = row * this.readbackActualBytesPerRow;
178
+ pixelData.set(
179
+ srcView.subarray(srcOffset, srcOffset + this.readbackActualBytesPerRow),
180
+ dstOffset
181
+ );
182
+ }
183
+ bufferToRead.unmap();
184
+ return new VideoFrame(pixelData, {
185
+ format: "RGBA",
186
+ codedWidth: this.config.width,
187
+ codedHeight: this.config.height,
188
+ timestamp
189
+ });
190
+ }
191
+ /**
192
+ * Create input texture with proper typing
193
+ */
194
+ createInputTexture(root, width, height) {
195
+ const size = [width, height];
196
+ const format = "rgba8unorm";
197
+ return root["~unstable"].createTexture({ size, format }).$usage("sampled");
198
+ }
199
+ /**
200
+ * Create output texture with proper typing (storage only, no render needed for GPGPU)
201
+ */
202
+ createOutputTexture(root, width, height) {
203
+ const size = [width, height];
204
+ const format = "rgba8unorm";
205
+ return root["~unstable"].createTexture({ size, format }).$usage("storage");
206
+ }
207
+ /**
208
+ * Calculate bytes per row with proper alignment (256-byte alignment for WebGPU)
209
+ */
210
+ calculateBytesPerRow(width) {
211
+ const bytesPerPixel = 4;
212
+ const unalignedBytesPerRow = width * bytesPerPixel;
213
+ return Math.ceil(unalignedBytesPerRow / 256) * 256;
214
+ }
215
+ /**
216
+ * Create or recreate readback buffer for GPU to CPU data transfer
217
+ */
218
+ createReadbackBuffer(device, width, height) {
219
+ const bytesPerRow = this.calculateBytesPerRow(width);
220
+ const bufferSize = bytesPerRow * height;
221
+ return device.createBuffer({
222
+ size: bufferSize,
223
+ usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ
224
+ });
225
+ }
226
+ /**
227
+ * Dewarp a VideoFrame
228
+ *
229
+ * @param frame - Input VideoFrame with fisheye distortion
230
+ * @returns Dewarped VideoFrame
231
+ */
232
+ async dewarp(frame) {
233
+ await this.initialize();
234
+ if (!this.root || !this.uniformBuffer) {
235
+ throw new Error("GPU resources not initialized");
236
+ }
237
+ const root = this.root;
238
+ const device = root.device;
239
+ let bindGroupDirty = false;
240
+ if (!this.inputTexture || this.inputTextureSize[0] !== frame.displayWidth || this.inputTextureSize[1] !== frame.displayHeight) {
241
+ this.inputTexture?.destroy();
242
+ this.inputTexture = this.createInputTexture(root, frame.displayWidth, frame.displayHeight);
243
+ this.inputTextureSize = [frame.displayWidth, frame.displayHeight];
244
+ this.inputView = _Fisheye.createInputView(this.inputTexture);
245
+ bindGroupDirty = true;
246
+ }
247
+ if (!this.outputTexture || this.outputTextureSize[0] !== this.config.width || this.outputTextureSize[1] !== this.config.height) {
248
+ this.outputTexture?.destroy();
249
+ this.readbackBuffers?.[0]?.destroy();
250
+ this.readbackBuffers?.[1]?.destroy();
251
+ this.outputTexture = this.createOutputTexture(root, this.config.width, this.config.height);
252
+ this.outputView = _Fisheye.createOutputView(this.outputTexture);
253
+ this.readbackBytesPerRow = this.calculateBytesPerRow(this.config.width);
254
+ this.readbackActualBytesPerRow = this.config.width * 4;
255
+ this.pixelBuffer = new Uint8Array(this.config.width * this.config.height * 4);
256
+ this.readbackBuffers = [
257
+ this.createReadbackBuffer(device, this.config.width, this.config.height),
258
+ this.createReadbackBuffer(device, this.config.width, this.config.height)
259
+ ];
260
+ this.readbackIndex = 0;
261
+ this.readbackHasData = [false, false];
262
+ this.outputTextureSize = [this.config.width, this.config.height];
263
+ bindGroupDirty = true;
264
+ }
265
+ const inputTexture = this.inputTexture;
266
+ const outputTexture = this.outputTexture;
267
+ inputTexture.write(frame);
268
+ if (bindGroupDirty || !this.bindGroup) {
269
+ this.bindGroup = root.createBindGroup(fisheyeLayout, {
270
+ inputTexture: this.inputView ?? _Fisheye.createInputView(inputTexture),
271
+ outputTexture: this.outputView ?? _Fisheye.createOutputView(outputTexture),
272
+ uniforms: this.uniformBuffer
273
+ });
274
+ }
275
+ const bindGroup = this.bindGroup;
276
+ const dewarpPipeline = this.dewarpPipeline;
277
+ if (!dewarpPipeline) {
278
+ throw new Error("Compute pipeline not initialized");
279
+ }
280
+ dewarpPipeline.with(bindGroup).dispatchThreads(this.config.width, this.config.height);
281
+ return this.readbackToVideoFrame(device, root, outputTexture, frame.timestamp);
282
+ }
283
+ /**
284
+ * Update configuration
285
+ */
286
+ updateConfig(options) {
287
+ this.config = this.applyDefaults({ ...this.config, ...options });
288
+ this.updateUniforms();
289
+ if (options.width || options.height) {
290
+ this.outputTexture?.destroy();
291
+ this.readbackBuffers?.[0]?.destroy();
292
+ this.readbackBuffers?.[1]?.destroy();
293
+ this.outputTexture = null;
294
+ this.readbackBuffers = null;
295
+ this.readbackIndex = 0;
296
+ this.readbackHasData = [false, false];
297
+ this.outputTextureSize = [0, 0];
298
+ this.outputView = null;
299
+ this.bindGroup = null;
300
+ this.readbackBytesPerRow = 0;
301
+ this.readbackActualBytesPerRow = 0;
302
+ this.pixelBuffer = null;
303
+ }
304
+ }
305
+ /**
306
+ * Clean up GPU resources
307
+ */
308
+ destroy() {
309
+ this.inputTexture?.destroy();
310
+ this.outputTexture?.destroy();
311
+ this.readbackBuffers?.[0]?.destroy();
312
+ this.readbackBuffers?.[1]?.destroy();
313
+ this.root?.destroy();
314
+ this.inputTexture = null;
315
+ this.outputTexture = null;
316
+ this.readbackBuffers = null;
317
+ this.readbackIndex = 0;
318
+ this.readbackHasData = [false, false];
319
+ this.uniformBuffer = null;
320
+ this.root = null;
321
+ this.inputView = null;
322
+ this.outputView = null;
323
+ this.bindGroup = null;
324
+ this.dewarpPipeline = null;
325
+ this.readbackBytesPerRow = 0;
326
+ this.readbackActualBytesPerRow = 0;
327
+ this.pixelBuffer = null;
328
+ this.inputTextureSize = [0, 0];
329
+ this.outputTextureSize = [0, 0];
330
+ }
331
+ };
332
+
333
+ // src/utils.ts
334
+ function createVideoFrameFromYUV(data, options) {
335
+ const {
336
+ format,
337
+ width,
338
+ height,
339
+ timestamp,
340
+ duration,
341
+ displayWidth,
342
+ displayHeight,
343
+ colorSpace,
344
+ transfer
345
+ } = options;
346
+ if (width <= 0 || height <= 0) {
347
+ throw new Error("Width and height must be positive integers");
348
+ }
349
+ const expectedSize = calculateYUVDataSize(format, width, height);
350
+ const actualSize = data instanceof ArrayBuffer ? data.byteLength : data.byteLength;
351
+ if (actualSize < expectedSize) {
352
+ throw new Error(
353
+ `Buffer too small for ${format} format. Expected at least ${expectedSize} bytes, got ${actualSize} bytes`
354
+ );
355
+ }
356
+ const init = {
357
+ format,
358
+ codedWidth: width,
359
+ codedHeight: height,
360
+ timestamp
361
+ };
362
+ if (duration !== void 0) {
363
+ init.duration = duration;
364
+ }
365
+ if (displayWidth !== void 0) {
366
+ init.displayWidth = displayWidth;
367
+ }
368
+ if (displayHeight !== void 0) {
369
+ init.displayHeight = displayHeight;
370
+ }
371
+ if (colorSpace !== void 0) {
372
+ init.colorSpace = colorSpace;
373
+ }
374
+ if (transfer) {
375
+ const buffer = data instanceof ArrayBuffer ? data : data.buffer;
376
+ init.transfer = [buffer];
377
+ }
378
+ return new VideoFrame(data, init);
379
+ }
380
+ function convertRGBAtoYUV(rgbaData, width, height, format = "I420") {
381
+ if (format !== "I420") {
382
+ throw new Error(`Unsupported format: ${format}. Only I420 is currently supported.`);
383
+ }
384
+ const lumaSize = width * height;
385
+ const chromaSize = width / 2 * (height / 2);
386
+ const yuvSize = lumaSize + chromaSize * 2;
387
+ const yuvData = new Uint8Array(yuvSize);
388
+ const Y_R = 0.299;
389
+ const Y_G = 0.587;
390
+ const Y_B = 0.114;
391
+ const U_R = -0.169;
392
+ const U_G = -0.331;
393
+ const U_B = 0.5;
394
+ const V_R = 0.5;
395
+ const V_G = -0.419;
396
+ const V_B = -0.081;
397
+ const yPlane = yuvData.subarray(0, lumaSize);
398
+ const uPlane = yuvData.subarray(lumaSize, lumaSize + chromaSize);
399
+ const vPlane = yuvData.subarray(lumaSize + chromaSize, yuvSize);
400
+ for (let y = 0; y < height; y++) {
401
+ for (let x = 0; x < width; x++) {
402
+ const rgbaIdx = (y * width + x) * 4;
403
+ const r = rgbaData[rgbaIdx];
404
+ const g = rgbaData[rgbaIdx + 1];
405
+ const b = rgbaData[rgbaIdx + 2];
406
+ const yVal = Y_R * r + Y_G * g + Y_B * b;
407
+ yPlane[y * width + x] = Math.round(Math.max(0, Math.min(255, yVal)));
408
+ }
409
+ }
410
+ for (let y = 0; y < height / 2; y++) {
411
+ for (let x = 0; x < width / 2; x++) {
412
+ let uSum = 0;
413
+ let vSum = 0;
414
+ for (let dy = 0; dy < 2; dy++) {
415
+ for (let dx = 0; dx < 2; dx++) {
416
+ const srcX = x * 2 + dx;
417
+ const srcY = y * 2 + dy;
418
+ const rgbaIdx = (srcY * width + srcX) * 4;
419
+ const r = rgbaData[rgbaIdx];
420
+ const g = rgbaData[rgbaIdx + 1];
421
+ const b = rgbaData[rgbaIdx + 2];
422
+ const uVal = U_R * r + U_G * g + U_B * b + 128;
423
+ const vVal = V_R * r + V_G * g + V_B * b + 128;
424
+ uSum += uVal;
425
+ vSum += vVal;
426
+ }
427
+ }
428
+ const uAvg = uSum / 4;
429
+ const vAvg = vSum / 4;
430
+ const chromaIdx = y * (width / 2) + x;
431
+ uPlane[chromaIdx] = Math.round(Math.max(0, Math.min(255, uAvg)));
432
+ vPlane[chromaIdx] = Math.round(Math.max(0, Math.min(255, vAvg)));
433
+ }
434
+ }
435
+ return yuvData;
436
+ }
437
+ function calculateYUVDataSize(format, width, height) {
438
+ const lumaSize = width * height;
439
+ switch (format) {
440
+ case "I420":
441
+ case "NV12":
442
+ return lumaSize + lumaSize / 2;
443
+ case "I420A":
444
+ return lumaSize * 2 + lumaSize / 2;
445
+ case "I422":
446
+ return lumaSize * 2;
447
+ case "I444":
448
+ return lumaSize * 3;
449
+ default:
450
+ throw new Error(`Unsupported YUV format: ${format}`);
451
+ }
452
+ }
453
+ export {
454
+ Fisheye,
455
+ calculateYUVDataSize,
456
+ convertRGBAtoYUV,
457
+ createVideoFrameFromYUV
458
+ };
459
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/fisheye.ts","../src/utils.ts"],"sourcesContent":["import tgpu, { type TgpuBuffer, type TgpuTexture } from \"typegpu\";\nimport * as d from \"typegpu/data\";\nimport * as std from \"typegpu/std\";\nimport type { FisheyeConfig, FisheyeOptions } from \"./types\";\n\n/**\n * Uniform struct for fisheye dewarping parameters\n */\nconst FisheyeUniforms = d.struct({\n k1: d.f32,\n k2: d.f32,\n k3: d.f32,\n k4: d.f32,\n fov: d.f32,\n centerX: d.f32,\n centerY: d.f32,\n zoom: d.f32,\n width: d.f32,\n height: d.f32,\n padding: d.f32,\n});\n\ntype TgpuRootType = Awaited<ReturnType<typeof tgpu.init>>;\n\n/**\n * Bind group layout for fisheye dewarping compute shader\n */\nconst fisheyeLayout = tgpu.bindGroupLayout({\n inputTexture: { texture: d.texture2d() },\n outputTexture: { storageTexture: d.textureStorage2d(\"rgba8unorm\") },\n uniforms: { uniform: FisheyeUniforms },\n});\n\n// Type definitions for textures with proper usage flags\ntype SampledTextureProps = {\n size: readonly [number, number];\n format: \"rgba8unorm\";\n};\n\ntype StorageTextureProps = {\n size: readonly [number, number];\n format: \"rgba8unorm\";\n};\n\ntype InputTextureType = TgpuTexture<SampledTextureProps> & {\n usableAsSampled: true;\n};\n\ntype OutputTextureType = TgpuTexture<StorageTextureProps> & {\n usableAsStorage: true;\n};\n\ntype UniformBufferType = TgpuBuffer<typeof FisheyeUniforms> & {\n usableAsUniform: true;\n};\n\n/**\n * Fisheye dewarper using WebGPU via TypeGPU (Pure GPGPU)\n *\n * @example\n * ```ts\n * const dewarper = new Fisheye({\n * distortion: 0.5,\n * width: 1920,\n * height: 1080,\n * });\n *\n * const dewarpedFrame = await dewarper.dewarp(videoFrame);\n * ```\n */\nexport class Fisheye {\n private config: FisheyeConfig;\n private root: TgpuRootType | null = null;\n private uniformBuffer: UniformBufferType | null = null;\n private inputTexture: InputTextureType | null = null;\n private outputTexture: OutputTextureType | null = null;\n private 
inputView: ReturnType<typeof Fisheye.createInputView> | null = null;\n private outputView: ReturnType<typeof Fisheye.createOutputView> | null = null;\n private bindGroup: ReturnType<TgpuRootType[\"createBindGroup\"]> | null = null;\n private dewarpPipeline: ReturnType<\n TgpuRootType[\"~unstable\"][\"createGuardedComputePipeline\"]\n > | null = null;\n private readbackBuffers: [GPUBuffer | null, GPUBuffer | null] | null = null;\n private readbackIndex = 0;\n private readbackHasData: [boolean, boolean] = [false, false];\n private readbackBytesPerRow = 0;\n private readbackActualBytesPerRow = 0;\n private pixelBuffer: Uint8Array | null = null;\n private inputTextureSize: [number, number] = [0, 0];\n private outputTextureSize: [number, number] = [0, 0];\n\n private static createInputView(texture: InputTextureType) {\n return texture.createView(d.texture2d());\n }\n\n private static createOutputView(texture: OutputTextureType) {\n return texture.createView(d.textureStorage2d(\"rgba8unorm\"));\n }\n\n constructor(options: FisheyeOptions = {}) {\n this.config = this.applyDefaults(options);\n }\n\n /**\n * Apply default values to options\n */\n private applyDefaults(options: FisheyeOptions): FisheyeConfig {\n const k1 = options.k1 ?? 0;\n return {\n k1,\n k2: options.k2 ?? 0,\n k3: options.k3 ?? 0,\n k4: options.k4 ?? 0,\n width: options.width ?? 300,\n height: options.height ?? 150,\n fov: options.fov ?? 180,\n centerX: options.centerX ?? 0,\n centerY: options.centerY ?? 0,\n zoom: options.zoom ?? 
1.0,\n };\n }\n\n /**\n * Initialize TypeGPU root and resources\n */\n private async initialize(): Promise<void> {\n if (this.root) {\n return;\n }\n\n this.root = await tgpu.init();\n\n // Create uniform buffer with TypeGPU for type-safe data handling\n this.uniformBuffer = this.root\n .createBuffer(FisheyeUniforms, this.getUniformData())\n .$usage(\"uniform\");\n\n this.dewarpPipeline = this.root[\"~unstable\"].createGuardedComputePipeline(\n (x: number, y: number) => {\n \"use gpu\";\n\n const inputTex = fisheyeLayout.$.inputTexture;\n const outputTex = fisheyeLayout.$.outputTexture;\n const params = fisheyeLayout.$.uniforms;\n\n const inputDims = std.textureDimensions(inputTex);\n const outputDims = std.textureDimensions(outputTex);\n const coord = d.vec2i(x, y);\n\n // Early exit if outside texture bounds\n if (x >= outputDims.x || y >= outputDims.y) {\n return;\n }\n\n // Normalize coordinates to [-1, 1]\n const uv = d.vec2f(\n (d.f32(coord.x) / d.f32(outputDims.x) - 0.5) * 2.0,\n (d.f32(coord.y) / d.f32(outputDims.y) - 0.5) * 2.0,\n );\n\n // Apply center offset\n const centered = uv.sub(d.vec2f(params.centerX, params.centerY));\n\n // Calculate radius from center\n const r = std.length(centered);\n\n // Fisheye distortion (OpenCV model): theta_d = theta * (1 + k1*theta^2 + k2*theta^4 + k3*theta^6 + k4*theta^8)\n const theta = std.atan(r);\n const theta2 = theta * theta;\n const theta4 = theta2 * theta2;\n const theta6 = theta4 * theta2;\n const theta8 = theta4 * theta4;\n const thetaDistorted =\n theta *\n (1.0 + params.k1 * theta2 + params.k2 * theta4 + params.k3 * theta6 + params.k4 * theta8);\n const rDistorted = std.tan(thetaDistorted);\n\n // Apply zoom\n const rScaled = rDistorted / params.zoom;\n\n // Convert back to texture coordinates\n let distortedUv = centered;\n if (r > 0.0001) {\n distortedUv = centered.mul(rScaled / r);\n }\n\n // Add center offset back and denormalize\n const finalUv = distortedUv.add(d.vec2f(params.centerX, 
params.centerY)).mul(0.5).add(0.5);\n\n // Sample from input texture if within bounds\n if (finalUv.x >= 0.0 && finalUv.x <= 1.0 && finalUv.y >= 0.0 && finalUv.y <= 1.0) {\n const sampleCoord = d.vec2i(\n d.i32(finalUv.x * d.f32(inputDims.x)),\n d.i32(finalUv.y * d.f32(inputDims.y)),\n );\n const color = std.textureLoad(inputTex, sampleCoord, 0);\n std.textureStore(outputTex, coord, color);\n } else {\n // Black for out of bounds\n std.textureStore(outputTex, coord, d.vec4f(0.0, 0.0, 0.0, 1.0));\n }\n },\n );\n }\n\n /**\n * Get uniform data from current configuration\n */\n private getUniformData(): d.Infer<typeof FisheyeUniforms> {\n return {\n k1: this.config.k1,\n k2: this.config.k2,\n k3: this.config.k3,\n k4: this.config.k4,\n fov: this.config.fov,\n centerX: this.config.centerX,\n centerY: this.config.centerY,\n zoom: this.config.zoom,\n width: this.config.width,\n height: this.config.height,\n padding: 0,\n };\n }\n\n /**\n * Update uniform buffer with current configuration\n */\n private updateUniforms(): void {\n if (!this.uniformBuffer) {\n return;\n }\n this.uniformBuffer.write(this.getUniformData());\n }\n\n private async readbackToVideoFrame(\n device: GPUDevice,\n root: TgpuRootType,\n outputTexture: OutputTextureType,\n timestamp: number,\n ): Promise<VideoFrame> {\n const readbackBuffers = this.readbackBuffers;\n\n if (!readbackBuffers) {\n throw new Error(\"Readback buffer not initialized\");\n }\n\n const outputGpuTexture = root.unwrap(outputTexture);\n const commandEncoder = device.createCommandEncoder();\n const writeIndex = this.readbackIndex;\n const readIndex = 1 - writeIndex;\n const writeBuffer = readbackBuffers[writeIndex];\n const readBuffer = readbackBuffers[readIndex];\n\n if (!writeBuffer || !readBuffer) {\n throw new Error(\"Readback buffer not initialized\");\n }\n\n commandEncoder.copyTextureToBuffer(\n { texture: outputGpuTexture },\n { buffer: writeBuffer, bytesPerRow: this.readbackBytesPerRow },\n [this.config.width, 
this.config.height],\n );\n device.queue.submit([commandEncoder.finish()]);\n\n this.readbackHasData[writeIndex] = true;\n this.readbackIndex = readIndex;\n\n const bufferToRead = this.readbackHasData[readIndex] ? readBuffer : writeBuffer;\n\n await bufferToRead.mapAsync(GPUMapMode.READ);\n const mappedData = bufferToRead.getMappedRange();\n\n const pixelData =\n this.pixelBuffer ?? new Uint8Array(this.config.width * this.config.height * 4);\n const srcView = new Uint8Array(mappedData);\n\n for (let row = 0; row < this.config.height; row++) {\n const srcOffset = row * this.readbackBytesPerRow;\n const dstOffset = row * this.readbackActualBytesPerRow;\n pixelData.set(\n srcView.subarray(srcOffset, srcOffset + this.readbackActualBytesPerRow),\n dstOffset,\n );\n }\n\n bufferToRead.unmap();\n\n return new VideoFrame(pixelData, {\n format: \"RGBA\",\n codedWidth: this.config.width,\n codedHeight: this.config.height,\n timestamp,\n });\n }\n\n /**\n * Create input texture with proper typing\n */\n private createInputTexture(root: TgpuRootType, width: number, height: number): InputTextureType {\n const size: readonly [number, number] = [width, height];\n const format: \"rgba8unorm\" = \"rgba8unorm\";\n return root[\"~unstable\"].createTexture({ size, format }).$usage(\"sampled\");\n }\n\n /**\n * Create output texture with proper typing (storage only, no render needed for GPGPU)\n */\n private createOutputTexture(\n root: TgpuRootType,\n width: number,\n height: number,\n ): OutputTextureType {\n const size: readonly [number, number] = [width, height];\n const format: \"rgba8unorm\" = \"rgba8unorm\";\n return root[\"~unstable\"].createTexture({ size, format }).$usage(\"storage\");\n }\n\n /**\n * Calculate bytes per row with proper alignment (256-byte alignment for WebGPU)\n */\n private calculateBytesPerRow(width: number): number {\n const bytesPerPixel = 4; // RGBA8\n const unalignedBytesPerRow = width * bytesPerPixel;\n // WebGPU requires 256-byte alignment for buffer 
copies\n return Math.ceil(unalignedBytesPerRow / 256) * 256;\n }\n\n /**\n * Create or recreate readback buffer for GPU to CPU data transfer\n */\n private createReadbackBuffer(device: GPUDevice, width: number, height: number): GPUBuffer {\n const bytesPerRow = this.calculateBytesPerRow(width);\n const bufferSize = bytesPerRow * height;\n\n return device.createBuffer({\n size: bufferSize,\n usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ,\n });\n }\n\n /**\n * Dewarp a VideoFrame\n *\n * @param frame - Input VideoFrame with fisheye distortion\n * @returns Dewarped VideoFrame\n */\n async dewarp(frame: VideoFrame): Promise<VideoFrame> {\n await this.initialize();\n\n if (!this.root || !this.uniformBuffer) {\n throw new Error(\"GPU resources not initialized\");\n }\n\n // Capture root for type narrowing\n const root = this.root;\n const device = root.device;\n\n let bindGroupDirty = false;\n\n // Create or recreate input texture if dimensions changed\n if (\n !this.inputTexture ||\n this.inputTextureSize[0] !== frame.displayWidth ||\n this.inputTextureSize[1] !== frame.displayHeight\n ) {\n this.inputTexture?.destroy();\n this.inputTexture = this.createInputTexture(root, frame.displayWidth, frame.displayHeight);\n this.inputTextureSize = [frame.displayWidth, frame.displayHeight];\n this.inputView = Fisheye.createInputView(this.inputTexture);\n bindGroupDirty = true;\n }\n\n // Create or recreate output texture and readback buffer if config dimensions changed\n if (\n !this.outputTexture ||\n this.outputTextureSize[0] !== this.config.width ||\n this.outputTextureSize[1] !== this.config.height\n ) {\n this.outputTexture?.destroy();\n this.readbackBuffers?.[0]?.destroy();\n this.readbackBuffers?.[1]?.destroy();\n\n this.outputTexture = this.createOutputTexture(root, this.config.width, this.config.height);\n this.outputView = Fisheye.createOutputView(this.outputTexture);\n this.readbackBytesPerRow = this.calculateBytesPerRow(this.config.width);\n 
this.readbackActualBytesPerRow = this.config.width * 4;\n this.pixelBuffer = new Uint8Array(this.config.width * this.config.height * 4);\n this.readbackBuffers = [\n this.createReadbackBuffer(device, this.config.width, this.config.height),\n this.createReadbackBuffer(device, this.config.width, this.config.height),\n ];\n this.readbackIndex = 0;\n this.readbackHasData = [false, false];\n this.outputTextureSize = [this.config.width, this.config.height];\n bindGroupDirty = true;\n }\n\n // Capture for type narrowing\n const inputTexture = this.inputTexture;\n const outputTexture = this.outputTexture;\n\n // Write VideoFrame to input texture\n inputTexture.write(frame);\n\n if (bindGroupDirty || !this.bindGroup) {\n this.bindGroup = root.createBindGroup(fisheyeLayout, {\n inputTexture: this.inputView ?? Fisheye.createInputView(inputTexture),\n outputTexture: this.outputView ?? Fisheye.createOutputView(outputTexture),\n uniforms: this.uniformBuffer,\n });\n }\n\n const bindGroup = this.bindGroup;\n const dewarpPipeline = this.dewarpPipeline;\n\n if (!dewarpPipeline) {\n throw new Error(\"Compute pipeline not initialized\");\n }\n\n // Execute the compute shader\n dewarpPipeline.with(bindGroup).dispatchThreads(this.config.width, this.config.height);\n\n return this.readbackToVideoFrame(device, root, outputTexture, frame.timestamp);\n }\n\n /**\n * Update configuration\n */\n updateConfig(options: Partial<FisheyeOptions>): void {\n this.config = this.applyDefaults({ ...this.config, ...options });\n this.updateUniforms();\n\n // Recreate output texture and readback buffer if size changed\n if (options.width || options.height) {\n this.outputTexture?.destroy();\n this.readbackBuffers?.[0]?.destroy();\n this.readbackBuffers?.[1]?.destroy();\n this.outputTexture = null;\n this.readbackBuffers = null;\n this.readbackIndex = 0;\n this.readbackHasData = [false, false];\n this.outputTextureSize = [0, 0];\n this.outputView = null;\n this.bindGroup = null;\n 
this.readbackBytesPerRow = 0;\n this.readbackActualBytesPerRow = 0;\n this.pixelBuffer = null;\n }\n }\n\n /**\n * Clean up GPU resources\n */\n destroy(): void {\n this.inputTexture?.destroy();\n this.outputTexture?.destroy();\n this.readbackBuffers?.[0]?.destroy();\n this.readbackBuffers?.[1]?.destroy();\n this.root?.destroy();\n\n this.inputTexture = null;\n this.outputTexture = null;\n this.readbackBuffers = null;\n this.readbackIndex = 0;\n this.readbackHasData = [false, false];\n this.uniformBuffer = null;\n this.root = null;\n this.inputView = null;\n this.outputView = null;\n this.bindGroup = null;\n this.dewarpPipeline = null;\n this.readbackBytesPerRow = 0;\n this.readbackActualBytesPerRow = 0;\n this.pixelBuffer = null;\n this.inputTextureSize = [0, 0];\n this.outputTextureSize = [0, 0];\n }\n}\n","/**\n * Supported YUV pixel formats for VideoFrame creation\n */\nexport type YUVFormat = \"I420\" | \"I420A\" | \"I422\" | \"I444\" | \"NV12\";\n\n/**\n * Extended VideoFrameBufferInit with transfer support\n * (transfer is part of the spec but may not be in all TypeScript definitions)\n */\ninterface VideoFrameBufferInitExtended extends VideoFrameBufferInit {\n transfer?: ArrayBuffer[];\n}\n\n/**\n * Options for creating a VideoFrame from YUV data\n */\nexport interface CreateVideoFrameOptions {\n /**\n * YUV pixel format\n * - I420: YUV 4:2:0 planar (Y plane, U plane, V plane)\n * - I420A: YUV 4:2:0 planar with alpha\n * - I422: YUV 4:2:2 planar\n * - I444: YUV 4:4:4 planar\n * - NV12: YUV 4:2:0 semi-planar (Y plane, interleaved UV plane)\n */\n format: YUVFormat;\n\n /**\n * Width of the video frame in pixels\n */\n width: number;\n\n /**\n * Height of the video frame in pixels\n */\n height: number;\n\n /**\n * Timestamp in microseconds\n */\n timestamp: number;\n\n /**\n * Duration in microseconds (optional)\n */\n duration?: number;\n\n /**\n * Display width (optional, defaults to width)\n */\n displayWidth?: number;\n\n /**\n * Display height 
(optional, defaults to height)\n */\n displayHeight?: number;\n\n /**\n * Color space configuration (optional)\n */\n colorSpace?: VideoColorSpaceInit;\n\n /**\n * Transfer ownership of the buffer for zero-copy (optional)\n * If true, the input buffer will be detached after VideoFrame creation\n */\n transfer?: boolean;\n}\n\n/**\n * Create a VideoFrame from YUV binary data\n *\n * @param data - YUV binary data (ArrayBuffer, TypedArray, or DataView)\n * @param options - Configuration options including format, dimensions, and timestamp\n * @returns A new VideoFrame object\n *\n * @example\n * ```ts\n * // Create VideoFrame from I420 (YUV 4:2:0) data\n * const yuvData = new Uint8Array(width * height * 1.5); // I420 size\n * const frame = createVideoFrameFromYUV(yuvData, {\n * format: \"I420\",\n * width: 1920,\n * height: 1080,\n * timestamp: 0,\n * });\n * ```\n *\n * @example\n * ```ts\n * // Create VideoFrame from NV12 data with zero-copy transfer\n * const nv12Data = new Uint8Array(width * height * 1.5);\n * const frame = createVideoFrameFromYUV(nv12Data, {\n * format: \"NV12\",\n * width: 1920,\n * height: 1080,\n * timestamp: 0,\n * transfer: true, // Transfer buffer ownership for better performance\n * });\n * ```\n */\nexport function createVideoFrameFromYUV(\n data: BufferSource,\n options: CreateVideoFrameOptions,\n): VideoFrame {\n const {\n format,\n width,\n height,\n timestamp,\n duration,\n displayWidth,\n displayHeight,\n colorSpace,\n transfer,\n } = options;\n\n // Validate dimensions\n if (width <= 0 || height <= 0) {\n throw new Error(\"Width and height must be positive integers\");\n }\n\n // Calculate expected data size based on format\n const expectedSize = calculateYUVDataSize(format, width, height);\n const actualSize = data instanceof ArrayBuffer ? data.byteLength : data.byteLength;\n\n if (actualSize < expectedSize) {\n throw new Error(\n `Buffer too small for ${format} format. 
Expected at least ${expectedSize} bytes, got ${actualSize} bytes`,\n );\n }\n\n // Build VideoFrame init options\n const init: VideoFrameBufferInitExtended = {\n format,\n codedWidth: width,\n codedHeight: height,\n timestamp,\n };\n\n if (duration !== undefined) {\n init.duration = duration;\n }\n\n if (displayWidth !== undefined) {\n init.displayWidth = displayWidth;\n }\n\n if (displayHeight !== undefined) {\n init.displayHeight = displayHeight;\n }\n\n if (colorSpace !== undefined) {\n init.colorSpace = colorSpace;\n }\n\n // Handle buffer transfer for zero-copy\n if (transfer) {\n const buffer = data instanceof ArrayBuffer ? data : data.buffer;\n init.transfer = [buffer];\n }\n\n return new VideoFrame(data, init);\n}\n\n/**\n * Convert RGBA image data to YUV format (I420 by default)\n *\n * Uses ITU-R BT.601 color space conversion:\n * - Y = 0.299*R + 0.587*G + 0.114*B\n * - U = -0.169*R - 0.331*G + 0.5*B + 128\n * - V = 0.5*R - 0.419*G - 0.081*B + 128\n *\n * For I420 format:\n * - Y plane: full resolution (width * height)\n * - U plane: quarter resolution ((width/2) * (height/2))\n * - V plane: quarter resolution ((width/2) * (height/2))\n *\n * @param rgbaData - RGBA pixel data (Uint8ClampedArray from ImageData)\n * @param width - Image width in pixels\n * @param height - Image height in pixels\n * @param format - YUV format to convert to (default: \"I420\")\n * @returns YUV data as Uint8Array\n *\n * @example\n * ```ts\n * const canvas = document.createElement('canvas');\n * const ctx = canvas.getContext('2d');\n * ctx.drawImage(image, 0, 0);\n * const imageData = ctx.getImageData(0, 0, width, height);\n * const yuvData = convertRGBAtoYUV(imageData.data, width, height);\n * ```\n */\nexport function convertRGBAtoYUV(\n rgbaData: Uint8ClampedArray,\n width: number,\n height: number,\n format: YUVFormat = \"I420\",\n): Uint8Array {\n if (format !== \"I420\") {\n throw new Error(`Unsupported format: ${format}. 
Only I420 is currently supported.`);\n }\n\n const lumaSize = width * height;\n const chromaSize = (width / 2) * (height / 2);\n const yuvSize = lumaSize + chromaSize * 2; // Y + U + V\n const yuvData = new Uint8Array(yuvSize);\n\n // BT.601 coefficients\n const Y_R = 0.299;\n const Y_G = 0.587;\n const Y_B = 0.114;\n const U_R = -0.169;\n const U_G = -0.331;\n const U_B = 0.5;\n const V_R = 0.5;\n const V_G = -0.419;\n const V_B = -0.081;\n\n // Convert RGB to YUV and downsample chroma for I420\n const yPlane = yuvData.subarray(0, lumaSize);\n const uPlane = yuvData.subarray(lumaSize, lumaSize + chromaSize);\n const vPlane = yuvData.subarray(lumaSize + chromaSize, yuvSize);\n\n // First pass: convert to YUV and store Y plane\n for (let y = 0; y < height; y++) {\n for (let x = 0; x < width; x++) {\n const rgbaIdx = (y * width + x) * 4;\n const r = rgbaData[rgbaIdx];\n const g = rgbaData[rgbaIdx + 1];\n const b = rgbaData[rgbaIdx + 2];\n\n // Calculate Y (luma)\n const yVal = Y_R * r + Y_G * g + Y_B * b;\n yPlane[y * width + x] = Math.round(Math.max(0, Math.min(255, yVal)));\n\n // Calculate U and V for chroma downsampling\n // We'll accumulate these in the second pass\n }\n }\n\n // Second pass: downsample U and V planes (average 2x2 blocks)\n for (let y = 0; y < height / 2; y++) {\n for (let x = 0; x < width / 2; x++) {\n // Sample 2x2 block from original image\n let uSum = 0;\n let vSum = 0;\n\n for (let dy = 0; dy < 2; dy++) {\n for (let dx = 0; dx < 2; dx++) {\n const srcX = x * 2 + dx;\n const srcY = y * 2 + dy;\n const rgbaIdx = (srcY * width + srcX) * 4;\n const r = rgbaData[rgbaIdx];\n const g = rgbaData[rgbaIdx + 1];\n const b = rgbaData[rgbaIdx + 2];\n\n // Calculate U and V\n const uVal = U_R * r + U_G * g + U_B * b + 128;\n const vVal = V_R * r + V_G * g + V_B * b + 128;\n\n uSum += uVal;\n vSum += vVal;\n }\n }\n\n // Average the 2x2 block\n const uAvg = uSum / 4;\n const vAvg = vSum / 4;\n\n const chromaIdx = y * (width / 2) + x;\n uPlane[chromaIdx] = 
Math.round(Math.max(0, Math.min(255, uAvg)));\n vPlane[chromaIdx] = Math.round(Math.max(0, Math.min(255, vAvg)));\n }\n }\n\n return yuvData;\n}\n\n/**\n * Calculate the expected byte size for YUV data based on format and dimensions\n *\n * @param format - YUV pixel format\n * @param width - Frame width in pixels\n * @param height - Frame height in pixels\n * @returns Expected byte size\n */\nexport function calculateYUVDataSize(format: YUVFormat, width: number, height: number): number {\n const lumaSize = width * height;\n\n switch (format) {\n case \"I420\":\n case \"NV12\":\n // 4:2:0 - chroma is half resolution in both dimensions\n // Y: width * height, U: (width/2) * (height/2), V: (width/2) * (height/2)\n return lumaSize + lumaSize / 2;\n\n case \"I420A\":\n // 4:2:0 with alpha\n // Y: width * height, U: (width/2) * (height/2), V: (width/2) * (height/2), A: width * height\n return lumaSize * 2 + lumaSize / 2;\n\n case \"I422\":\n // 4:2:2 - chroma is half resolution horizontally only\n // Y: width * height, U: (width/2) * height, V: (width/2) * height\n return lumaSize * 2;\n\n case \"I444\":\n // 4:4:4 - full resolution for all planes\n // Y: width * height, U: width * height, V: width * height\n return lumaSize * 3;\n\n default:\n throw new Error(`Unsupported YUV format: ${format}`);\n 
}\n}\n"],"mappings":";AAAA,OAAO,UAAiD;AACxD,YAAY,OAAO;AACnB,YAAY,SAAS;AAMrB,IAAM,kBAAoB,SAAO;AAAA,EAC/B,IAAM;AAAA,EACN,IAAM;AAAA,EACN,IAAM;AAAA,EACN,IAAM;AAAA,EACN,KAAO;AAAA,EACP,SAAW;AAAA,EACX,SAAW;AAAA,EACX,MAAQ;AAAA,EACR,OAAS;AAAA,EACT,QAAU;AAAA,EACV,SAAW;AACb,CAAC;AAOD,IAAM,gBAAgB,KAAK,gBAAgB;AAAA,EACzC,cAAc,EAAE,SAAW,YAAU,EAAE;AAAA,EACvC,eAAe,EAAE,gBAAkB,mBAAiB,YAAY,EAAE;AAAA,EAClE,UAAU,EAAE,SAAS,gBAAgB;AACvC,CAAC;AAuCM,IAAM,UAAN,MAAM,SAAQ;AAAA,EACX;AAAA,EACA,OAA4B;AAAA,EAC5B,gBAA0C;AAAA,EAC1C,eAAwC;AAAA,EACxC,gBAA0C;AAAA,EAC1C,YAA+D;AAAA,EAC/D,aAAiE;AAAA,EACjE,YAAgE;AAAA,EAChE,iBAEG;AAAA,EACH,kBAA+D;AAAA,EAC/D,gBAAgB;AAAA,EAChB,kBAAsC,CAAC,OAAO,KAAK;AAAA,EACnD,sBAAsB;AAAA,EACtB,4BAA4B;AAAA,EAC5B,cAAiC;AAAA,EACjC,mBAAqC,CAAC,GAAG,CAAC;AAAA,EAC1C,oBAAsC,CAAC,GAAG,CAAC;AAAA,EAEnD,OAAe,gBAAgB,SAA2B;AACxD,WAAO,QAAQ,WAAa,YAAU,CAAC;AAAA,EACzC;AAAA,EAEA,OAAe,iBAAiB,SAA4B;AAC1D,WAAO,QAAQ,WAAa,mBAAiB,YAAY,CAAC;AAAA,EAC5D;AAAA,EAEA,YAAY,UAA0B,CAAC,GAAG;AACxC,SAAK,SAAS,KAAK,cAAc,OAAO;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,SAAwC;AAC5D,UAAM,KAAK,QAAQ,MAAM;AACzB,WAAO;AAAA,MACL;AAAA,MACA,IAAI,QAAQ,MAAM;AAAA,MAClB,IAAI,QAAQ,MAAM;AAAA,MAClB,IAAI,QAAQ,MAAM;AAAA,MAClB,OAAO,QAAQ,SAAS;AAAA,MACxB,QAAQ,QAAQ,UAAU;AAAA,MAC1B,KAAK,QAAQ,OAAO;AAAA,MACpB,SAAS,QAAQ,WAAW;AAAA,MAC5B,SAAS,QAAQ,WAAW;AAAA,MAC5B,MAAM,QAAQ,QAAQ;AAAA,IACxB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAA4B;AACxC,QAAI,KAAK,MAAM;AACb;AAAA,IACF;AAEA,SAAK,OAAO,MAAM,KAAK,KAAK;AAG5B,SAAK,gBAAgB,KAAK,KACvB,aAAa,iBAAiB,KAAK,eAAe,CAAC,EACnD,OAAO,SAAS;AAEnB,SAAK,iBAAiB,KAAK,KAAK,WAAW,EAAE;AAAA,MAC3C,CAAC,GAAW,MAAc;AACxB;AAEA,cAAM,WAAW,cAAc,EAAE;AACjC,cAAM,YAAY,cAAc,EAAE;AAClC,cAAM,SAAS,cAAc,EAAE;AAE/B,cAAM,YAAgB,sBAAkB,QAAQ;AAChD,cAAM,aAAiB,sBAAkB,SAAS;AAClD,cAAM,QAAU,QAAM,GAAG,CAAC;AAG1B,YAAI,KAAK,WAAW,KAAK,KAAK,WAAW,GAAG;AAC1C;AAAA,QACF;AAGA,cAAM,KAAO;AAAA,WACR,MAAI,MAAM,CAAC,IAAM,MAAI,WAAW,CAAC,IAAI,OAAO;AAAA,WAC5C,MAAI,MAAM,CAAC,IAAM,MAAI,WAAW,CAAC,IAAI,OAAO;AAAA,QACjD;AAGA,cAAM,WAAW,GAAG,IAAM,QAAM,OAAO,SAAS,OAAO,OAAO,CAAC;
AAG/D,cAAM,IAAQ,WAAO,QAAQ;AAG7B,cAAM,QAAY,SAAK,CAAC;AACxB,cAAM,SAAS,QAAQ;AACvB,cAAM,SAAS,SAAS;AACxB,cAAM,SAAS,SAAS;AACxB,cAAM,SAAS,SAAS;AACxB,cAAM,iBACJ,SACC,IAAM,OAAO,KAAK,SAAS,OAAO,KAAK,SAAS,OAAO,KAAK,SAAS,OAAO,KAAK;AACpF,cAAM,aAAiB,QAAI,cAAc;AAGzC,cAAM,UAAU,aAAa,OAAO;AAGpC,YAAI,cAAc;AAClB,YAAI,IAAI,MAAQ;AACd,wBAAc,SAAS,IAAI,UAAU,CAAC;AAAA,QACxC;AAGA,cAAM,UAAU,YAAY,IAAM,QAAM,OAAO,SAAS,OAAO,OAAO,CAAC,EAAE,IAAI,GAAG,EAAE,IAAI,GAAG;AAGzF,YAAI,QAAQ,KAAK,KAAO,QAAQ,KAAK,KAAO,QAAQ,KAAK,KAAO,QAAQ,KAAK,GAAK;AAChF,gBAAM,cAAgB;AAAA,YAClB,MAAI,QAAQ,IAAM,MAAI,UAAU,CAAC,CAAC;AAAA,YAClC,MAAI,QAAQ,IAAM,MAAI,UAAU,CAAC,CAAC;AAAA,UACtC;AACA,gBAAM,QAAY,gBAAY,UAAU,aAAa,CAAC;AACtD,UAAI,iBAAa,WAAW,OAAO,KAAK;AAAA,QAC1C,OAAO;AAEL,UAAI,iBAAa,WAAW,OAAS,QAAM,GAAK,GAAK,GAAK,CAAG,CAAC;AAAA,QAChE;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAkD;AACxD,WAAO;AAAA,MACL,IAAI,KAAK,OAAO;AAAA,MAChB,IAAI,KAAK,OAAO;AAAA,MAChB,IAAI,KAAK,OAAO;AAAA,MAChB,IAAI,KAAK,OAAO;AAAA,MAChB,KAAK,KAAK,OAAO;AAAA,MACjB,SAAS,KAAK,OAAO;AAAA,MACrB,SAAS,KAAK,OAAO;AAAA,MACrB,MAAM,KAAK,OAAO;AAAA,MAClB,OAAO,KAAK,OAAO;AAAA,MACnB,QAAQ,KAAK,OAAO;AAAA,MACpB,SAAS;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAuB;AAC7B,QAAI,CAAC,KAAK,eAAe;AACvB;AAAA,IACF;AACA,SAAK,cAAc,MAAM,KAAK,eAAe,CAAC;AAAA,EAChD;AAAA,EAEA,MAAc,qBACZ,QACA,MACA,eACA,WACqB;AACrB,UAAM,kBAAkB,KAAK;AAE7B,QAAI,CAAC,iBAAiB;AACpB,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AAEA,UAAM,mBAAmB,KAAK,OAAO,aAAa;AAClD,UAAM,iBAAiB,OAAO,qBAAqB;AACnD,UAAM,aAAa,KAAK;AACxB,UAAM,YAAY,IAAI;AACtB,UAAM,cAAc,gBAAgB,UAAU;AAC9C,UAAM,aAAa,gBAAgB,SAAS;AAE5C,QAAI,CAAC,eAAe,CAAC,YAAY;AAC/B,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AAEA,mBAAe;AAAA,MACb,EAAE,SAAS,iBAAiB;AAAA,MAC5B,EAAE,QAAQ,aAAa,aAAa,KAAK,oBAAoB;AAAA,MAC7D,CAAC,KAAK,OAAO,OAAO,KAAK,OAAO,MAAM;AAAA,IACxC;AACA,WAAO,MAAM,OAAO,CAAC,eAAe,OAAO,CAAC,CAAC;AAE7C,SAAK,gBAAgB,UAAU,IAAI;AACnC,SAAK,gBAAgB;AAErB,UAAM,eAAe,KAAK,gBAAgB,SAAS,IAAI,aAAa;AAEpE,UAAM,aAAa,SAAS,WAAW,IAAI;AAC3C,UAAM,aAAa,aAAa,eAAe;AAE/C,UAAM,YACJ,KAAK,eAAe,IAAI,WAAW,KAAK,OAAO,Q
AAQ,KAAK,OAAO,SAAS,CAAC;AAC/E,UAAM,UAAU,IAAI,WAAW,UAAU;AAEzC,aAAS,MAAM,GAAG,MAAM,KAAK,OAAO,QAAQ,OAAO;AACjD,YAAM,YAAY,MAAM,KAAK;AAC7B,YAAM,YAAY,MAAM,KAAK;AAC7B,gBAAU;AAAA,QACR,QAAQ,SAAS,WAAW,YAAY,KAAK,yBAAyB;AAAA,QACtE;AAAA,MACF;AAAA,IACF;AAEA,iBAAa,MAAM;AAEnB,WAAO,IAAI,WAAW,WAAW;AAAA,MAC/B,QAAQ;AAAA,MACR,YAAY,KAAK,OAAO;AAAA,MACxB,aAAa,KAAK,OAAO;AAAA,MACzB;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,MAAoB,OAAe,QAAkC;AAC9F,UAAM,OAAkC,CAAC,OAAO,MAAM;AACtD,UAAM,SAAuB;AAC7B,WAAO,KAAK,WAAW,EAAE,cAAc,EAAE,MAAM,OAAO,CAAC,EAAE,OAAO,SAAS;AAAA,EAC3E;AAAA;AAAA;AAAA;AAAA,EAKQ,oBACN,MACA,OACA,QACmB;AACnB,UAAM,OAAkC,CAAC,OAAO,MAAM;AACtD,UAAM,SAAuB;AAC7B,WAAO,KAAK,WAAW,EAAE,cAAc,EAAE,MAAM,OAAO,CAAC,EAAE,OAAO,SAAS;AAAA,EAC3E;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAAqB,OAAuB;AAClD,UAAM,gBAAgB;AACtB,UAAM,uBAAuB,QAAQ;AAErC,WAAO,KAAK,KAAK,uBAAuB,GAAG,IAAI;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAAqB,QAAmB,OAAe,QAA2B;AACxF,UAAM,cAAc,KAAK,qBAAqB,KAAK;AACnD,UAAM,aAAa,cAAc;AAEjC,WAAO,OAAO,aAAa;AAAA,MACzB,MAAM;AAAA,MACN,OAAO,eAAe,WAAW,eAAe;AAAA,IAClD,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,OAAO,OAAwC;AACnD,UAAM,KAAK,WAAW;AAEtB,QAAI,CAAC,KAAK,QAAQ,CAAC,KAAK,eAAe;AACrC,YAAM,IAAI,MAAM,+BAA+B;AAAA,IACjD;AAGA,UAAM,OAAO,KAAK;AAClB,UAAM,SAAS,KAAK;AAEpB,QAAI,iBAAiB;AAGrB,QACE,CAAC,KAAK,gBACN,KAAK,iBAAiB,CAAC,MAAM,MAAM,gBACnC,KAAK,iBAAiB,CAAC,MAAM,MAAM,eACnC;AACA,WAAK,cAAc,QAAQ;AAC3B,WAAK,eAAe,KAAK,mBAAmB,MAAM,MAAM,cAAc,MAAM,aAAa;AACzF,WAAK,mBAAmB,CAAC,MAAM,cAAc,MAAM,aAAa;AAChE,WAAK,YAAY,SAAQ,gBAAgB,KAAK,YAAY;AAC1D,uBAAiB;AAAA,IACnB;AAGA,QACE,CAAC,KAAK,iBACN,KAAK,kBAAkB,CAAC,MAAM,KAAK,OAAO,SAC1C,KAAK,kBAAkB,CAAC,MAAM,KAAK,OAAO,QAC1C;AACA,WAAK,eAAe,QAAQ;AAC5B,WAAK,kBAAkB,CAAC,GAAG,QAAQ;AACnC,WAAK,kBAAkB,CAAC,GAAG,QAAQ;AAEnC,WAAK,gBAAgB,KAAK,oBAAoB,MAAM,KAAK,OAAO,OAAO,KAAK,OAAO,MAAM;AACzF,WAAK,aAAa,SAAQ,iBAAiB,KAAK,aAAa;AAC7D,WAAK,sBAAsB,KAAK,qBAAqB,KAAK,OAAO,KAAK;AACtE,WAAK,4BAA4B,KAAK,OAAO,QAAQ;AACrD,WAAK,cAAc,IAAI,WAAW,KAAK,OAAO,QAAQ,KAAK,OAAO,SAAS,CAAC;AAC5E,WAAK,kBAAkB;AAAA,QACr
B,KAAK,qBAAqB,QAAQ,KAAK,OAAO,OAAO,KAAK,OAAO,MAAM;AAAA,QACvE,KAAK,qBAAqB,QAAQ,KAAK,OAAO,OAAO,KAAK,OAAO,MAAM;AAAA,MACzE;AACA,WAAK,gBAAgB;AACrB,WAAK,kBAAkB,CAAC,OAAO,KAAK;AACpC,WAAK,oBAAoB,CAAC,KAAK,OAAO,OAAO,KAAK,OAAO,MAAM;AAC/D,uBAAiB;AAAA,IACnB;AAGA,UAAM,eAAe,KAAK;AAC1B,UAAM,gBAAgB,KAAK;AAG3B,iBAAa,MAAM,KAAK;AAExB,QAAI,kBAAkB,CAAC,KAAK,WAAW;AACrC,WAAK,YAAY,KAAK,gBAAgB,eAAe;AAAA,QACnD,cAAc,KAAK,aAAa,SAAQ,gBAAgB,YAAY;AAAA,QACpE,eAAe,KAAK,cAAc,SAAQ,iBAAiB,aAAa;AAAA,QACxE,UAAU,KAAK;AAAA,MACjB,CAAC;AAAA,IACH;AAEA,UAAM,YAAY,KAAK;AACvB,UAAM,iBAAiB,KAAK;AAE5B,QAAI,CAAC,gBAAgB;AACnB,YAAM,IAAI,MAAM,kCAAkC;AAAA,IACpD;AAGA,mBAAe,KAAK,SAAS,EAAE,gBAAgB,KAAK,OAAO,OAAO,KAAK,OAAO,MAAM;AAEpF,WAAO,KAAK,qBAAqB,QAAQ,MAAM,eAAe,MAAM,SAAS;AAAA,EAC/E;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,SAAwC;AACnD,SAAK,SAAS,KAAK,cAAc,EAAE,GAAG,KAAK,QAAQ,GAAG,QAAQ,CAAC;AAC/D,SAAK,eAAe;AAGpB,QAAI,QAAQ,SAAS,QAAQ,QAAQ;AACnC,WAAK,eAAe,QAAQ;AAC5B,WAAK,kBAAkB,CAAC,GAAG,QAAQ;AACnC,WAAK,kBAAkB,CAAC,GAAG,QAAQ;AACnC,WAAK,gBAAgB;AACrB,WAAK,kBAAkB;AACvB,WAAK,gBAAgB;AACrB,WAAK,kBAAkB,CAAC,OAAO,KAAK;AACpC,WAAK,oBAAoB,CAAC,GAAG,CAAC;AAC9B,WAAK,aAAa;AAClB,WAAK,YAAY;AACjB,WAAK,sBAAsB;AAC3B,WAAK,4BAA4B;AACjC,WAAK,cAAc;AAAA,IACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,UAAgB;AACd,SAAK,cAAc,QAAQ;AAC3B,SAAK,eAAe,QAAQ;AAC5B,SAAK,kBAAkB,CAAC,GAAG,QAAQ;AACnC,SAAK,kBAAkB,CAAC,GAAG,QAAQ;AACnC,SAAK,MAAM,QAAQ;AAEnB,SAAK,eAAe;AACpB,SAAK,gBAAgB;AACrB,SAAK,kBAAkB;AACvB,SAAK,gBAAgB;AACrB,SAAK,kBAAkB,CAAC,OAAO,KAAK;AACpC,SAAK,gBAAgB;AACrB,SAAK,OAAO;AACZ,SAAK,YAAY;AACjB,SAAK,aAAa;AAClB,SAAK,YAAY;AACjB,SAAK,iBAAiB;AACtB,SAAK,sBAAsB;AAC3B,SAAK,4BAA4B;AACjC,SAAK,cAAc;AACnB,SAAK,mBAAmB,CAAC,GAAG,CAAC;AAC7B,SAAK,oBAAoB,CAAC,GAAG,CAAC;AAAA,EAChC;AACF;;;ACxXO,SAAS,wBACd,MACA,SACY;AACZ,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,MAAI,SAAS,KAAK,UAAU,GAAG;AAC7B,UAAM,IAAI,MAAM,4CAA4C;AAAA,EAC9D;AAGA,QAAM,eAAe,qBAAqB,QAAQ,OAAO,MAAM;AAC/D,QAAM,aAAa,gBAAgB,cAAc,KAAK,aAAa,KAAK;AAExE,MAAI,aAAa,cAAc
;AAC7B,UAAM,IAAI;AAAA,MACR,wBAAwB,MAAM,8BAA8B,YAAY,eAAe,UAAU;AAAA,IACnG;AAAA,EACF;AAGA,QAAM,OAAqC;AAAA,IACzC;AAAA,IACA,YAAY;AAAA,IACZ,aAAa;AAAA,IACb;AAAA,EACF;AAEA,MAAI,aAAa,QAAW;AAC1B,SAAK,WAAW;AAAA,EAClB;AAEA,MAAI,iBAAiB,QAAW;AAC9B,SAAK,eAAe;AAAA,EACtB;AAEA,MAAI,kBAAkB,QAAW;AAC/B,SAAK,gBAAgB;AAAA,EACvB;AAEA,MAAI,eAAe,QAAW;AAC5B,SAAK,aAAa;AAAA,EACpB;AAGA,MAAI,UAAU;AACZ,UAAM,SAAS,gBAAgB,cAAc,OAAO,KAAK;AACzD,SAAK,WAAW,CAAC,MAAM;AAAA,EACzB;AAEA,SAAO,IAAI,WAAW,MAAM,IAAI;AAClC;AA8BO,SAAS,iBACd,UACA,OACA,QACA,SAAoB,QACR;AACZ,MAAI,WAAW,QAAQ;AACrB,UAAM,IAAI,MAAM,uBAAuB,MAAM,qCAAqC;AAAA,EACpF;AAEA,QAAM,WAAW,QAAQ;AACzB,QAAM,aAAc,QAAQ,KAAM,SAAS;AAC3C,QAAM,UAAU,WAAW,aAAa;AACxC,QAAM,UAAU,IAAI,WAAW,OAAO;AAGtC,QAAM,MAAM;AACZ,QAAM,MAAM;AACZ,QAAM,MAAM;AACZ,QAAM,MAAM;AACZ,QAAM,MAAM;AACZ,QAAM,MAAM;AACZ,QAAM,MAAM;AACZ,QAAM,MAAM;AACZ,QAAM,MAAM;AAGZ,QAAM,SAAS,QAAQ,SAAS,GAAG,QAAQ;AAC3C,QAAM,SAAS,QAAQ,SAAS,UAAU,WAAW,UAAU;AAC/D,QAAM,SAAS,QAAQ,SAAS,WAAW,YAAY,OAAO;AAG9D,WAAS,IAAI,GAAG,IAAI,QAAQ,KAAK;AAC/B,aAAS,IAAI,GAAG,IAAI,OAAO,KAAK;AAC9B,YAAM,WAAW,IAAI,QAAQ,KAAK;AAClC,YAAM,IAAI,SAAS,OAAO;AAC1B,YAAM,IAAI,SAAS,UAAU,CAAC;AAC9B,YAAM,IAAI,SAAS,UAAU,CAAC;AAG9B,YAAM,OAAO,MAAM,IAAI,MAAM,IAAI,MAAM;AACvC,aAAO,IAAI,QAAQ,CAAC,IAAI,KAAK,MAAM,KAAK,IAAI,GAAG,KAAK,IAAI,KAAK,IAAI,CAAC,CAAC;AAAA,IAIrE;AAAA,EACF;AAGA,WAAS,IAAI,GAAG,IAAI,SAAS,GAAG,KAAK;AACnC,aAAS,IAAI,GAAG,IAAI,QAAQ,GAAG,KAAK;AAElC,UAAI,OAAO;AACX,UAAI,OAAO;AAEX,eAAS,KAAK,GAAG,KAAK,GAAG,MAAM;AAC7B,iBAAS,KAAK,GAAG,KAAK,GAAG,MAAM;AAC7B,gBAAM,OAAO,IAAI,IAAI;AACrB,gBAAM,OAAO,IAAI,IAAI;AACrB,gBAAM,WAAW,OAAO,QAAQ,QAAQ;AACxC,gBAAM,IAAI,SAAS,OAAO;AAC1B,gBAAM,IAAI,SAAS,UAAU,CAAC;AAC9B,gBAAM,IAAI,SAAS,UAAU,CAAC;AAG9B,gBAAM,OAAO,MAAM,IAAI,MAAM,IAAI,MAAM,IAAI;AAC3C,gBAAM,OAAO,MAAM,IAAI,MAAM,IAAI,MAAM,IAAI;AAE3C,kBAAQ;AACR,kBAAQ;AAAA,QACV;AAAA,MACF;AAGA,YAAM,OAAO,OAAO;AACpB,YAAM,OAAO,OAAO;AAEpB,YAAM,YAAY,KAAK,QAAQ,KAAK;AACpC,aAAO,SAAS,IAAI,KAAK,MAAM,KAAK,IAAI,GAAG,KAAK,IAAI,KAAK,IAAI,CAAC,CAAC;AAC/D,aAAO,SAAS,IAAI,KAAK,MAAM,KAAK,IAAI,GAAG,KAAK,IAAI,KAAK,I
AAI,CAAC,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,SAAO;AACT;AAUO,SAAS,qBAAqB,QAAmB,OAAe,QAAwB;AAC7F,QAAM,WAAW,QAAQ;AAEzB,UAAQ,QAAQ;AAAA,IACd,KAAK;AAAA,IACL,KAAK;AAGH,aAAO,WAAW,WAAW;AAAA,IAE/B,KAAK;AAGH,aAAO,WAAW,IAAI,WAAW;AAAA,IAEnC,KAAK;AAGH,aAAO,WAAW;AAAA,IAEpB,KAAK;AAGH,aAAO,WAAW;AAAA,IAEpB;AACE,YAAM,IAAI,MAAM,2BAA2B,MAAM,EAAE;AAAA,EACvD;AACF;","names":[]}
package/package.json ADDED
@@ -0,0 +1,83 @@
1
+ {
2
+ "name": "@gyeonghokim/fisheye.js",
3
+ "private": false,
4
+ "version": "0.0.0",
5
+ "description": "Modern fisheye dewarping library for the web using WebGPU",
6
+ "type": "module",
7
+ "main": "./dist/index.js",
8
+ "module": "./dist/index.js",
9
+ "types": "./dist/index.d.ts",
10
+ "exports": {
11
+ ".": {
12
+ "types": "./dist/index.d.ts",
13
+ "import": "./dist/index.js"
14
+ }
15
+ },
16
+ "files": [
17
+ "dist"
18
+ ],
19
+ "scripts": {
20
+ "build": "tsup",
21
+ "dev": "tsup --watch",
22
+ "lint": "biome check .",
23
+ "lint:fix": "biome check --write .",
24
+ "format": "biome format --write .",
25
+ "type-check": "tsc --noEmit",
26
+ "test": "npm run test:unit && npm run test:e2e",
27
+ "test:unit": "vitest run",
28
+ "test:unit:watch": "vitest",
29
+ "test:unit:ui": "vitest --ui",
30
+ "test:e2e": "playwright test",
31
+ "test:e2e:update": "playwright test --update-snapshots",
32
+ "test:e2e:ui": "playwright test --ui",
33
+ "prepare": "husky",
34
+ "release": "semantic-release"
35
+ },
36
+ "keywords": [
37
+ "fisheye",
38
+ "dewarping",
39
+ "webgpu",
40
+ "typegpu",
41
+ "video",
42
+ "videoframe",
43
+ "webcodecs"
44
+ ],
45
+ "author": "GyeongHo Kim",
46
+ "license": "MIT",
47
+ "repository": {
48
+ "type": "git",
49
+ "url": "https://github.com/GyeongHoKim/fisheye.js.git"
50
+ },
51
+ "publishConfig": {
52
+ "registry": "https://registry.npmjs.org/",
53
+ "tag": "latest",
54
+ "access": "public"
55
+ },
56
+ "bugs": {
57
+ "url": "https://github.com/GyeongHoKim/fisheye.js/issues"
58
+ },
59
+ "homepage": "https://github.com/GyeongHoKim/fisheye.js#readme",
60
+ "dependencies": {
61
+ "typegpu": "^0.9.0"
62
+ },
63
+ "devDependencies": {
64
+ "@biomejs/biome": "^2.3.13",
65
+ "@commitlint/cli": "^20.3.1",
66
+ "@commitlint/config-conventional": "^20.3.1",
67
+ "@playwright/test": "^1.48.0",
68
+ "@semantic-release/changelog": "^6.0.3",
69
+ "@semantic-release/git": "^10.0.1",
70
+ "@types/node": "^25.0.10",
71
+ "@vitest/ui": "^4.0.18",
72
+ "@webgpu/types": "^0.1.69",
73
+ "happy-dom": "^20.4.0",
74
+ "husky": "^9.1.7",
75
+ "semantic-release": "^25.0.2",
76
+ "tsup": "^8.5.1",
77
+ "typescript": "^5.9.3",
78
+ "vitest": "^4.0.18"
79
+ },
80
+ "engines": {
81
+ "node": ">=18.18.0"
82
+ }
83
+ }