video2ascii 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,803 @@
1
"use strict";
// ---------------------------------------------------------------------------
// esbuild-generated CommonJS interop prelude (do not edit by hand).
// Cached intrinsics — avoids repeated property lookups and guards against
// later monkey-patching of Object.*.
// ---------------------------------------------------------------------------
var __defProp = Object.defineProperty;
var __defProps = Object.defineProperties;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;
// Define `key` on `obj` as a normal (enumerable/configurable/writable) data
// property; plain assignment is used when the key is not yet present.
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
// Runtime equivalent of the object spread `{ ...a, ...b }`: copies b's own
// enumerable string keys, then its own enumerable symbol keys, onto `a`.
var __spreadValues = (a, b) => {
  for (var prop in b || (b = {}))
    if (__hasOwnProp.call(b, prop))
      __defNormalProp(a, prop, b[prop]);
  if (__getOwnPropSymbols)
    for (var prop of __getOwnPropSymbols(b)) {
      if (__propIsEnum.call(b, prop))
        __defNormalProp(a, prop, b[prop]);
    }
  return a;
};
// Spread that preserves property descriptors (getters/setters) from `b`.
var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
// Install live re-export getters on `target` for each entry in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as live getters, skipping keys
// already present and the optional `except` key; preserves enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a module-exports object with an `__esModule: true` marker so CJS
// consumers and ESM interop both see the expected shape.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
36
+
37
// src/index.ts
// Public API surface of the package: everything exported here is installed
// as a live getter (see __export), then exposed via module.exports.
var index_exports = {};
__export(index_exports, {
  ASCII_CHARSETS: () => ASCII_CHARSETS,
  VideoToAscii: () => VideoToAscii
});
module.exports = __toCommonJS(index_exports);
44
+
45
+ // src/hooks/useVideoToAscii.ts
46
+ var import_react = require("react");
47
+
48
// src/lib/ascii-charsets.ts
// Named character ramps, each ordered from darkest to brightest glyph.
var ASCII_CHARSETS = {
  /** The classic 10-step ramp - a solid default for detail vs. speed */
  standard: {
    name: "Standard",
    chars: " .:-=+*#%@"
  },
  /** Unicode shade/solid blocks for a chunky retro look */
  blocks: {
    name: "Blocks",
    chars: " \u2591\u2592\u2593\u2588"
  },
  /** Five characters only - high contrast and cheap to render */
  minimal: {
    name: "Minimal",
    chars: " .oO@"
  },
  /** Two states: blank or full block - pure silhouettes */
  binary: {
    name: "Binary",
    chars: " \u2588"
  },
  /** Full 70-step ramp - maximum tonal detail at high resolutions */
  detailed: {
    name: "Detailed",
    chars: " .'`^\",:;Il!i><~+_-?][}{1)(|/tfjrxnuvczXYUJCLQ0OZmwqpdbkhao*#MW&8%B@$"
  },
  /** Round dot glyphs for a pointillist feel */
  dots: {
    name: "Dots",
    chars: " \xB7\u2022\u25CF"
  },
  /** Arrow glyphs - an experimental ramp */
  arrows: {
    name: "Arrows",
    chars: " \u2190\u2199\u2193\u2198\u2192\u2197\u2191\u2196"
  },
  /** Shade blocks plus moon-phase emoji - decorative ramp */
  emoji: {
    name: "Emoji",
    chars: " \u2591\u2592\u2593\u{1F311}\u{1F312}\u{1F313}\u{1F314}\u{1F315}"
  }
};
var DEFAULT_CHARSET = "standard";
// Split a charset's ramp into individual glyphs. Array.from iterates by
// Unicode code point, so astral-plane glyphs (the moon emoji) stay intact.
function getCharArray(charset) {
  return Array.from(ASCII_CHARSETS[charset].chars);
}
95
+
96
// src/lib/webgl/shaders/vertex.glsl
// GLSL ES 3.00 vertex shader, inlined by the bundler: draws a fullscreen
// quad and forwards per-vertex texture coordinates to the fragment stage.
var vertex_default = "#version 300 es\n\n// Fullscreen quad - passes texture coords to fragment shader\n\nin vec2 a_position;\nin vec2 a_texCoord;\nout vec2 v_texCoord;\n\nvoid main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n v_texCoord = a_texCoord;\n}\n";
98
+
99
// src/lib/webgl/shaders/fragment.glsl
// GLSL ES 3.00 fragment shader, inlined by the bundler. Maps each grid cell
// to an ASCII glyph from the atlas texture, with optional cursor-trail,
// ripple, and audio-reactive brightness effects.
//
// FIX: the trail loop previously iterated `i < 12` even though the uniform
// is declared `vec2 u_trail[24]` and the JS side (MAX_TRAIL_LENGTH = 24,
// default trailLength = 24) uploads up to 24 positions - the back half of
// the trail was silently never drawn. The bound is now 24 to match.
var fragment_default = "#version 300 es\nprecision highp float;\n\n// Textures\nuniform sampler2D u_video;\nuniform sampler2D u_asciiAtlas;\n\n// Dimensions\nuniform vec2 u_resolution;\nuniform vec2 u_charSize;\nuniform vec2 u_gridSize;\nuniform float u_numChars;\n\n// Rendering options\nuniform bool u_colored;\nuniform float u_blend;\nuniform float u_highlight;\n\n// Audio\nuniform float u_audioLevel;\nuniform float u_audioReactivity;\nuniform float u_audioSensitivity;\n\n// Mouse\nuniform vec2 u_mouse;\nuniform float u_mouseRadius;\nuniform vec2 u_trail[24];\nuniform int u_trailLength;\n\n// Ripple\nuniform vec4 u_ripples[8];\nuniform float u_time;\nuniform float u_rippleEnabled;\nuniform float u_rippleSpeed;\n\nin vec2 v_texCoord;\nout vec4 fragColor;\n\nvoid main() {\n // Figure out which ASCII cell this pixel is in\n vec2 cellCoord = floor(v_texCoord * u_gridSize);\n vec2 thisCell = cellCoord;\n \n // Sample video at cell center (mipmaps handle averaging)\n vec2 cellCenter = (cellCoord + 0.5) / u_gridSize;\n vec4 videoColor = texture(u_video, cellCenter);\n \n // Perceived brightness using human eye sensitivity weights\n float baseBrightness = dot(videoColor.rgb, vec3(0.299, 0.587, 0.114));\n \n // Audio reactivity - louder = brighter, silence = darker\n float minBrightness = mix(0.3, 0.0, u_audioSensitivity);\n float maxBrightness = mix(1.0, 5.0, u_audioSensitivity);\n float audioMultiplier = mix(minBrightness, maxBrightness, u_audioLevel);\n float audioModulated = baseBrightness * audioMultiplier;\n float brightness = mix(baseBrightness, audioModulated, u_audioReactivity);\n \n // Cursor glow - blocky circle effect\n float cursorGlow = 0.0;\n float cursorRadius = 5.0;\n \n vec2 mouseCell = floor(u_mouse * u_gridSize);\n float cellDist = length(thisCell - mouseCell);\n if (cellDist <= cursorRadius && u_mouse.x >= 0.0) {\n cursorGlow += 1.0 - cellDist / cursorRadius;\n }\n \n // Trail effect\n for (int i = 0; i < 24; i++) {\n if (i >= u_trailLength) break;\n vec2 trailPos = u_trail[i];\n if (trailPos.x < 0.0) continue;\n \n vec2 trailCell = floor(trailPos * u_gridSize);\n float trailDist = length(thisCell - trailCell);\n float trailRadius = cursorRadius * 0.8;\n \n if (trailDist <= trailRadius) {\n float fade = 1.0 - float(i) / float(u_trailLength);\n cursorGlow += (1.0 - trailDist / trailRadius) * 0.5 * fade;\n }\n }\n cursorGlow = min(cursorGlow, 1.0);\n \n // Ripple effect - expanding rings on click\n float rippleGlow = 0.0;\n if (u_rippleEnabled > 0.5) {\n for (int i = 0; i < 8; i++) {\n vec4 ripple = u_ripples[i];\n if (ripple.w < 0.5) continue;\n \n float age = u_time - ripple.z;\n if (age < 0.0) continue;\n \n vec2 rippleCell = floor(ripple.xy * u_gridSize);\n float cellDist = length(thisCell - rippleCell);\n float initialRadius = 5.0;\n \n float distFromEdge = max(0.0, cellDist - initialRadius);\n float rippleSpeed = u_rippleSpeed;\n float reachTime = distFromEdge / rippleSpeed;\n float timeSinceReached = age - reachTime;\n \n float fadeDuration = 0.5;\n if (timeSinceReached >= 0.0 && timeSinceReached < fadeDuration) {\n float pop = 1.0 - timeSinceReached / fadeDuration;\n pop = pop * pop;\n rippleGlow += pop * 0.3;\n }\n }\n rippleGlow = min(rippleGlow, 1.0);\n }\n \n // Map brightness to character index (0 = darkest char, numChars-1 = brightest)\n float charIndex = floor(brightness * (u_numChars - 0.001));\n \n // Find the character in the atlas (horizontal strip of pre-rendered chars)\n float atlasX = charIndex / u_numChars;\n vec2 cellPos = fract(v_texCoord * u_gridSize);\n vec2 atlasCoord = vec2(atlasX + cellPos.x / u_numChars, cellPos.y);\n vec4 charColor = texture(u_asciiAtlas, atlasCoord);\n \n // Pick the color - video colors or green terminal aesthetic\n vec3 baseColor;\n if (u_colored) {\n baseColor = videoColor.rgb;\n } else {\n baseColor = vec3(0.0, 1.0, 0.0);\n }\n \n // Background highlight behind each character\n float bgIntensity = 0.15 + u_highlight * 0.35;\n vec3 bgColor = baseColor * bgIntensity;\n vec3 textColor = baseColor * 1.2;\n vec3 finalColor = mix(bgColor, textColor, charColor.r);\n \n // Add cursor and ripple glow\n finalColor += cursorGlow * baseColor * 0.5;\n finalColor += rippleGlow * baseColor;\n \n // Blend with original video if requested\n vec3 blendedColor = mix(finalColor, videoColor.rgb, u_blend);\n \n fragColor = vec4(blendedColor, 1.0);\n}\n";
101
+
102
+ // src/lib/webgl/utils.ts
103
/**
 * Compile a single shader stage.
 *
 * @param gl     WebGL2 rendering context
 * @param source GLSL source string
 * @param type   gl.VERTEX_SHADER or gl.FRAGMENT_SHADER
 * @returns the compiled WebGLShader, or null on any failure (logged)
 */
function compileShader(gl, source, type) {
  const shader = gl.createShader(type);
  if (!shader) {
    console.error("Failed to create shader");
    return null;
  }
  gl.shaderSource(shader, source);
  gl.compileShader(shader);
  // Success path first; on failure, log the driver's info log and free
  // the shader object so nothing leaks.
  if (gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    return shader;
  }
  console.error("Shader compile error:", gl.getShaderInfoLog(shader));
  gl.deleteShader(shader);
  return null;
}
118
/**
 * Link two compiled shader stages into a usable WebGLProgram.
 *
 * @param gl             WebGL2 rendering context
 * @param vertexShader   compiled vertex shader
 * @param fragmentShader compiled fragment shader
 * @returns the linked program, or null on any failure (logged)
 */
function createProgram(gl, vertexShader, fragmentShader) {
  const program = gl.createProgram();
  if (!program) {
    console.error("Failed to create program");
    return null;
  }
  gl.attachShader(program, vertexShader);
  gl.attachShader(program, fragmentShader);
  gl.linkProgram(program);
  // Happy path first; otherwise log the link error and release the
  // half-built program object.
  if (gl.getProgramParameter(program, gl.LINK_STATUS)) {
    return program;
  }
  console.error("Program link error:", gl.getProgramInfoLog(program));
  gl.deleteProgram(program);
  return null;
}
134
/**
 * Upload a fullscreen quad (two triangles in clip space) and wire up the
 * program's `a_position` / `a_texCoord` attributes.
 *
 * Texture V coordinates are flipped relative to clip-space Y so the video
 * is not drawn upside down.
 */
function createFullscreenQuad(gl, program) {
  const positions = new Float32Array([
    -1, -1, // bottom-left
     1, -1, // bottom-right
    -1,  1, // top-left
    -1,  1, // top-left
     1, -1, // bottom-right
     1,  1  // top-right
  ]);
  const texCoords = new Float32Array([0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0]);
  // One static buffer per attribute: upload the data, then point the
  // named attribute at it (2 floats per vertex, tightly packed).
  const uploadAttribute = (data, attribName) => {
    const buffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
    gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW);
    const location = gl.getAttribLocation(program, attribName);
    gl.enableVertexAttribArray(location);
    gl.vertexAttribPointer(location, 2, gl.FLOAT, false, 0, 0);
  };
  uploadAttribute(positions, "a_position");
  uploadAttribute(texCoords, "a_texCoord");
}
169
/**
 * Create the texture that will receive each video frame.
 *
 * Uses trilinear minification (LINEAR_MIPMAP_LINEAR) so sampling one texel
 * per ASCII cell averages the underlying pixels, and clamps both wrap axes.
 *
 * @returns the configured WebGLTexture, or null if allocation fails
 */
function createVideoTexture(gl) {
  const texture = gl.createTexture();
  if (!texture) return null;
  gl.bindTexture(gl.TEXTURE_2D, texture);
  const parameters = [
    [gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR],
    [gl.TEXTURE_MAG_FILTER, gl.LINEAR],
    [gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE],
    [gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE]
  ];
  for (const [pname, value] of parameters) {
    gl.texParameteri(gl.TEXTURE_2D, pname, value);
  }
  return texture;
}
183
/**
 * Pre-render every glyph of the charset onto a horizontal canvas strip and
 * upload it as a texture atlas (one charSize x charSize cell per glyph,
 * white glyphs on black, darkest glyph leftmost).
 *
 * @param gl       WebGL2 rendering context
 * @param chars    array of glyphs, ordered dark -> bright
 * @param charSize cell size in pixels (default 64)
 * @returns the atlas texture, or null if canvas 2D or texture creation fails
 */
function createAsciiAtlas(gl, chars, charSize = 64) {
  const canvas = document.createElement("canvas");
  canvas.width = charSize * chars.length;
  canvas.height = charSize;
  const ctx = canvas.getContext("2d");
  if (!ctx) return null;
  ctx.fillStyle = "#000";
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  ctx.fillStyle = "#fff";
  ctx.font = `${charSize * 0.8}px monospace`;
  ctx.textAlign = "center";
  ctx.textBaseline = "middle";
  // Each glyph is centered inside its own cell of the strip.
  chars.forEach((glyph, index) => {
    ctx.fillText(glyph, index * charSize + charSize / 2, charSize / 2);
  });
  const texture = gl.createTexture();
  if (!texture) return null;
  gl.bindTexture(gl.TEXTURE_2D, texture);
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  return texture;
}
210
/**
 * Compute the ASCII grid size for a video at a given column count.
 *
 * Rows are halved because monospace cells are roughly twice as tall as
 * they are wide, so halving keeps the rendered aspect ratio close to the
 * video's.
 *
 * FIX: for extremely wide videos the rounded row count could be 0, which
 * would produce a zero-height canvas downstream; clamp to at least 1 row.
 *
 * @param videoWidth  intrinsic video width in pixels
 * @param videoHeight intrinsic video height in pixels
 * @param cols        desired number of character columns
 * @returns {{cols: number, rows: number}}
 */
function calculateGridDimensions(videoWidth, videoHeight, cols) {
  const aspectRatio = videoWidth / videoHeight;
  const rows = Math.max(1, Math.round(cols / aspectRatio / 2));
  return { cols, rows };
}
215
+
216
// src/lib/webgl/types.ts
// Width of a rendered character cell as a fraction of the font size
// (cells are 0.6 * fontSize wide by fontSize tall throughout the package).
var CHAR_WIDTH_RATIO = 0.6;
218
+
219
// src/hooks/useVideoToAscii.ts
// Sizes of the fixed uniform arrays in the fragment shader; the cached
// location arrays below must match the GLSL declarations.
var MAX_TRAIL_LENGTH = 24;
var MAX_RIPPLES = 8;
/**
 * Core hook: renders a hidden <video> element as ASCII art onto a <canvas>
 * via WebGL2.
 *
 * Responsibilities visible in this block:
 * - creates refs for the DOM nodes, GL objects, and per-frame counters;
 * - compiles/links the shaders and uploads the glyph atlas (initWebGL);
 * - runs a requestAnimationFrame loop (render) while the video plays;
 * - exposes a pluggable "uniform setter" registry so effect hooks
 *   (mouse/ripple/audio) can inject their own uniforms each frame;
 * - tracks FPS / average frame time and reports them via onStats.
 *
 * @param {object} [options] fontSize, colored, blend (0-100), highlight
 *   (0-100), charset key, maxWidth in px, and an optional onStats callback.
 * @returns refs, playback state, stats, and play/pause/toggle controls.
 */
function useVideoToAscii(options = {}) {
  const {
    fontSize = 10,
    colored = false,
    blend = 0,
    highlight = 0,
    charset = DEFAULT_CHARSET,
    maxWidth = 900,
    onStats
  } = options;
  // DOM + GL object handles (refs so the rAF loop sees current values).
  const containerRef = (0, import_react.useRef)(null);
  const videoRef = (0, import_react.useRef)(null);
  const canvasRef = (0, import_react.useRef)(null);
  const glRef = (0, import_react.useRef)(null);
  const programRef = (0, import_react.useRef)(null);
  const videoTextureRef = (0, import_react.useRef)(null);
  const atlasTextureRef = (0, import_react.useRef)(null);
  const animationRef = (0, import_react.useRef)(0);
  // id -> setter callbacks contributed by the effect hooks; invoked every frame.
  const uniformSettersRef = (0, import_react.useRef)(/* @__PURE__ */ new Map());
  const uniformLocationsRef = (0, import_react.useRef)(null);
  // FPS bookkeeping: frames since last report + rolling frame-time window.
  const frameCountRef = (0, import_react.useRef)(0);
  const frameTimesRef = (0, import_react.useRef)([]);
  const lastFpsTimeRef = (0, import_react.useRef)(performance.now());
  const [dimensions, setDimensions] = (0, import_react.useState)({ cols: 80, rows: 24 });
  const [stats, setStats] = (0, import_react.useState)({ fps: 0, frameTime: 0 });
  const [isReady, setIsReady] = (0, import_react.useState)(false);
  const [isPlaying, setIsPlaying] = (0, import_react.useState)(false);
  // Column count derived from the pixel budget and character cell width.
  const charWidth = fontSize * CHAR_WIDTH_RATIO;
  const cols = Math.floor(maxWidth / charWidth);
  const chars = (0, import_react.useMemo)(() => getCharArray(charset), [charset]);
  // Registry API handed to effect hooks (mouse/ripple/audio).
  const registerUniformSetter = (0, import_react.useCallback)(
    (id, setter) => {
      uniformSettersRef.current.set(id, setter);
    },
    []
  );
  const unregisterUniformSetter = (0, import_react.useCallback)((id) => {
    uniformSettersRef.current.delete(id);
  }, []);
  // Look up every uniform location once after linking; per-frame code only
  // reads from this cached table.
  const cacheUniformLocations = (0, import_react.useCallback)(
    (gl, program) => {
      const get = (name) => gl.getUniformLocation(program, name);
      return {
        // Core uniforms
        u_video: get("u_video"),
        u_asciiAtlas: get("u_asciiAtlas"),
        u_resolution: get("u_resolution"),
        u_charSize: get("u_charSize"),
        u_gridSize: get("u_gridSize"),
        u_numChars: get("u_numChars"),
        u_colored: get("u_colored"),
        u_blend: get("u_blend"),
        u_highlight: get("u_highlight"),
        // Mouse uniforms
        u_mouse: get("u_mouse"),
        u_mouseRadius: get("u_mouseRadius"),
        u_trailLength: get("u_trailLength"),
        u_trail: Array.from(
          { length: MAX_TRAIL_LENGTH },
          (_, i) => get(`u_trail[${i}]`)
        ),
        // Ripple uniforms
        u_time: get("u_time"),
        u_rippleEnabled: get("u_rippleEnabled"),
        u_rippleSpeed: get("u_rippleSpeed"),
        u_ripples: Array.from(
          { length: MAX_RIPPLES },
          (_, i) => get(`u_ripples[${i}]`)
        ),
        // Audio uniforms
        u_audioLevel: get("u_audioLevel"),
        u_audioReactivity: get("u_audioReactivity"),
        u_audioSensitivity: get("u_audioSensitivity")
      };
    },
    []
  );
  // One-time GL setup; requires video metadata (videoWidth) to be known.
  // Returns false if the canvas/video is missing or WebGL2 is unavailable.
  const initWebGL = (0, import_react.useCallback)(() => {
    const canvas = canvasRef.current;
    const video = videoRef.current;
    if (!canvas || !video || !video.videoWidth) return false;
    const grid = calculateGridDimensions(
      video.videoWidth,
      video.videoHeight,
      cols
    );
    setDimensions(grid);
    const pixelWidth = grid.cols * charWidth;
    const pixelHeight = grid.rows * fontSize;
    canvas.width = pixelWidth;
    canvas.height = pixelHeight;
    const gl = canvas.getContext("webgl2", {
      antialias: false,
      preserveDrawingBuffer: false
    });
    if (!gl) {
      console.error("WebGL2 not supported");
      return false;
    }
    glRef.current = gl;
    const vertexShader = compileShader(gl, vertex_default, gl.VERTEX_SHADER);
    const fragmentShader = compileShader(
      gl,
      fragment_default,
      gl.FRAGMENT_SHADER
    );
    if (!vertexShader || !fragmentShader) return false;
    const program = createProgram(gl, vertexShader, fragmentShader);
    if (!program) return false;
    programRef.current = program;
    gl.useProgram(program);
    createFullscreenQuad(gl, program);
    videoTextureRef.current = createVideoTexture(gl);
    // Atlas cell size follows fontSize (not the helper's 64px default).
    atlasTextureRef.current = createAsciiAtlas(gl, chars, fontSize);
    const locations = cacheUniformLocations(gl, program);
    uniformLocationsRef.current = locations;
    // Static uniforms: texture units 0/1, geometry, and neutral defaults
    // for the optional mouse/ripple/audio effects.
    gl.uniform1i(locations.u_video, 0);
    gl.uniform1i(locations.u_asciiAtlas, 1);
    gl.uniform2f(locations.u_resolution, pixelWidth, pixelHeight);
    gl.uniform2f(locations.u_charSize, charWidth, fontSize);
    gl.uniform2f(locations.u_gridSize, cols, grid.rows);
    gl.uniform1f(locations.u_numChars, chars.length);
    gl.uniform2f(locations.u_mouse, -1, -1);
    gl.uniform1f(locations.u_mouseRadius, 0);
    gl.uniform1i(locations.u_trailLength, 0);
    gl.uniform1f(locations.u_rippleEnabled, 0);
    gl.uniform1f(locations.u_audioLevel, 0);
    gl.uniform1f(locations.u_audioReactivity, 0);
    gl.uniform1f(locations.u_audioSensitivity, 0);
    gl.viewport(0, 0, pixelWidth, pixelHeight);
    setIsReady(true);
    return true;
  }, [cols, charWidth, fontSize, chars, cacheUniformLocations]);
  // Per-frame draw: upload the current video frame (regenerating mipmaps so
  // minification averages pixels), run registered uniform setters, draw the
  // quad, and update FPS stats roughly once per second.
  const render = (0, import_react.useCallback)(() => {
    const gl = glRef.current;
    const video = videoRef.current;
    const program = programRef.current;
    const locations = uniformLocationsRef.current;
    if (!gl || !video || !program || !locations || video.paused || video.ended)
      return;
    const frameStart = performance.now();
    gl.activeTexture(gl.TEXTURE0);
    gl.bindTexture(gl.TEXTURE_2D, videoTextureRef.current);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
    gl.generateMipmap(gl.TEXTURE_2D);
    gl.activeTexture(gl.TEXTURE1);
    gl.bindTexture(gl.TEXTURE_2D, atlasTextureRef.current);
    // blend/highlight props are 0-100; shader expects 0-1.
    gl.uniform1i(locations.u_colored, colored ? 1 : 0);
    gl.uniform1f(locations.u_blend, blend / 100);
    gl.uniform1f(locations.u_highlight, highlight / 100);
    for (const setter of uniformSettersRef.current.values()) {
      setter(gl, program, locations);
    }
    gl.drawArrays(gl.TRIANGLES, 0, 6);
    const frameEnd = performance.now();
    frameCountRef.current++;
    frameTimesRef.current.push(frameEnd - frameStart);
    if (frameTimesRef.current.length > 60) frameTimesRef.current.shift();
    const now = performance.now();
    if (now - lastFpsTimeRef.current >= 1e3) {
      const avgFrameTime = frameTimesRef.current.reduce((a, b) => a + b, 0) / frameTimesRef.current.length;
      const newStats = { fps: frameCountRef.current, frameTime: avgFrameTime };
      setStats(newStats);
      onStats == null ? void 0 : onStats(newStats);
      frameCountRef.current = 0;
      lastFpsTimeRef.current = now;
    }
    animationRef.current = requestAnimationFrame(render);
  }, [colored, blend, highlight, onStats]);
  // Wire video element events: init GL once metadata arrives, start/stop the
  // rAF loop with playback. readyState >= 1 handles the case where metadata
  // loaded before this effect ran.
  (0, import_react.useEffect)(() => {
    const video = videoRef.current;
    if (!video) return;
    const handleLoadedMetadata = () => {
      initWebGL();
    };
    const handlePlay = () => {
      setIsPlaying(true);
      animationRef.current = requestAnimationFrame(render);
    };
    const handlePause = () => {
      setIsPlaying(false);
      cancelAnimationFrame(animationRef.current);
    };
    const handleEnded = () => {
      setIsPlaying(false);
      cancelAnimationFrame(animationRef.current);
    };
    video.addEventListener("loadedmetadata", handleLoadedMetadata);
    video.addEventListener("play", handlePlay);
    video.addEventListener("pause", handlePause);
    video.addEventListener("ended", handleEnded);
    if (video.readyState >= 1) {
      handleLoadedMetadata();
    }
    return () => {
      video.removeEventListener("loadedmetadata", handleLoadedMetadata);
      video.removeEventListener("play", handlePlay);
      video.removeEventListener("pause", handlePause);
      video.removeEventListener("ended", handleEnded);
      cancelAnimationFrame(animationRef.current);
    };
  }, [initWebGL, render]);
  // Re-init when initWebGL's inputs change (fontSize/charset/etc.) and the
  // video already has metadata.
  (0, import_react.useEffect)(() => {
    if (videoRef.current && videoRef.current.readyState >= 1) {
      initWebGL();
    }
  }, [initWebGL]);
  // Unmount cleanup: release GL resources and stop the animation loop.
  (0, import_react.useEffect)(() => {
    return () => {
      const gl = glRef.current;
      if (gl) {
        if (videoTextureRef.current) gl.deleteTexture(videoTextureRef.current);
        if (atlasTextureRef.current) gl.deleteTexture(atlasTextureRef.current);
        if (programRef.current) gl.deleteProgram(programRef.current);
      }
      cancelAnimationFrame(animationRef.current);
    };
  }, []);
  // Imperative playback controls (play()/pause() results are intentionally
  // fire-and-forget here).
  const play = (0, import_react.useCallback)(() => {
    var _a;
    (_a = videoRef.current) == null ? void 0 : _a.play();
  }, []);
  const pause = (0, import_react.useCallback)(() => {
    var _a;
    (_a = videoRef.current) == null ? void 0 : _a.pause();
  }, []);
  const toggle = (0, import_react.useCallback)(() => {
    const video = videoRef.current;
    if (!video) return;
    if (video.paused) {
      video.play();
    } else {
      video.pause();
    }
  }, []);
  // Global spacebar toggles playback, but only when nothing focusable has
  // focus (e.target === document.body) so form inputs keep their behavior.
  (0, import_react.useEffect)(() => {
    const handleKeyDown = (e) => {
      if (e.code === "Space" && e.target === document.body) {
        e.preventDefault();
        toggle();
      }
    };
    window.addEventListener("keydown", handleKeyDown);
    return () => window.removeEventListener("keydown", handleKeyDown);
  }, [toggle]);
  return {
    containerRef,
    videoRef,
    canvasRef,
    glRef,
    programRef,
    uniformLocationsRef,
    registerUniformSetter,
    unregisterUniformSetter,
    dimensions,
    stats,
    isReady,
    isPlaying,
    play,
    pause,
    toggle
  };
}
485
+
486
// src/hooks/useAsciiMouseEffect.ts
var import_react2 = require("react");
// Must match `uniform vec2 u_trail[24]` in the fragment shader.
var MAX_TRAIL_LENGTH2 = 24;
/**
 * Adds a cursor-glow + trail effect to a useVideoToAscii instance.
 *
 * Registers a per-frame uniform setter (id "mouse") that uploads the current
 * pointer position and the trail history; mouse state lives in refs so the
 * handlers stay referentially stable and no re-renders occur on mousemove.
 * Coordinates are normalized to 0-1 over the container; (-1, -1) means
 * "no pointer" and is what the shader checks via `u_mouse.x >= 0.0`.
 *
 * @param ascii   object returned by useVideoToAscii
 * @param options { enabled = true, trailLength = 24 }
 * @returns { onMouseMove, onMouseLeave } handlers to spread onto the container
 */
function useAsciiMouseEffect(ascii, options = {}) {
  const { enabled = true, trailLength = 24 } = options;
  const mouseRef = (0, import_react2.useRef)({ x: -1, y: -1 });
  const trailRef = (0, import_react2.useRef)([]);
  // Mirror props into refs so the stable callbacks read fresh values.
  const enabledRef = (0, import_react2.useRef)(enabled);
  const trailLengthRef = (0, import_react2.useRef)(trailLength);
  (0, import_react2.useEffect)(() => {
    enabledRef.current = enabled;
    trailLengthRef.current = trailLength;
  }, [enabled, trailLength]);
  (0, import_react2.useEffect)(() => {
    if (!enabled) return;
    // Runs every rendered frame: upload mouse position, trail length, and
    // all trail slots (unused slots get the (-1,-1) sentinel).
    const uniformSetter = (gl, _program, locations) => {
      gl.uniform2f(locations.u_mouse, mouseRef.current.x, mouseRef.current.y);
      const trail = trailRef.current;
      gl.uniform1i(locations.u_trailLength, trail.length);
      for (let i = 0; i < MAX_TRAIL_LENGTH2; i++) {
        const loc = locations.u_trail[i];
        if (loc) {
          const pos = trail[i] || { x: -1, y: -1 };
          gl.uniform2f(loc, pos.x, pos.y);
        }
      }
    };
    ascii.registerUniformSetter("mouse", uniformSetter);
    return () => {
      ascii.unregisterUniformSetter("mouse");
    };
  }, [ascii, enabled]);
  const onMouseMove = (0, import_react2.useCallback)((e) => {
    if (!enabledRef.current) return;
    const rect = e.currentTarget.getBoundingClientRect();
    const newPos = {
      // Convert pixel coords to 0-1 range
      x: (e.clientX - rect.left) / rect.width,
      y: (e.clientY - rect.top) / rect.height
    };
    // Push the previous position onto the trail (skipping the initial
    // (-1,-1) sentinel) and cap the trail at trailLength entries.
    if (mouseRef.current.x >= 0) {
      trailRef.current.unshift(__spreadValues({}, mouseRef.current));
      if (trailRef.current.length > trailLengthRef.current) {
        trailRef.current.pop();
      }
    }
    mouseRef.current = newPos;
  }, []);
  // Leaving the container resets to the "no pointer" sentinel and clears
  // the trail so the glow disappears.
  const onMouseLeave = (0, import_react2.useCallback)(() => {
    mouseRef.current = { x: -1, y: -1 };
    trailRef.current = [];
  }, []);
  return { onMouseMove, onMouseLeave };
}
540
+
541
// src/hooks/useAsciiRipple.ts
var import_react3 = require("react");
// Must match `uniform vec4 u_ripples[8]` in the fragment shader.
var MAX_RIPPLES2 = 8;
/**
 * Adds click-triggered expanding ripple rings to a useVideoToAscii instance.
 *
 * Registers a per-frame uniform setter (id "ripple") that uploads the shared
 * clock (u_time), the ripple speed, and up to 8 active ripples as vec4s of
 * (x, y, startTime, active-flag). Expired ripples are pruned each frame once
 * they could have crossed the whole grid diagonal.
 *
 * @param ascii   object returned by useVideoToAscii
 * @param options { enabled = false, speed = 40 } (speed in cells/second)
 * @returns { onClick } handler to spread onto the container
 */
function useAsciiRipple(ascii, options = {}) {
  const { enabled = false, speed = 40 } = options;
  const ripplesRef = (0, import_react3.useRef)([]);
  // Mirror props into refs so the stable onClick reads fresh values.
  const enabledRef = (0, import_react3.useRef)(enabled);
  const speedRef = (0, import_react3.useRef)(speed);
  (0, import_react3.useEffect)(() => {
    enabledRef.current = enabled;
    speedRef.current = speed;
  }, [enabled, speed]);
  (0, import_react3.useEffect)(() => {
    if (!enabled) return;
    const uniformSetter = (gl, _program, locations) => {
      // Seconds, to match the shader's u_time / ripple.z units.
      const currentTime = performance.now() / 1e3;
      gl.uniform1f(locations.u_time, currentTime);
      gl.uniform1f(locations.u_rippleEnabled, 1);
      gl.uniform1f(locations.u_rippleSpeed, speedRef.current);
      // A ripple is dead once it has had time to traverse the grid
      // diagonal (plus the 1s fade margin); prune those.
      const maxDist = Math.sqrt(
        ascii.dimensions.cols ** 2 + ascii.dimensions.rows ** 2
      );
      const maxLifetime = maxDist / speedRef.current + 1;
      ripplesRef.current = ripplesRef.current.filter(
        (r) => currentTime - r.startTime < maxLifetime
      );
      for (let i = 0; i < MAX_RIPPLES2; i++) {
        const loc = locations.u_ripples[i];
        if (loc) {
          const ripple = ripplesRef.current[i];
          if (ripple) {
            gl.uniform4f(loc, ripple.x, ripple.y, ripple.startTime, 1);
          } else {
            // w = 0 marks the slot inactive for the shader.
            gl.uniform4f(loc, 0, 0, 0, 0);
          }
        }
      }
    };
    ascii.registerUniformSetter("ripple", uniformSetter);
    return () => {
      ascii.unregisterUniformSetter("ripple");
    };
  }, [ascii, enabled]);
  // Each click spawns a ripple at the normalized click position; the newest
  // ripple evicts the oldest when all 8 slots are full.
  const onClick = (0, import_react3.useCallback)((e) => {
    if (!enabledRef.current) return;
    const rect = e.currentTarget.getBoundingClientRect();
    const x = (e.clientX - rect.left) / rect.width;
    const y = (e.clientY - rect.top) / rect.height;
    ripplesRef.current.unshift({
      x,
      y,
      startTime: performance.now() / 1e3
    });
    if (ripplesRef.current.length > MAX_RIPPLES2) {
      ripplesRef.current.pop();
    }
  }, []);
  return { onClick };
}
600
+
601
// src/hooks/useAsciiAudio.ts
var import_react4 = require("react");
/**
 * Drives the shader's audio-reactive brightness from the video's own audio
 * track via the Web Audio API.
 *
 * On first play it routes the media element through an AnalyserNode
 * (video -> analyser -> destination, so audio remains audible) and registers
 * a per-frame uniform setter (id "audio") that uploads a smoothed volume
 * level plus the reactivity/sensitivity settings (props are 0-100, shader
 * expects 0-1).
 *
 * NOTE(review): createMediaElementSource can only be called once per media
 * element; the connectedVideoRef guard avoids re-connecting the same video,
 * but a new AnalyserNode is still created on each connect - confirm against
 * remount behavior.
 *
 * @param ascii   object returned by useVideoToAscii
 * @param options { enabled = false, reactivity = 50, sensitivity = 50 }
 * @returns nothing - the effect is applied entirely through uniforms
 */
function useAsciiAudio(ascii, options = {}) {
  const { enabled = false, reactivity = 50, sensitivity = 50 } = options;
  // Web Audio graph handles.
  const audioContextRef = (0, import_react4.useRef)(null);
  const analyzerRef = (0, import_react4.useRef)(null);
  const sourceRef = (0, import_react4.useRef)(null);
  const dataArrayRef = (0, import_react4.useRef)(null);
  // Exponentially smoothed 0-1 volume estimate read by the uniform setter.
  const volumeRef = (0, import_react4.useRef)(0);
  const connectedVideoRef = (0, import_react4.useRef)(null);
  // Mirror props into refs so per-frame code reads fresh values.
  const enabledRef = (0, import_react4.useRef)(enabled);
  const reactivityRef = (0, import_react4.useRef)(reactivity);
  const sensitivityRef = (0, import_react4.useRef)(sensitivity);
  (0, import_react4.useEffect)(() => {
    enabledRef.current = enabled;
    reactivityRef.current = reactivity;
    sensitivityRef.current = sensitivity;
  }, [enabled, reactivity, sensitivity]);
  // Sample the analyser: average all frequency bins to 0-1, then blend
  // 70/30 with the previous value for temporal smoothing.
  const updateVolume = () => {
    const analyzer = analyzerRef.current;
    const dataArray = dataArrayRef.current;
    if (!analyzer || !dataArray) return;
    analyzer.getByteFrequencyData(dataArray);
    let sum = 0;
    for (let i = 0; i < dataArray.length; i++) {
      sum += dataArray[i];
    }
    const average = sum / dataArray.length / 255;
    volumeRef.current = volumeRef.current * 0.7 + average * 0.3;
  };
  // Build the audio graph lazily on the first "play" event (browsers only
  // allow AudioContexts to start after a user gesture).
  (0, import_react4.useEffect)(() => {
    if (!enabled) return;
    const video = ascii.videoRef.current;
    if (!video) return;
    const connectAudio = () => {
      // Already wired to this element: just resume the (possibly
      // suspended) context.
      if (connectedVideoRef.current === video && audioContextRef.current) {
        audioContextRef.current.resume();
        return;
      }
      try {
        if (!audioContextRef.current) {
          audioContextRef.current = new AudioContext();
        }
        const ctx = audioContextRef.current;
        const analyzer = ctx.createAnalyser();
        analyzer.fftSize = 256;
        analyzer.smoothingTimeConstant = 0.8;
        analyzerRef.current = analyzer;
        dataArrayRef.current = new Uint8Array(
          analyzer.frequencyBinCount
        );
        // Route through the analyser but keep audio audible by
        // connecting on to the destination.
        const source = ctx.createMediaElementSource(video);
        source.connect(analyzer);
        analyzer.connect(ctx.destination);
        sourceRef.current = source;
        connectedVideoRef.current = video;
        ctx.resume();
      } catch (error) {
        // Best-effort: audio reactivity silently degrades to "off".
        console.warn("Failed to connect audio analyzer:", error);
      }
    };
    const handlePlay = () => {
      connectAudio();
    };
    video.addEventListener("play", handlePlay);
    if (!video.paused) {
      connectAudio();
    }
    return () => {
      video.removeEventListener("play", handlePlay);
    };
  }, [ascii.videoRef, enabled]);
  // Per-frame uniform uploads; registered under the "audio" id.
  (0, import_react4.useEffect)(() => {
    if (!enabled) return;
    const uniformSetter = (gl, _program, locations) => {
      updateVolume();
      gl.uniform1f(locations.u_audioLevel, volumeRef.current);
      gl.uniform1f(locations.u_audioReactivity, reactivityRef.current / 100);
      gl.uniform1f(locations.u_audioSensitivity, sensitivityRef.current / 100);
    };
    ascii.registerUniformSetter("audio", uniformSetter);
    return () => {
      ascii.unregisterUniformSetter("audio");
    };
  }, [ascii, enabled]);
  // Unmount cleanup: close the AudioContext (fire-and-forget promise).
  (0, import_react4.useEffect)(() => {
    return () => {
      if (audioContextRef.current) {
        audioContextRef.current.close();
      }
    };
  }, []);
}
694
+
695
// src/components/VideoToAscii.tsx
var import_jsx_runtime = require("react/jsx-runtime");
/**
 * Drop-in React component wrapping the hooks: a hidden <video> feeds a
 * WebGL canvas that shows the ASCII rendering, with optional mouse-trail,
 * click-ripple, and audio-reactive effects plus an FPS overlay.
 *
 * Props (all optional except src): fontSize, colored, blend (0-100),
 * highlight (0-100), charset key, maxWidth in px, enableMouse, trailLength,
 * enableRipple, rippleSpeed, audioReactivity (0 disables audio and mutes
 * the video), audioSensitivity, showStats, className.
 */
function VideoToAscii({
  src,
  fontSize = 10,
  colored = false,
  blend = 0,
  highlight = 0,
  charset = "standard",
  maxWidth = 900,
  enableMouse = true,
  trailLength = 24,
  enableRipple = false,
  rippleSpeed = 40,
  audioReactivity = 0,
  audioSensitivity = 50,
  showStats = false,
  className = ""
}) {
  const ascii = useVideoToAscii({
    fontSize,
    colored,
    blend,
    highlight,
    charset,
    maxWidth
  });
  const {
    containerRef,
    videoRef,
    canvasRef,
    stats,
    dimensions,
    isReady,
    isPlaying
  } = ascii;
  // Effect hooks attach themselves via the ascii uniform-setter registry;
  // their returned handlers are spread onto the container below.
  const mouseHandlers = useAsciiMouseEffect(ascii, {
    enabled: enableMouse,
    trailLength
  });
  const rippleHandlers = useAsciiRipple(ascii, {
    enabled: enableRipple,
    speed: rippleSpeed
  });
  useAsciiAudio(ascii, {
    enabled: audioReactivity > 0,
    reactivity: audioReactivity,
    sensitivity: audioSensitivity
  });
  // Container pixel size mirrors the canvas sizing done in initWebGL.
  const charWidth = fontSize * CHAR_WIDTH_RATIO;
  const pixelWidth = dimensions.cols * charWidth;
  const pixelHeight = dimensions.rows * fontSize;
  return /* @__PURE__ */ (0, import_jsx_runtime.jsxs)("div", { className: `video-to-ascii ${className}`, children: [
    // Hidden source video; muted whenever audio reactivity is off.
    // crossOrigin="anonymous" keeps the WebGL canvas untainted for
    // cross-origin sources that send CORS headers.
    /* @__PURE__ */ (0, import_jsx_runtime.jsx)(
      "video",
      {
        ref: videoRef,
        src,
        muted: audioReactivity === 0,
        loop: true,
        playsInline: true,
        crossOrigin: "anonymous",
        style: { display: "none" }
      }
    ),
    // Interactive container: mouse/ripple handlers are only spread in
    // when the corresponding effect is enabled.
    /* @__PURE__ */ (0, import_jsx_runtime.jsxs)(
      "div",
      __spreadProps(__spreadValues(__spreadValues({
        ref: containerRef,
        className: "relative cursor-pointer select-none overflow-hidden rounded",
        style: {
          width: pixelWidth || "100%",
          height: pixelHeight || "auto",
          backgroundColor: "#000"
        }
      }, enableMouse ? mouseHandlers : {}), enableRipple ? rippleHandlers : {}), {
        children: [
          /* @__PURE__ */ (0, import_jsx_runtime.jsx)(
            "canvas",
            {
              ref: canvasRef,
              style: {
                width: "100%",
                height: "100%",
                display: "block"
              }
            }
          ),
          // FPS / frame-time / grid-size overlay (opt-in via showStats).
          showStats && isReady && /* @__PURE__ */ (0, import_jsx_runtime.jsxs)("div", { className: "absolute top-2 left-2 bg-black/70 text-green-400 px-2 py-1 text-xs font-mono rounded", children: [
            stats.fps,
            " FPS | ",
            stats.frameTime.toFixed(2),
            "ms | ",
            dimensions.cols,
            "\xD7",
            dimensions.rows
          ] }),
          // Paused overlay prompting the spacebar shortcut wired up in
          // useVideoToAscii.
          !isPlaying && isReady && /* @__PURE__ */ (0, import_jsx_runtime.jsx)("div", { className: "absolute inset-0 flex items-center justify-center bg-black/50", children: /* @__PURE__ */ (0, import_jsx_runtime.jsx)("div", { className: "text-white text-lg", children: "\u25B6 Press Space to Play" }) })
        ]
      })
    )
  ] });
}
798
+ // Annotate the CommonJS export names for ESM import in node:
799
+ 0 && (module.exports = {
800
+ ASCII_CHARSETS,
801
+ VideoToAscii
802
+ });
803
+ //# sourceMappingURL=index.js.map