video2ascii 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs ADDED
@@ -0,0 +1,778 @@
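+ // Bundler prologue: esbuild-generated helpers that emulate object spread/rest ({ ...a, ...b }) in the compiled output.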
+ var __defProp = Object.defineProperty;
+ var __defProps = Object.defineProperties;
+ var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
+ var __getOwnPropSymbols = Object.getOwnPropertySymbols;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __propIsEnum = Object.prototype.propertyIsEnumerable;
+ var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+ var __spreadValues = (a, b) => {
+ for (var prop in b || (b = {}))
+ if (__hasOwnProp.call(b, prop))
+ __defNormalProp(a, prop, b[prop]);
+ if (__getOwnPropSymbols)
+ for (var prop of __getOwnPropSymbols(b)) {
+ if (__propIsEnum.call(b, prop))
+ __defNormalProp(a, prop, b[prop]);
+ }
+ return a;
+ };
+ var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
+
+ // src/hooks/useVideoToAscii.ts
+ import { useRef, useState, useCallback, useEffect, useMemo } from "react";
+
+ // src/lib/ascii-charsets.ts
+ var ASCII_CHARSETS = {
+ /** Classic 10-character gradient - good balance of detail and performance */
+ standard: {
+ name: "Standard",
+ chars: " .:-=+*#%@"
+ },
+ /** Unicode block characters - chunky retro aesthetic */
+ blocks: {
+ name: "Blocks",
+ chars: " \u2591\u2592\u2593\u2588"
+ },
+ /** Minimal 5-character set - high contrast, fast rendering */
+ minimal: {
+ name: "Minimal",
+ chars: " .oO@"
+ },
+ /** Binary on/off - pure silhouette mode */
+ binary: {
+ name: "Binary",
+ chars: " \u2588"
+ },
+ /** 70-character gradient - maximum detail, best for high resolution */
+ detailed: {
+ name: "Detailed",
+ chars: " .'`^\",:;Il!i><~+_-?][}{1)(|/tfjrxnuvczXYUJCLQ0OZmwqpdbkhao*#MW&8%B@$"
+ },
+ /** Dot-based - pointillist aesthetic */
+ dots: {
+ name: "Dots",
+ chars: " \xB7\u2022\u25CF"
+ },
+ /** Directional arrows - experimental */
+ arrows: {
+ name: "Arrows",
+ chars: " \u2190\u2199\u2193\u2198\u2192\u2197\u2191\u2196"
+ },
+ /** Moon phases - decorative gradient */
+ emoji: {
+ name: "Emoji",
+ chars: " \u2591\u2592\u2593\u{1F311}\u{1F312}\u{1F313}\u{1F314}\u{1F315}"
+ }
+ };
+ var DEFAULT_CHARSET = "standard";
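+ // getCharArray (below) spreads the string rather than calling split(""): spread iterates by code point, so astral glyphs like the moon-phase emoji above stay intact instead of being split into UTF-16 surrogate halves.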
+ function getCharArray(charset) {
+ return [...ASCII_CHARSETS[charset].chars];
+ }
+
+ // src/lib/webgl/shaders/vertex.glsl
+ var vertex_default = "#version 300 es\n\n// Fullscreen quad - passes texture coords to fragment shader\n\nin vec2 a_position;\nin vec2 a_texCoord;\nout vec2 v_texCoord;\n\nvoid main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n v_texCoord = a_texCoord;\n}\n";
+
+ // src/lib/webgl/shaders/fragment.glsl
+ var fragment_default = "#version 300 es\nprecision highp float;\n\n// Textures\nuniform sampler2D u_video;\nuniform sampler2D u_asciiAtlas;\n\n// Dimensions\nuniform vec2 u_resolution;\nuniform vec2 u_charSize;\nuniform vec2 u_gridSize;\nuniform float u_numChars;\n\n// Rendering options\nuniform bool u_colored;\nuniform float u_blend;\nuniform float u_highlight;\n\n// Audio\nuniform float u_audioLevel;\nuniform float u_audioReactivity;\nuniform float u_audioSensitivity;\n\n// Mouse\nuniform vec2 u_mouse;\nuniform float u_mouseRadius;\nuniform vec2 u_trail[24];\nuniform int u_trailLength;\n\n// Ripple\nuniform vec4 u_ripples[8];\nuniform float u_time;\nuniform float u_rippleEnabled;\nuniform float u_rippleSpeed;\n\nin vec2 v_texCoord;\nout vec4 fragColor;\n\nvoid main() {\n // Figure out which ASCII cell this pixel is in\n vec2 cellCoord = floor(v_texCoord * u_gridSize);\n vec2 thisCell = cellCoord;\n \n // Sample video at cell center (mipmaps handle averaging)\n vec2 cellCenter = (cellCoord + 0.5) / u_gridSize;\n vec4 videoColor = texture(u_video, cellCenter);\n \n // Perceived brightness using human eye sensitivity weights\n float baseBrightness = dot(videoColor.rgb, vec3(0.299, 0.587, 0.114));\n \n // Audio reactivity - louder = brighter, silence = darker\n float minBrightness = mix(0.3, 0.0, u_audioSensitivity);\n float maxBrightness = mix(1.0, 5.0, u_audioSensitivity);\n float audioMultiplier = mix(minBrightness, maxBrightness, u_audioLevel);\n float audioModulated = baseBrightness * audioMultiplier;\n float brightness = mix(baseBrightness, audioModulated, u_audioReactivity);\n \n // Cursor glow - blocky circle effect\n float cursorGlow = 0.0;\n float cursorRadius = 5.0;\n \n vec2 mouseCell = floor(u_mouse * u_gridSize);\n float cellDist = length(thisCell - mouseCell);\n if (cellDist <= cursorRadius && u_mouse.x >= 0.0) {\n cursorGlow += 1.0 - cellDist / cursorRadius;\n }\n \n // Trail effect\n for (int i = 0; i < 12; i++) {\n if (i >= u_trailLength) break;\n vec2 trailPos = u_trail[i];\n if (trailPos.x < 0.0) continue;\n \n vec2 trailCell = floor(trailPos * u_gridSize);\n float trailDist = length(thisCell - trailCell);\n float trailRadius = cursorRadius * 0.8;\n \n if (trailDist <= trailRadius) {\n float fade = 1.0 - float(i) / float(u_trailLength);\n cursorGlow += (1.0 - trailDist / trailRadius) * 0.5 * fade;\n }\n }\n cursorGlow = min(cursorGlow, 1.0);\n \n // Ripple effect - expanding rings on click\n float rippleGlow = 0.0;\n if (u_rippleEnabled > 0.5) {\n for (int i = 0; i < 8; i++) {\n vec4 ripple = u_ripples[i];\n if (ripple.w < 0.5) continue;\n \n float age = u_time - ripple.z;\n if (age < 0.0) continue;\n \n vec2 rippleCell = floor(ripple.xy * u_gridSize);\n float cellDist = length(thisCell - rippleCell);\n float initialRadius = 5.0;\n \n float distFromEdge = max(0.0, cellDist - initialRadius);\n float rippleSpeed = u_rippleSpeed;\n float reachTime = distFromEdge / rippleSpeed;\n float timeSinceReached = age - reachTime;\n \n float fadeDuration = 0.5;\n if (timeSinceReached >= 0.0 && timeSinceReached < fadeDuration) {\n float pop = 1.0 - timeSinceReached / fadeDuration;\n pop = pop * pop;\n rippleGlow += pop * 0.3;\n }\n }\n rippleGlow = min(rippleGlow, 1.0);\n }\n \n // Map brightness to character index (0 = darkest char, numChars-1 = brightest)\n float charIndex = floor(brightness * (u_numChars - 0.001));\n \n // Find the character in the atlas (horizontal strip of pre-rendered chars)\n float atlasX = charIndex / u_numChars;\n vec2 cellPos = fract(v_texCoord * u_gridSize);\n vec2 atlasCoord = vec2(atlasX + cellPos.x / u_numChars, cellPos.y);\n vec4 charColor = texture(u_asciiAtlas, atlasCoord);\n \n // Pick the color - video colors or green terminal aesthetic\n vec3 baseColor;\n if (u_colored) {\n baseColor = videoColor.rgb;\n } else {\n baseColor = vec3(0.0, 1.0, 0.0);\n }\n \n // Background highlight behind each character\n float bgIntensity = 0.15 + u_highlight * 0.35;\n vec3 bgColor = baseColor * bgIntensity;\n vec3 textColor = baseColor * 1.2;\n vec3 finalColor = mix(bgColor, textColor, charColor.r);\n \n // Add cursor and ripple glow\n finalColor += cursorGlow * baseColor * 0.5;\n finalColor += rippleGlow * baseColor;\n \n // Blend with original video if requested\n vec3 blendedColor = mix(finalColor, videoColor.rgb, u_blend);\n \n fragColor = vec4(blendedColor, 1.0);\n}\n";
+
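+ // NB: the fragment shader's trail loop runs i < 12 even though u_trail holds 24 entries, so trail positions past index 11 are uploaded but never drawn.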
+ // src/lib/webgl/utils.ts
+ function compileShader(gl, source, type) {
+ const shader = gl.createShader(type);
+ if (!shader) {
+ console.error("Failed to create shader");
+ return null;
+ }
+ gl.shaderSource(shader, source);
+ gl.compileShader(shader);
+ if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
+ console.error("Shader compile error:", gl.getShaderInfoLog(shader));
+ gl.deleteShader(shader);
+ return null;
+ }
+ return shader;
+ }
+ function createProgram(gl, vertexShader, fragmentShader) {
+ const program = gl.createProgram();
+ if (!program) {
+ console.error("Failed to create program");
+ return null;
+ }
+ gl.attachShader(program, vertexShader);
+ gl.attachShader(program, fragmentShader);
+ gl.linkProgram(program);
+ if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
+ console.error("Program link error:", gl.getProgramInfoLog(program));
+ gl.deleteProgram(program);
+ return null;
+ }
+ return program;
+ }
+ function createFullscreenQuad(gl, program) {
+ const positions = new Float32Array([
+ -1,
+ -1,
+ // bottom-left
+ 1,
+ -1,
+ // bottom-right
+ -1,
+ 1,
+ // top-left
+ -1,
+ 1,
+ // top-left
+ 1,
+ -1,
+ // bottom-right
+ 1,
+ 1
+ // top-right
+ ]);
+ const texCoords = new Float32Array([0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0]);
+ const posBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, posBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
+ const posLoc = gl.getAttribLocation(program, "a_position");
+ gl.enableVertexAttribArray(posLoc);
+ gl.vertexAttribPointer(posLoc, 2, gl.FLOAT, false, 0, 0);
+ const texBuffer = gl.createBuffer();
+ gl.bindBuffer(gl.ARRAY_BUFFER, texBuffer);
+ gl.bufferData(gl.ARRAY_BUFFER, texCoords, gl.STATIC_DRAW);
+ const texLoc = gl.getAttribLocation(program, "a_texCoord");
+ gl.enableVertexAttribArray(texLoc);
+ gl.vertexAttribPointer(texLoc, 2, gl.FLOAT, false, 0, 0);
+ }
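+ // Texture V coordinates above are flipped relative to clip-space Y (the bottom-left vertex samples v = 1), so the video frame is not drawn upside down.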
+ function createVideoTexture(gl) {
+ const texture = gl.createTexture();
+ if (!texture) return null;
+ gl.bindTexture(gl.TEXTURE_2D, texture);
+ gl.texParameteri(
+ gl.TEXTURE_2D,
+ gl.TEXTURE_MIN_FILTER,
+ gl.LINEAR_MIPMAP_LINEAR
+ );
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+ return texture;
+ }
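+ // LINEAR_MIPMAP_LINEAR matters here: render() regenerates mipmaps every frame, so the shader's single sample at each cell center is effectively an average over the whole cell.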
+ function createAsciiAtlas(gl, chars, charSize = 64) {
+ const canvas = document.createElement("canvas");
+ canvas.width = charSize * chars.length;
+ canvas.height = charSize;
+ const ctx = canvas.getContext("2d");
+ if (!ctx) return null;
+ ctx.fillStyle = "#000";
+ ctx.fillRect(0, 0, canvas.width, canvas.height);
+ ctx.fillStyle = "#fff";
+ ctx.font = `${charSize * 0.8}px monospace`;
+ ctx.textAlign = "center";
+ ctx.textBaseline = "middle";
+ for (let i = 0; i < chars.length; i++) {
+ const x = i * charSize + charSize / 2;
+ const y = charSize / 2;
+ ctx.fillText(chars[i], x, y);
+ }
+ const texture = gl.createTexture();
+ if (!texture) return null;
+ gl.bindTexture(gl.TEXTURE_2D, texture);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+ return texture;
+ }
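+ // The atlas is one horizontal strip of white-on-black glyphs, darkest to brightest; the shader uses the sampled red channel as a coverage mask when mixing glyph over background.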
+ function calculateGridDimensions(videoWidth, videoHeight, cols) {
+ const aspectRatio = videoWidth / videoHeight;
+ const rows = Math.round(cols / aspectRatio / 2);
+ return { cols, rows };
+ }
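+ // Rows are halved because a monospace cell is roughly twice as tall as it is wide; CHAR_WIDTH_RATIO below refines that to 0.6 for pixel sizing.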
+
+ // src/lib/webgl/types.ts
+ var CHAR_WIDTH_RATIO = 0.6;
+
+ // src/hooks/useVideoToAscii.ts
+ var MAX_TRAIL_LENGTH = 24;
+ var MAX_RIPPLES = 8;
+ function useVideoToAscii(options = {}) {
+ const {
+ fontSize = 10,
+ colored = false,
+ blend = 0,
+ highlight = 0,
+ charset = DEFAULT_CHARSET,
+ maxWidth = 900,
+ onStats
+ } = options;
+ const containerRef = useRef(null);
+ const videoRef = useRef(null);
+ const canvasRef = useRef(null);
+ const glRef = useRef(null);
+ const programRef = useRef(null);
+ const videoTextureRef = useRef(null);
+ const atlasTextureRef = useRef(null);
+ const animationRef = useRef(0);
+ const uniformSettersRef = useRef(/* @__PURE__ */ new Map());
+ const uniformLocationsRef = useRef(null);
+ const frameCountRef = useRef(0);
+ const frameTimesRef = useRef([]);
+ const lastFpsTimeRef = useRef(performance.now());
+ const [dimensions, setDimensions] = useState({ cols: 80, rows: 24 });
+ const [stats, setStats] = useState({ fps: 0, frameTime: 0 });
+ const [isReady, setIsReady] = useState(false);
+ const [isPlaying, setIsPlaying] = useState(false);
+ const charWidth = fontSize * CHAR_WIDTH_RATIO;
+ const cols = Math.floor(maxWidth / charWidth);
+ const chars = useMemo(() => getCharArray(charset), [charset]);
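+ // The register/unregister pair below is the extension point: the mouse, ripple, and audio hooks each contribute a per-frame uniform setter keyed by id, and render() walks this Map on every frame.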
+ const registerUniformSetter = useCallback(
+ (id, setter) => {
+ uniformSettersRef.current.set(id, setter);
+ },
+ []
+ );
+ const unregisterUniformSetter = useCallback((id) => {
+ uniformSettersRef.current.delete(id);
+ }, []);
+ const cacheUniformLocations = useCallback(
+ (gl, program) => {
+ const get = (name) => gl.getUniformLocation(program, name);
+ return {
+ // Core uniforms
+ u_video: get("u_video"),
+ u_asciiAtlas: get("u_asciiAtlas"),
+ u_resolution: get("u_resolution"),
+ u_charSize: get("u_charSize"),
+ u_gridSize: get("u_gridSize"),
+ u_numChars: get("u_numChars"),
+ u_colored: get("u_colored"),
+ u_blend: get("u_blend"),
+ u_highlight: get("u_highlight"),
+ // Mouse uniforms
+ u_mouse: get("u_mouse"),
+ u_mouseRadius: get("u_mouseRadius"),
+ u_trailLength: get("u_trailLength"),
+ u_trail: Array.from(
+ { length: MAX_TRAIL_LENGTH },
+ (_, i) => get(`u_trail[${i}]`)
+ ),
+ // Ripple uniforms
+ u_time: get("u_time"),
+ u_rippleEnabled: get("u_rippleEnabled"),
+ u_rippleSpeed: get("u_rippleSpeed"),
+ u_ripples: Array.from(
+ { length: MAX_RIPPLES },
+ (_, i) => get(`u_ripples[${i}]`)
+ ),
+ // Audio uniforms
+ u_audioLevel: get("u_audioLevel"),
+ u_audioReactivity: get("u_audioReactivity"),
+ u_audioSensitivity: get("u_audioSensitivity")
+ };
+ },
+ []
+ );
+ const initWebGL = useCallback(() => {
+ const canvas = canvasRef.current;
+ const video = videoRef.current;
+ if (!canvas || !video || !video.videoWidth) return false;
+ const grid = calculateGridDimensions(
+ video.videoWidth,
+ video.videoHeight,
+ cols
+ );
+ setDimensions(grid);
+ const pixelWidth = grid.cols * charWidth;
+ const pixelHeight = grid.rows * fontSize;
+ canvas.width = pixelWidth;
+ canvas.height = pixelHeight;
+ const gl = canvas.getContext("webgl2", {
+ antialias: false,
+ preserveDrawingBuffer: false
+ });
+ if (!gl) {
+ console.error("WebGL2 not supported");
+ return false;
+ }
+ glRef.current = gl;
+ const vertexShader = compileShader(gl, vertex_default, gl.VERTEX_SHADER);
+ const fragmentShader = compileShader(
+ gl,
+ fragment_default,
+ gl.FRAGMENT_SHADER
+ );
+ if (!vertexShader || !fragmentShader) return false;
+ const program = createProgram(gl, vertexShader, fragmentShader);
+ if (!program) return false;
+ programRef.current = program;
+ gl.useProgram(program);
+ createFullscreenQuad(gl, program);
+ videoTextureRef.current = createVideoTexture(gl);
+ atlasTextureRef.current = createAsciiAtlas(gl, chars, fontSize);
+ const locations = cacheUniformLocations(gl, program);
+ uniformLocationsRef.current = locations;
+ gl.uniform1i(locations.u_video, 0);
+ gl.uniform1i(locations.u_asciiAtlas, 1);
+ gl.uniform2f(locations.u_resolution, pixelWidth, pixelHeight);
+ gl.uniform2f(locations.u_charSize, charWidth, fontSize);
+ gl.uniform2f(locations.u_gridSize, cols, grid.rows);
+ gl.uniform1f(locations.u_numChars, chars.length);
+ gl.uniform2f(locations.u_mouse, -1, -1);
+ gl.uniform1f(locations.u_mouseRadius, 0);
+ gl.uniform1i(locations.u_trailLength, 0);
+ gl.uniform1f(locations.u_rippleEnabled, 0);
+ gl.uniform1f(locations.u_audioLevel, 0);
+ gl.uniform1f(locations.u_audioReactivity, 0);
+ gl.uniform1f(locations.u_audioSensitivity, 0);
+ gl.viewport(0, 0, pixelWidth, pixelHeight);
+ setIsReady(true);
+ return true;
+ }, [cols, charWidth, fontSize, chars, cacheUniformLocations]);
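+ // render() below re-uploads the current video frame into texture unit 0 and regenerates its mipmaps before each draw; FPS is counted over one-second windows and frameTime averaged over the last 60 frames.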
+ const render = useCallback(() => {
+ const gl = glRef.current;
+ const video = videoRef.current;
+ const program = programRef.current;
+ const locations = uniformLocationsRef.current;
+ if (!gl || !video || !program || !locations || video.paused || video.ended)
+ return;
+ const frameStart = performance.now();
+ gl.activeTexture(gl.TEXTURE0);
+ gl.bindTexture(gl.TEXTURE_2D, videoTextureRef.current);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
+ gl.generateMipmap(gl.TEXTURE_2D);
+ gl.activeTexture(gl.TEXTURE1);
+ gl.bindTexture(gl.TEXTURE_2D, atlasTextureRef.current);
+ gl.uniform1i(locations.u_colored, colored ? 1 : 0);
+ gl.uniform1f(locations.u_blend, blend / 100);
+ gl.uniform1f(locations.u_highlight, highlight / 100);
+ for (const setter of uniformSettersRef.current.values()) {
+ setter(gl, program, locations);
+ }
+ gl.drawArrays(gl.TRIANGLES, 0, 6);
+ const frameEnd = performance.now();
+ frameCountRef.current++;
+ frameTimesRef.current.push(frameEnd - frameStart);
+ if (frameTimesRef.current.length > 60) frameTimesRef.current.shift();
+ const now = performance.now();
+ if (now - lastFpsTimeRef.current >= 1e3) {
+ const avgFrameTime = frameTimesRef.current.reduce((a, b) => a + b, 0) / frameTimesRef.current.length;
+ const newStats = { fps: frameCountRef.current, frameTime: avgFrameTime };
+ setStats(newStats);
+ onStats == null ? void 0 : onStats(newStats);
+ frameCountRef.current = 0;
+ lastFpsTimeRef.current = now;
+ }
+ animationRef.current = requestAnimationFrame(render);
+ }, [colored, blend, highlight, onStats]);
+ useEffect(() => {
+ const video = videoRef.current;
+ if (!video) return;
+ const handleLoadedMetadata = () => {
+ initWebGL();
+ };
+ const handlePlay = () => {
+ setIsPlaying(true);
+ animationRef.current = requestAnimationFrame(render);
+ };
+ const handlePause = () => {
+ setIsPlaying(false);
+ cancelAnimationFrame(animationRef.current);
+ };
+ const handleEnded = () => {
+ setIsPlaying(false);
+ cancelAnimationFrame(animationRef.current);
+ };
+ video.addEventListener("loadedmetadata", handleLoadedMetadata);
+ video.addEventListener("play", handlePlay);
+ video.addEventListener("pause", handlePause);
+ video.addEventListener("ended", handleEnded);
+ if (video.readyState >= 1) {
+ handleLoadedMetadata();
+ }
+ return () => {
+ video.removeEventListener("loadedmetadata", handleLoadedMetadata);
+ video.removeEventListener("play", handlePlay);
+ video.removeEventListener("pause", handlePause);
+ video.removeEventListener("ended", handleEnded);
+ cancelAnimationFrame(animationRef.current);
+ };
+ }, [initWebGL, render]);
+ useEffect(() => {
+ if (videoRef.current && videoRef.current.readyState >= 1) {
+ initWebGL();
+ }
+ }, [initWebGL]);
+ useEffect(() => {
+ return () => {
+ const gl = glRef.current;
+ if (gl) {
+ if (videoTextureRef.current) gl.deleteTexture(videoTextureRef.current);
+ if (atlasTextureRef.current) gl.deleteTexture(atlasTextureRef.current);
+ if (programRef.current) gl.deleteProgram(programRef.current);
+ }
+ cancelAnimationFrame(animationRef.current);
+ };
+ }, []);
+ const play = useCallback(() => {
+ var _a;
+ (_a = videoRef.current) == null ? void 0 : _a.play();
+ }, []);
+ const pause = useCallback(() => {
+ var _a;
+ (_a = videoRef.current) == null ? void 0 : _a.pause();
+ }, []);
+ const toggle = useCallback(() => {
+ const video = videoRef.current;
+ if (!video) return;
+ if (video.paused) {
+ video.play();
+ } else {
+ video.pause();
+ }
+ }, []);
+ useEffect(() => {
+ const handleKeyDown = (e) => {
+ if (e.code === "Space" && e.target === document.body) {
+ e.preventDefault();
+ toggle();
+ }
+ };
+ window.addEventListener("keydown", handleKeyDown);
+ return () => window.removeEventListener("keydown", handleKeyDown);
+ }, [toggle]);
+ return {
+ containerRef,
+ videoRef,
+ canvasRef,
+ glRef,
+ programRef,
+ uniformLocationsRef,
+ registerUniformSetter,
+ unregisterUniformSetter,
+ dimensions,
+ stats,
+ isReady,
+ isPlaying,
+ play,
+ pause,
+ toggle
+ };
+ }
+
+ // src/hooks/useAsciiMouseEffect.ts
+ import { useCallback as useCallback2, useEffect as useEffect2, useRef as useRef2 } from "react";
+ var MAX_TRAIL_LENGTH2 = 24;
+ function useAsciiMouseEffect(ascii, options = {}) {
+ const { enabled = true, trailLength = 24 } = options;
+ const mouseRef = useRef2({ x: -1, y: -1 });
+ const trailRef = useRef2([]);
+ const enabledRef = useRef2(enabled);
+ const trailLengthRef = useRef2(trailLength);
+ useEffect2(() => {
+ enabledRef.current = enabled;
+ trailLengthRef.current = trailLength;
+ }, [enabled, trailLength]);
+ useEffect2(() => {
+ if (!enabled) return;
+ const uniformSetter = (gl, _program, locations) => {
+ gl.uniform2f(locations.u_mouse, mouseRef.current.x, mouseRef.current.y);
+ const trail = trailRef.current;
+ gl.uniform1i(locations.u_trailLength, trail.length);
+ for (let i = 0; i < MAX_TRAIL_LENGTH2; i++) {
+ const loc = locations.u_trail[i];
+ if (loc) {
+ const pos = trail[i] || { x: -1, y: -1 };
+ gl.uniform2f(loc, pos.x, pos.y);
+ }
+ }
+ };
+ ascii.registerUniformSetter("mouse", uniformSetter);
+ return () => {
+ ascii.unregisterUniformSetter("mouse");
+ };
+ }, [ascii, enabled]);
+ const onMouseMove = useCallback2((e) => {
+ if (!enabledRef.current) return;
+ const rect = e.currentTarget.getBoundingClientRect();
+ const newPos = {
+ // Convert pixel coords to 0-1 range
+ x: (e.clientX - rect.left) / rect.width,
+ y: (e.clientY - rect.top) / rect.height
+ };
+ if (mouseRef.current.x >= 0) {
+ trailRef.current.unshift(__spreadValues({}, mouseRef.current));
+ if (trailRef.current.length > trailLengthRef.current) {
+ trailRef.current.pop();
+ }
+ }
+ mouseRef.current = newPos;
+ }, []);
+ const onMouseLeave = useCallback2(() => {
+ mouseRef.current = { x: -1, y: -1 };
+ trailRef.current = [];
+ }, []);
+ return { onMouseMove, onMouseLeave };
+ }
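+ // Mirroring enabled/trailLength into refs lets onMouseMove and the per-frame setter read fresh values without re-registering; the trail itself is the list of previous cursor positions, newest first.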
+
+ // src/hooks/useAsciiRipple.ts
+ import { useCallback as useCallback3, useEffect as useEffect3, useRef as useRef3 } from "react";
+ var MAX_RIPPLES2 = 8;
+ function useAsciiRipple(ascii, options = {}) {
+ const { enabled = false, speed = 40 } = options;
+ const ripplesRef = useRef3([]);
+ const enabledRef = useRef3(enabled);
+ const speedRef = useRef3(speed);
+ useEffect3(() => {
+ enabledRef.current = enabled;
+ speedRef.current = speed;
+ }, [enabled, speed]);
+ useEffect3(() => {
+ if (!enabled) return;
+ const uniformSetter = (gl, _program, locations) => {
+ const currentTime = performance.now() / 1e3;
+ gl.uniform1f(locations.u_time, currentTime);
+ gl.uniform1f(locations.u_rippleEnabled, 1);
+ gl.uniform1f(locations.u_rippleSpeed, speedRef.current);
+ const maxDist = Math.sqrt(
+ ascii.dimensions.cols ** 2 + ascii.dimensions.rows ** 2
+ );
+ const maxLifetime = maxDist / speedRef.current + 1;
+ ripplesRef.current = ripplesRef.current.filter(
+ (r) => currentTime - r.startTime < maxLifetime
+ );
+ for (let i = 0; i < MAX_RIPPLES2; i++) {
+ const loc = locations.u_ripples[i];
+ if (loc) {
+ const ripple = ripplesRef.current[i];
+ if (ripple) {
+ gl.uniform4f(loc, ripple.x, ripple.y, ripple.startTime, 1);
+ } else {
+ gl.uniform4f(loc, 0, 0, 0, 0);
+ }
+ }
+ }
+ };
+ ascii.registerUniformSetter("ripple", uniformSetter);
+ return () => {
+ ascii.unregisterUniformSetter("ripple");
+ };
+ }, [ascii, enabled]);
+ const onClick = useCallback3((e) => {
+ if (!enabledRef.current) return;
+ const rect = e.currentTarget.getBoundingClientRect();
+ const x = (e.clientX - rect.left) / rect.width;
+ const y = (e.clientY - rect.top) / rect.height;
+ ripplesRef.current.unshift({
+ x,
+ y,
+ startTime: performance.now() / 1e3
+ });
+ if (ripplesRef.current.length > MAX_RIPPLES2) {
+ ripplesRef.current.pop();
+ }
+ }, []);
+ return { onClick };
+ }
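+ // Each ripple is packed into a vec4 as (x, y, startTime, active flag); a ripple is dropped once its wavefront has had time to cross the grid diagonal at u_rippleSpeed cells per second.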
+
+ // src/hooks/useAsciiAudio.ts
+ import { useEffect as useEffect4, useRef as useRef4 } from "react";
+ function useAsciiAudio(ascii, options = {}) {
+ const { enabled = false, reactivity = 50, sensitivity = 50 } = options;
+ const audioContextRef = useRef4(null);
+ const analyzerRef = useRef4(null);
+ const sourceRef = useRef4(null);
+ const dataArrayRef = useRef4(null);
+ const volumeRef = useRef4(0);
+ const connectedVideoRef = useRef4(null);
+ const enabledRef = useRef4(enabled);
+ const reactivityRef = useRef4(reactivity);
+ const sensitivityRef = useRef4(sensitivity);
+ useEffect4(() => {
+ enabledRef.current = enabled;
+ reactivityRef.current = reactivity;
+ sensitivityRef.current = sensitivity;
+ }, [enabled, reactivity, sensitivity]);
+ const updateVolume = () => {
+ const analyzer = analyzerRef.current;
+ const dataArray = dataArrayRef.current;
+ if (!analyzer || !dataArray) return;
+ analyzer.getByteFrequencyData(dataArray);
+ let sum = 0;
+ for (let i = 0; i < dataArray.length; i++) {
+ sum += dataArray[i];
+ }
+ const average = sum / dataArray.length / 255;
+ volumeRef.current = volumeRef.current * 0.7 + average * 0.3;
+ };
+ useEffect4(() => {
+ if (!enabled) return;
+ const video = ascii.videoRef.current;
+ if (!video) return;
+ const connectAudio = () => {
+ if (connectedVideoRef.current === video && audioContextRef.current) {
+ audioContextRef.current.resume();
+ return;
+ }
+ try {
+ if (!audioContextRef.current) {
+ audioContextRef.current = new AudioContext();
+ }
+ const ctx = audioContextRef.current;
+ const analyzer = ctx.createAnalyser();
+ analyzer.fftSize = 256;
+ analyzer.smoothingTimeConstant = 0.8;
+ analyzerRef.current = analyzer;
+ dataArrayRef.current = new Uint8Array(
+ analyzer.frequencyBinCount
+ );
+ const source = ctx.createMediaElementSource(video);
+ source.connect(analyzer);
+ analyzer.connect(ctx.destination);
+ sourceRef.current = source;
+ connectedVideoRef.current = video;
+ ctx.resume();
+ } catch (error) {
+ console.warn("Failed to connect audio analyzer:", error);
+ }
+ };
+ const handlePlay = () => {
+ connectAudio();
+ };
+ video.addEventListener("play", handlePlay);
+ if (!video.paused) {
+ connectAudio();
+ }
+ return () => {
+ video.removeEventListener("play", handlePlay);
+ };
+ }, [ascii.videoRef, enabled]);
+ useEffect4(() => {
+ if (!enabled) return;
+ const uniformSetter = (gl, _program, locations) => {
+ updateVolume();
+ gl.uniform1f(locations.u_audioLevel, volumeRef.current);
+ gl.uniform1f(locations.u_audioReactivity, reactivityRef.current / 100);
+ gl.uniform1f(locations.u_audioSensitivity, sensitivityRef.current / 100);
+ };
+ ascii.registerUniformSetter("audio", uniformSetter);
+ return () => {
+ ascii.unregisterUniformSetter("audio");
+ };
+ }, [ascii, enabled]);
+ useEffect4(() => {
+ return () => {
+ if (audioContextRef.current) {
+ audioContextRef.current.close();
+ }
+ };
+ }, []);
+ }
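+ // volumeRef is an exponential moving average (0.7 old + 0.3 new) of normalized frequency-bin amplitude, which damps frame-to-frame flicker; the connectedVideoRef guard matters because createMediaElementSource() can only be called once per media element.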
+
+ // src/components/VideoToAscii.tsx
+ import { jsx, jsxs } from "react/jsx-runtime";
+ function VideoToAscii({
+ src,
+ fontSize = 10,
+ colored = false,
+ blend = 0,
+ highlight = 0,
+ charset = "standard",
+ maxWidth = 900,
+ enableMouse = true,
+ trailLength = 24,
+ enableRipple = false,
+ rippleSpeed = 40,
+ audioReactivity = 0,
+ audioSensitivity = 50,
+ showStats = false,
+ className = ""
+ }) {
+ const ascii = useVideoToAscii({
+ fontSize,
+ colored,
+ blend,
+ highlight,
+ charset,
+ maxWidth
+ });
+ const {
+ containerRef,
+ videoRef,
+ canvasRef,
+ stats,
+ dimensions,
+ isReady,
+ isPlaying
+ } = ascii;
+ const mouseHandlers = useAsciiMouseEffect(ascii, {
+ enabled: enableMouse,
+ trailLength
+ });
+ const rippleHandlers = useAsciiRipple(ascii, {
+ enabled: enableRipple,
+ speed: rippleSpeed
+ });
+ useAsciiAudio(ascii, {
+ enabled: audioReactivity > 0,
+ reactivity: audioReactivity,
+ sensitivity: audioSensitivity
+ });
+ const charWidth = fontSize * CHAR_WIDTH_RATIO;
+ const pixelWidth = dimensions.cols * charWidth;
+ const pixelHeight = dimensions.rows * fontSize;
+ return /* @__PURE__ */ jsxs("div", { className: `video-to-ascii ${className}`, children: [
+ /* @__PURE__ */ jsx(
+ "video",
+ {
+ ref: videoRef,
+ src,
+ muted: audioReactivity === 0,
+ loop: true,
+ playsInline: true,
+ crossOrigin: "anonymous",
+ style: { display: "none" }
+ }
+ ),
+ /* @__PURE__ */ jsxs(
+ "div",
+ __spreadProps(__spreadValues(__spreadValues({
+ ref: containerRef,
+ className: "relative cursor-pointer select-none overflow-hidden rounded",
+ style: {
+ width: pixelWidth || "100%",
+ height: pixelHeight || "auto",
+ backgroundColor: "#000"
+ }
+ }, enableMouse ? mouseHandlers : {}), enableRipple ? rippleHandlers : {}), {
+ children: [
+ /* @__PURE__ */ jsx(
+ "canvas",
+ {
+ ref: canvasRef,
+ style: {
+ width: "100%",
+ height: "100%",
+ display: "block"
+ }
+ }
+ ),
+ showStats && isReady && /* @__PURE__ */ jsxs("div", { className: "absolute top-2 left-2 bg-black/70 text-green-400 px-2 py-1 text-xs font-mono rounded", children: [
+ stats.fps,
+ " FPS | ",
+ stats.frameTime.toFixed(2),
+ "ms | ",
+ dimensions.cols,
+ "\xD7",
+ dimensions.rows
+ ] }),
+ !isPlaying && isReady && /* @__PURE__ */ jsx("div", { className: "absolute inset-0 flex items-center justify-center bg-black/50", children: /* @__PURE__ */ jsx("div", { className: "text-white text-lg", children: "\u25B6 Press Space to Play" }) })
+ ]
+ })
+ )
+ ] });
+ }
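+ // The hidden <video> above stays muted unless audio reactivity is requested; when it is, sound reaches the speakers through the analyser chain (source -> analyser -> destination).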
+ export {
+ ASCII_CHARSETS,
+ VideoToAscii
+ };
+ //# sourceMappingURL=index.mjs.map
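
For reference, a minimal usage sketch of the exported component, not part of the published file above. It assumes the import specifier matches the package name `video2ascii`; the `App` component and the `/demo.mp4` URL are placeholders. All props shown exist in the component signature in the diff.

import { VideoToAscii } from "video2ascii";

function App() {
  // Green-terminal ASCII rendering of a looping clip,
  // with click ripples and the FPS overlay enabled.
  return (
    <VideoToAscii
      src="/demo.mp4"
      charset="blocks"
      enableRipple
      showStats
    />
  );
}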