video2ascii 1.0.1 → 1.1.0
This diff compares the published contents of two public registry releases of the package. It is provided for informational purposes only and reflects the versions exactly as they appear in the registry.
- package/README.md +27 -20
- package/dist/index.d.mts +11 -15
- package/dist/index.d.ts +11 -15
- package/dist/index.js +99 -49
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +98 -48
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/README.md
CHANGED

@@ -13,37 +13,44 @@ npm install video2ascii
 ## Usage
 
 ```tsx
-import
+import Video2Ascii from "video2ascii";
 
-<
+<Video2Ascii
   src="/video.mp4"
-
+  numColumns={120}
   colored={true}
-
+  brightness={1.0}
+  audioEffect={50}
   enableMouse={true}
   enableRipple={true}
   charset="detailed"
+  isPlaying={true}
+  autoPlay={true}
 />;
 ```
 
 ## Props
 
-| Prop
-
-| `src`
-| `
-| `colored`
-| `
-| `
-| `
-| `
-| `enableMouse`
-| `trailLength`
-| `enableRipple`
-| `rippleSpeed`
-| `
-| `
-| `
+| Prop | Type | Default | Description |
+| ---------------------- | ------------ | ------------ | ------------------------------------------------- |
+| `src` | `string` | required | Video URL |
+| `numColumns` | `number` | - | Number of columns (controls size) |
+| `colored` | `boolean` | `true` | Use video colors vs green terminal |
+| `brightness` | `number` | `1.0` | Brightness multiplier (0-2, 1.0 = normal) |
+| `blend` | `number` | `0` | 0 = ASCII, 100 = original video |
+| `highlight` | `number` | `0` | Background behind characters (0-100) |
+| `charset` | `CharsetKey` | `"standard"` | Character set |
+| `enableMouse` | `boolean` | `true` | Cursor glow effect |
+| `trailLength` | `number` | `24` | Mouse trail length |
+| `enableRipple` | `boolean` | `false` | Click ripple effect |
+| `rippleSpeed` | `number` | `40` | Ripple expansion speed |
+| `audioEffect` | `number` | `0` | How much audio affects brightness (0-100) |
+| `audioRange` | `number` | `50` | How dramatic audio brightness changes are (0-100) |
+| `isPlaying` | `boolean` | `true` | Whether video is playing |
+| `autoPlay` | `boolean` | `true` | Auto-play on load |
+| `enableSpacebarToggle` | `boolean` | `false` | Enable spacebar to toggle play/pause |
+| `showStats` | `boolean` | `false` | Show FPS overlay |
+| `className` | `string` | `""` | CSS class name |
 
 ## Character Sets
 
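For orientation, here is a minimal sketch of how the props added in 1.1.0 might be driven from React state. It is based only on the README table above; the `App` wrapper, the button, and the `/video.mp4` path are illustrative placeholders, not part of the package.

```tsx
import { useState } from "react";
import Video2Ascii from "video2ascii";

// Controlled playback: toggle the new `isPlaying` prop instead of relying on
// autoPlay alone. Prop names come from the 1.1.0 README; values are examples.
export function App() {
  const [playing, setPlaying] = useState(true);

  return (
    <div>
      <button onClick={() => setPlaying((p) => !p)}>
        {playing ? "Pause" : "Play"}
      </button>
      <Video2Ascii
        src="/video.mp4"
        numColumns={120}
        brightness={1.2}
        audioEffect={50}
        autoPlay={true}
        isPlaying={playing}
      />
    </div>
  );
}
```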
package/dist/index.d.mts
CHANGED

@@ -57,31 +57,27 @@ declare const ASCII_CHARSETS: {
 /** Type-safe key for selecting character sets */
 type CharsetKey = keyof typeof ASCII_CHARSETS;
 
-interface
-
-
-}
-interface UseVideoToAsciiOptions {
-    fontSize?: number;
+interface VideoToAsciiProps {
+    src: string;
+    numColumns?: number;
     colored?: boolean;
     blend?: number;
     highlight?: number;
+    brightness?: number;
     charset?: CharsetKey;
-    maxWidth?: number;
-    onStats?: (stats: AsciiStats) => void;
-}
-interface VideoToAsciiProps extends UseVideoToAsciiOptions {
-    src: string;
     enableMouse?: boolean;
     trailLength?: number;
     enableRipple?: boolean;
     rippleSpeed?: number;
-
-
+    audioEffect?: number;
+    audioRange?: number;
+    isPlaying?: boolean;
+    autoPlay?: boolean;
+    enableSpacebarToggle?: boolean;
     showStats?: boolean;
     className?: string;
 }
 
-declare function
+declare function Video2Ascii({ src, numColumns, colored, blend, highlight, brightness, charset, enableMouse, trailLength, enableRipple, rippleSpeed, audioEffect, audioRange, isPlaying, autoPlay, enableSpacebarToggle, showStats, className, }: VideoToAsciiProps): react_jsx_runtime.JSX.Element;
 
-export { ASCII_CHARSETS, type CharsetKey,
+export { ASCII_CHARSETS, type CharsetKey, Video2Ascii, type VideoToAsciiProps, Video2Ascii as default };
package/dist/index.d.ts
CHANGED

@@ -57,31 +57,27 @@ declare const ASCII_CHARSETS: {
 /** Type-safe key for selecting character sets */
 type CharsetKey = keyof typeof ASCII_CHARSETS;
 
-interface
-
-
-}
-interface UseVideoToAsciiOptions {
-    fontSize?: number;
+interface VideoToAsciiProps {
+    src: string;
+    numColumns?: number;
     colored?: boolean;
     blend?: number;
     highlight?: number;
+    brightness?: number;
     charset?: CharsetKey;
-    maxWidth?: number;
-    onStats?: (stats: AsciiStats) => void;
-}
-interface VideoToAsciiProps extends UseVideoToAsciiOptions {
-    src: string;
     enableMouse?: boolean;
     trailLength?: number;
     enableRipple?: boolean;
     rippleSpeed?: number;
-
-
+    audioEffect?: number;
+    audioRange?: number;
+    isPlaying?: boolean;
+    autoPlay?: boolean;
+    enableSpacebarToggle?: boolean;
     showStats?: boolean;
     className?: string;
 }
 
-declare function
+declare function Video2Ascii({ src, numColumns, colored, blend, highlight, brightness, charset, enableMouse, trailLength, enableRipple, rippleSpeed, audioEffect, audioRange, isPlaying, autoPlay, enableSpacebarToggle, showStats, className, }: VideoToAsciiProps): react_jsx_runtime.JSX.Element;
 
-export { ASCII_CHARSETS, type CharsetKey,
+export { ASCII_CHARSETS, type CharsetKey, Video2Ascii, type VideoToAsciiProps, Video2Ascii as default };
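As a sketch of what the flattened declaration means for consumers, the new `VideoToAsciiProps` type can be imported and used to build a reusable, type-checked preset. Only names exported by the 1.1.0 declaration files are used below; the preset values and the `/clip.mp4` path are assumptions for illustration.

```tsx
import Video2Ascii, {
  ASCII_CHARSETS,
  type CharsetKey,
  type VideoToAsciiProps,
} from "video2ascii";

// `src` is the only required field in the flattened interface, so a shared
// preset can cover everything else and stay fully type-checked.
const preset: Omit<VideoToAsciiProps, "src"> = {
  numColumns: 100,
  brightness: 1.0,
  audioEffect: 25,
  charset: "detailed",
  enableSpacebarToggle: true,
};

// CharsetKey is keyof typeof ASCII_CHARSETS, so the valid names can also be
// enumerated at runtime.
export const charsetNames = Object.keys(ASCII_CHARSETS) as CharsetKey[];

export const Demo = () => <Video2Ascii src="/clip.mp4" {...preset} />;
```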
package/dist/index.js
CHANGED

@@ -38,10 +38,14 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 var index_exports = {};
 __export(index_exports, {
   ASCII_CHARSETS: () => ASCII_CHARSETS,
-
+  Video2Ascii: () => Video2Ascii,
+  default: () => VideoToAscii_default
 });
 module.exports = __toCommonJS(index_exports);
 
+// src/components/VideoToAscii.tsx
+var import_react5 = require("react");
+
 // src/hooks/useVideoToAscii.ts
 var import_react = require("react");
 
@@ -97,7 +101,7 @@ function getCharArray(charset) {
 var vertex_default = "#version 300 es\n\n// Fullscreen quad - passes texture coords to fragment shader\n\nin vec2 a_position;\nin vec2 a_texCoord;\nout vec2 v_texCoord;\n\nvoid main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n v_texCoord = a_texCoord;\n}\n";
 
 // src/lib/webgl/shaders/fragment.glsl
-var fragment_default = "#version 300 es\nprecision highp float;\n\n// Textures\nuniform sampler2D u_video;\nuniform sampler2D u_asciiAtlas;\n\n// Dimensions\nuniform vec2 u_resolution;\nuniform vec2 u_charSize;\nuniform vec2 u_gridSize;\nuniform float u_numChars;\n\n// Rendering options\nuniform bool u_colored;\nuniform float u_blend;\nuniform float u_highlight;\n\n// Audio\nuniform float u_audioLevel;\nuniform float u_audioReactivity;\nuniform float u_audioSensitivity;\n\n// Mouse\nuniform vec2 u_mouse;\nuniform float u_mouseRadius;\nuniform vec2 u_trail[24];\nuniform int u_trailLength;\n\n// Ripple\nuniform vec4 u_ripples[8];\nuniform float u_time;\nuniform float u_rippleEnabled;\nuniform float u_rippleSpeed;\n\nin vec2 v_texCoord;\nout vec4 fragColor;\n\nvoid main() {\n // Figure out which ASCII cell this pixel is in\n vec2 cellCoord = floor(v_texCoord * u_gridSize);\n vec2 thisCell = cellCoord;\n \n // Sample video at cell center (mipmaps handle averaging)\n vec2 cellCenter = (cellCoord + 0.5) / u_gridSize;\n vec4 videoColor = texture(u_video, cellCenter);\n \n // Perceived brightness using human eye sensitivity weights\n float baseBrightness = dot(videoColor.rgb, vec3(0.299, 0.587, 0.114));\n \n // Audio reactivity - louder = brighter, silence = darker\n float minBrightness = mix(0.3, 0.0, u_audioSensitivity);\n float maxBrightness = mix(1.0, 5.0, u_audioSensitivity);\n float audioMultiplier = mix(minBrightness, maxBrightness, u_audioLevel);\n float audioModulated = baseBrightness * audioMultiplier;\n float brightness = mix(baseBrightness, audioModulated, u_audioReactivity);\n \n // Cursor glow - blocky circle effect\n float cursorGlow = 0.0;\n float cursorRadius = 5.0;\n \n vec2 mouseCell = floor(u_mouse * u_gridSize);\n float cellDist = length(thisCell - mouseCell);\n if (cellDist <= cursorRadius && u_mouse.x >= 0.0) {\n cursorGlow += 1.0 - cellDist / cursorRadius;\n }\n \n // Trail effect\n for (int i = 0; i < 12; i++) {\n if (i >= u_trailLength) break;\n vec2 trailPos = u_trail[i];\n if (trailPos.x < 0.0) continue;\n \n vec2 trailCell = floor(trailPos * u_gridSize);\n float trailDist = length(thisCell - trailCell);\n float trailRadius = cursorRadius * 0.8;\n \n if (trailDist <= trailRadius) {\n float fade = 1.0 - float(i) / float(u_trailLength);\n cursorGlow += (1.0 - trailDist / trailRadius) * 0.5 * fade;\n }\n }\n cursorGlow = min(cursorGlow, 1.0);\n \n // Ripple effect - expanding rings on click\n float rippleGlow = 0.0;\n if (u_rippleEnabled > 0.5) {\n for (int i = 0; i < 8; i++) {\n vec4 ripple = u_ripples[i];\n if (ripple.w < 0.5) continue;\n \n float age = u_time - ripple.z;\n if (age < 0.0) continue;\n \n vec2 rippleCell = floor(ripple.xy * u_gridSize);\n float cellDist = length(thisCell - rippleCell);\n float initialRadius = 5.0;\n \n float distFromEdge = max(0.0, cellDist - initialRadius);\n float rippleSpeed = u_rippleSpeed;\n float reachTime = distFromEdge / rippleSpeed;\n float timeSinceReached = age - reachTime;\n \n float fadeDuration = 0.5;\n if (timeSinceReached >= 0.0 && timeSinceReached < fadeDuration) {\n float pop = 1.0 - timeSinceReached / fadeDuration;\n pop = pop * pop;\n rippleGlow += pop * 0.3;\n }\n }\n rippleGlow = min(rippleGlow, 1.0);\n }\n \n // Map brightness to character index (0 = darkest char, numChars-1 = brightest)\n float charIndex = floor(
+var fragment_default = "#version 300 es\nprecision highp float;\n\n// Textures\nuniform sampler2D u_video;\nuniform sampler2D u_asciiAtlas;\n\n// Dimensions\nuniform vec2 u_resolution;\nuniform vec2 u_charSize;\nuniform vec2 u_gridSize;\nuniform float u_numChars;\n\n// Rendering options\nuniform bool u_colored;\nuniform float u_blend;\nuniform float u_highlight;\nuniform float u_brightness;\n\n// Audio\nuniform float u_audioLevel;\nuniform float u_audioReactivity;\nuniform float u_audioSensitivity;\n\n// Mouse\nuniform vec2 u_mouse;\nuniform float u_mouseRadius;\nuniform vec2 u_trail[24];\nuniform int u_trailLength;\n\n// Ripple\nuniform vec4 u_ripples[8];\nuniform float u_time;\nuniform float u_rippleEnabled;\nuniform float u_rippleSpeed;\n\nin vec2 v_texCoord;\nout vec4 fragColor;\n\nvoid main() {\n // Figure out which ASCII cell this pixel is in\n vec2 cellCoord = floor(v_texCoord * u_gridSize);\n vec2 thisCell = cellCoord;\n \n // Sample video at cell center (mipmaps handle averaging)\n vec2 cellCenter = (cellCoord + 0.5) / u_gridSize;\n vec4 videoColor = texture(u_video, cellCenter);\n \n // Perceived brightness using human eye sensitivity weights\n float baseBrightness = dot(videoColor.rgb, vec3(0.299, 0.587, 0.114));\n \n // Audio reactivity - louder = brighter, silence = darker\n float minBrightness = mix(0.3, 0.0, u_audioSensitivity);\n float maxBrightness = mix(1.0, 5.0, u_audioSensitivity);\n float audioMultiplier = mix(minBrightness, maxBrightness, u_audioLevel);\n float audioModulated = baseBrightness * audioMultiplier;\n float brightness = mix(baseBrightness, audioModulated, u_audioReactivity);\n \n // Cursor glow - blocky circle effect\n float cursorGlow = 0.0;\n float cursorRadius = 5.0;\n \n vec2 mouseCell = floor(u_mouse * u_gridSize);\n float cellDist = length(thisCell - mouseCell);\n if (cellDist <= cursorRadius && u_mouse.x >= 0.0) {\n cursorGlow += 1.0 - cellDist / cursorRadius;\n }\n \n // Trail effect\n for (int i = 0; i < 12; i++) {\n if (i >= u_trailLength) break;\n vec2 trailPos = u_trail[i];\n if (trailPos.x < 0.0) continue;\n \n vec2 trailCell = floor(trailPos * u_gridSize);\n float trailDist = length(thisCell - trailCell);\n float trailRadius = cursorRadius * 0.8;\n \n if (trailDist <= trailRadius) {\n float fade = 1.0 - float(i) / float(u_trailLength);\n cursorGlow += (1.0 - trailDist / trailRadius) * 0.5 * fade;\n }\n }\n cursorGlow = min(cursorGlow, 1.0);\n \n // Ripple effect - expanding rings on click\n float rippleGlow = 0.0;\n if (u_rippleEnabled > 0.5) {\n for (int i = 0; i < 8; i++) {\n vec4 ripple = u_ripples[i];\n if (ripple.w < 0.5) continue;\n \n float age = u_time - ripple.z;\n if (age < 0.0) continue;\n \n vec2 rippleCell = floor(ripple.xy * u_gridSize);\n float cellDist = length(thisCell - rippleCell);\n float initialRadius = 5.0;\n \n float distFromEdge = max(0.0, cellDist - initialRadius);\n float rippleSpeed = u_rippleSpeed;\n float reachTime = distFromEdge / rippleSpeed;\n float timeSinceReached = age - reachTime;\n \n float fadeDuration = 0.5;\n if (timeSinceReached >= 0.0 && timeSinceReached < fadeDuration) {\n float pop = 1.0 - timeSinceReached / fadeDuration;\n pop = pop * pop;\n rippleGlow += pop * 0.3;\n }\n }\n rippleGlow = min(rippleGlow, 1.0);\n }\n \n // Apply brightness multiplier\n // brightness < 1.0: darkens (multiply)\n // brightness > 1.0: brightens (compress dark values toward 1.0)\n float adjustedBrightness;\n if (u_brightness <= 1.0) {\n adjustedBrightness = brightness * u_brightness;\n } else {\n // For brightness > 1.0, compress the range: dark values get pushed up\n // Formula: 1.0 - (1.0 - brightness) / u_brightness\n // This makes dark values brighter while keeping bright values near 1.0\n adjustedBrightness = 1.0 - (1.0 - brightness) / u_brightness;\n }\n adjustedBrightness = clamp(adjustedBrightness, 0.0, 1.0);\n \n // Map brightness to character index (0 = darkest char, numChars-1 = brightest)\n float charIndex = floor(adjustedBrightness * (u_numChars - 0.001));\n \n // Find the character in the atlas (horizontal strip of pre-rendered chars)\n float atlasX = charIndex / u_numChars;\n vec2 cellPos = fract(v_texCoord * u_gridSize);\n vec2 atlasCoord = vec2(atlasX + cellPos.x / u_numChars, cellPos.y);\n vec4 charColor = texture(u_asciiAtlas, atlasCoord);\n \n // Pick the color - video colors or green terminal aesthetic\n vec3 baseColor;\n if (u_colored) {\n baseColor = videoColor.rgb;\n } else {\n baseColor = vec3(0.0, 1.0, 0.0);\n }\n \n // Background highlight behind each character\n float bgIntensity = 0.15 + u_highlight * 0.35;\n vec3 bgColor = baseColor * bgIntensity;\n vec3 textColor = baseColor * 1.2;\n vec3 finalColor = mix(bgColor, textColor, charColor.r);\n \n // Add cursor and ripple glow\n finalColor += cursorGlow * baseColor * 0.5;\n finalColor += rippleGlow * baseColor;\n \n // Blend with original video if requested\n vec3 blendedColor = mix(finalColor, videoColor.rgb, u_blend);\n \n fragColor = vec4(blendedColor, 1.0);\n}\n";
 
 // src/lib/webgl/utils.ts
 function compileShader(gl, source, type) {
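The `u_brightness` logic embedded in the new shader string above is a simple piecewise curve: at or below 1.0 it darkens multiplicatively, above 1.0 it compresses dark values toward white. A small TypeScript sketch of that same mapping (an illustration of the formula, not code shipped by the package):

```ts
// Mirrors the brightness step in the 1.1.0 fragment shader:
//   b <= 1: out = luma * b            (plain darken)
//   b >  1: out = 1 - (1 - luma) / b  (lift dark values, keep highlights near 1)
function applyBrightness(luma: number, brightness: number): number {
  const out =
    brightness <= 1.0 ? luma * brightness : 1.0 - (1.0 - luma) / brightness;
  return Math.min(1.0, Math.max(0.0, out)); // clamp(0.0, 1.0), as in the shader
}

console.log(applyBrightness(0.2, 0.5)); // 0.1 — darker
console.log(applyBrightness(0.2, 1.0)); // 0.2 — unchanged
console.log(applyBrightness(0.2, 2.0)); // 0.6 — lifted: 1 - 0.8 / 2
```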
@@ -221,12 +225,15 @@ var MAX_TRAIL_LENGTH = 24;
 var MAX_RIPPLES = 8;
 function useVideoToAscii(options = {}) {
   const {
-    fontSize
-
+    fontSize,
+    numColumns,
+    colored = true,
     blend = 0,
     highlight = 0,
+    brightness = 1,
     charset = DEFAULT_CHARSET,
-    maxWidth
+    maxWidth,
+    enableSpacebarToggle = false,
     onStats
   } = options;
   const containerRef = (0, import_react.useRef)(null);
@@ -246,8 +253,12 @@ function useVideoToAscii(options = {}) {
   const [stats, setStats] = (0, import_react.useState)({ fps: 0, frameTime: 0 });
   const [isReady, setIsReady] = (0, import_react.useState)(false);
   const [isPlaying, setIsPlaying] = (0, import_react.useState)(false);
-  const
-  const
+  const defaultWidth = typeof window !== "undefined" ? window.innerWidth : 900;
+  const containerWidth = maxWidth || defaultWidth;
+  const calculatedFontSize = numColumns ? containerWidth / (numColumns * CHAR_WIDTH_RATIO) : fontSize || 10;
+  const calculatedMaxWidth = numColumns ? numColumns * calculatedFontSize * CHAR_WIDTH_RATIO : maxWidth || 900;
+  const charWidth = calculatedFontSize * CHAR_WIDTH_RATIO;
+  const cols = numColumns || Math.floor(calculatedMaxWidth / charWidth);
   const chars = (0, import_react.useMemo)(() => getCharArray(charset), [charset]);
   const registerUniformSetter = (0, import_react.useCallback)(
     (id, setter) => {
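The hunk above inverts the old sizing logic: instead of deriving the column count from a fixed font size, `numColumns` now drives the font size. A hedged sketch of the arithmetic, assuming `CHAR_WIDTH_RATIO` is the glyph width-to-height ratio defined elsewhere in the bundle (its value is not visible in this diff, so 0.6 below is only a placeholder):

```ts
// Placeholder; the real constant is defined elsewhere in the bundle.
const CHAR_WIDTH_RATIO = 0.6;

// Given a container width and a requested column count, pick the font size
// that makes exactly that many characters span the container.
function layoutForColumns(containerWidth: number, numColumns: number) {
  const fontSize = containerWidth / (numColumns * CHAR_WIDTH_RATIO);
  const charWidth = fontSize * CHAR_WIDTH_RATIO; // === containerWidth / numColumns
  return { fontSize, charWidth, cols: numColumns };
}

console.log(layoutForColumns(900, 120));
// { fontSize: 12.5, charWidth: 7.5, cols: 120 } → 120 columns × 7.5 px = 900 px
```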
@@ -272,6 +283,7 @@ function useVideoToAscii(options = {}) {
       u_colored: get("u_colored"),
       u_blend: get("u_blend"),
       u_highlight: get("u_highlight"),
+      u_brightness: get("u_brightness"),
       // Mouse uniforms
       u_mouse: get("u_mouse"),
       u_mouseRadius: get("u_mouseRadius"),
@@ -299,15 +311,24 @@ function useVideoToAscii(options = {}) {
   const initWebGL = (0, import_react.useCallback)(() => {
     const canvas = canvasRef.current;
     const video = videoRef.current;
+    const container = containerRef.current;
     if (!canvas || !video || !video.videoWidth) return false;
+    let finalFontSize = calculatedFontSize;
+    let finalCols = cols;
+    if (numColumns && container) {
+      const actualWidth = container.clientWidth || defaultWidth;
+      finalFontSize = actualWidth / (numColumns * CHAR_WIDTH_RATIO);
+      finalCols = numColumns;
+    }
     const grid = calculateGridDimensions(
       video.videoWidth,
       video.videoHeight,
-
+      finalCols
     );
     setDimensions(grid);
-    const
-    const
+    const finalCharWidth = finalFontSize * CHAR_WIDTH_RATIO;
+    const pixelWidth = grid.cols * finalCharWidth;
+    const pixelHeight = grid.rows * finalFontSize;
     canvas.width = pixelWidth;
     canvas.height = pixelHeight;
     const gl = canvas.getContext("webgl2", {
@@ -332,15 +353,21 @@ function useVideoToAscii(options = {}) {
     gl.useProgram(program);
     createFullscreenQuad(gl, program);
     videoTextureRef.current = createVideoTexture(gl);
-
+    const finalFontSizeForAtlas = numColumns && container ? (container.clientWidth || defaultWidth) / (numColumns * CHAR_WIDTH_RATIO) : calculatedFontSize;
+    atlasTextureRef.current = createAsciiAtlas(
+      gl,
+      chars,
+      finalFontSizeForAtlas
+    );
     const locations = cacheUniformLocations(gl, program);
     uniformLocationsRef.current = locations;
     gl.uniform1i(locations.u_video, 0);
     gl.uniform1i(locations.u_asciiAtlas, 1);
     gl.uniform2f(locations.u_resolution, pixelWidth, pixelHeight);
-    gl.uniform2f(locations.u_charSize,
-    gl.uniform2f(locations.u_gridSize,
+    gl.uniform2f(locations.u_charSize, finalCharWidth, finalFontSize);
+    gl.uniform2f(locations.u_gridSize, finalCols, grid.rows);
     gl.uniform1f(locations.u_numChars, chars.length);
+    gl.uniform1f(locations.u_brightness, brightness);
     gl.uniform2f(locations.u_mouse, -1, -1);
     gl.uniform1f(locations.u_mouseRadius, 0);
     gl.uniform1i(locations.u_trailLength, 0);
@@ -351,7 +378,15 @@ function useVideoToAscii(options = {}) {
     gl.viewport(0, 0, pixelWidth, pixelHeight);
     setIsReady(true);
     return true;
-  }, [
+  }, [
+    cols,
+    numColumns,
+    calculatedFontSize,
+    chars,
+    cacheUniformLocations,
+    brightness,
+    defaultWidth
+  ]);
   const render = (0, import_react.useCallback)(() => {
     const gl = glRef.current;
     const video = videoRef.current;
@@ -369,6 +404,7 @@ function useVideoToAscii(options = {}) {
     gl.uniform1i(locations.u_colored, colored ? 1 : 0);
     gl.uniform1f(locations.u_blend, blend / 100);
     gl.uniform1f(locations.u_highlight, highlight / 100);
+    gl.uniform1f(locations.u_brightness, brightness);
     for (const setter of uniformSettersRef.current.values()) {
       setter(gl, program, locations);
     }
@@ -387,7 +423,7 @@ function useVideoToAscii(options = {}) {
       lastFpsTimeRef.current = now;
     }
     animationRef.current = requestAnimationFrame(render);
-  }, [colored, blend, highlight, onStats]);
+  }, [colored, blend, highlight, brightness, onStats]);
   (0, import_react.useEffect)(() => {
     const video = videoRef.current;
     if (!video) return;
@@ -426,6 +462,19 @@ function useVideoToAscii(options = {}) {
       initWebGL();
     }
   }, [initWebGL]);
+  (0, import_react.useEffect)(() => {
+    if (!numColumns || !containerRef.current) return;
+    const container = containerRef.current;
+    const resizeObserver = new ResizeObserver(() => {
+      if (videoRef.current && videoRef.current.readyState >= 1) {
+        initWebGL();
+      }
+    });
+    resizeObserver.observe(container);
+    return () => {
+      resizeObserver.disconnect();
+    };
+  }, [numColumns, initWebGL]);
   (0, import_react.useEffect)(() => {
     return () => {
       const gl = glRef.current;
@@ -455,6 +504,7 @@ function useVideoToAscii(options = {}) {
     }
   }, []);
   (0, import_react.useEffect)(() => {
+    if (!enableSpacebarToggle) return;
     const handleKeyDown = (e) => {
       if (e.code === "Space" && e.target === document.body) {
         e.preventDefault();
@@ -463,7 +513,7 @@ function useVideoToAscii(options = {}) {
     };
     window.addEventListener("keydown", handleKeyDown);
     return () => window.removeEventListener("keydown", handleKeyDown);
-  }, [toggle]);
+  }, [toggle, enableSpacebarToggle]);
   return {
     containerRef,
     videoRef,
@@ -694,40 +744,36 @@ function useAsciiAudio(ascii, options = {}) {
 
 // src/components/VideoToAscii.tsx
 var import_jsx_runtime = require("react/jsx-runtime");
-function
+function Video2Ascii({
   src,
-
-  colored =
+  numColumns,
+  colored = true,
   blend = 0,
   highlight = 0,
+  brightness = 1,
   charset = "standard",
-  maxWidth = 900,
   enableMouse = true,
   trailLength = 24,
   enableRipple = false,
   rippleSpeed = 40,
-
-
+  audioEffect = 0,
+  audioRange = 50,
+  isPlaying = true,
+  autoPlay = true,
+  enableSpacebarToggle = false,
   showStats = false,
   className = ""
 }) {
   const ascii = useVideoToAscii({
-
+    numColumns,
     colored,
     blend,
     highlight,
+    brightness,
     charset,
-
+    enableSpacebarToggle
   });
-  const {
-    containerRef,
-    videoRef,
-    canvasRef,
-    stats,
-    dimensions,
-    isReady,
-    isPlaying
-  } = ascii;
+  const { containerRef, videoRef, canvasRef, stats, dimensions, isReady } = ascii;
   const mouseHandlers = useAsciiMouseEffect(ascii, {
     enabled: enableMouse,
     trailLength
@@ -737,20 +783,29 @@ function VideoToAscii({
     speed: rippleSpeed
   });
   useAsciiAudio(ascii, {
-    enabled:
-    reactivity:
-    sensitivity:
+    enabled: audioEffect > 0,
+    reactivity: audioEffect,
+    sensitivity: audioRange
   });
-
-
-
+  (0, import_react5.useEffect)(() => {
+    const video = videoRef.current;
+    if (!video) return;
+    if (isPlaying) {
+      if (autoPlay && isReady) {
+        video.play().catch(() => {
+        });
+      }
+    } else {
+      video.pause();
+    }
+  }, [isPlaying, autoPlay, isReady, videoRef]);
   return /* @__PURE__ */ (0, import_jsx_runtime.jsxs)("div", { className: `video-to-ascii ${className}`, children: [
     /* @__PURE__ */ (0, import_jsx_runtime.jsx)(
       "video",
       {
         ref: videoRef,
         src,
-        muted:
+        muted: audioEffect === 0,
         loop: true,
         playsInline: true,
         crossOrigin: "anonymous",
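Two behavioral consequences of the hunk above are worth noting: the `<video>` element is muted only while `audioEffect` is 0, and the new effect calls `video.play()` (swallowing rejections) when `isPlaying`, `autoPlay`, and readiness line up. Because most browsers refuse to auto-play unmuted media without a user gesture, an audio-reactive setup may need to start playback from a click; the wrapper below is an illustrative sketch, not part of the package.

```tsx
import { useState } from "react";
import Video2Ascii from "video2ascii";

// With audioEffect > 0 the underlying <video> is unmuted, so the play() call
// inside the component can be rejected until the user interacts with the page.
// Gating the mount on a click sidesteps that.
export function AudioReactiveDemo() {
  const [started, setStarted] = useState(false);

  if (!started) {
    return <button onClick={() => setStarted(true)}>Start with audio</button>;
  }
  return (
    <Video2Ascii src="/clip.mp4" audioEffect={60} audioRange={70} isPlaying={true} />
  );
}
```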
@@ -761,12 +816,7 @@ function VideoToAscii({
       "div",
       __spreadProps(__spreadValues(__spreadValues({
         ref: containerRef,
-        className: "relative cursor-pointer select-none overflow-hidden rounded"
-        style: {
-          width: pixelWidth || "100%",
-          height: pixelHeight || "auto",
-          backgroundColor: "#000"
-        }
+        className: "relative cursor-pointer select-none overflow-hidden rounded bg-black"
       }, enableMouse ? mouseHandlers : {}), enableRipple ? rippleHandlers : {}), {
         children: [
           /* @__PURE__ */ (0, import_jsx_runtime.jsx)(
@@ -788,16 +838,16 @@ function VideoToAscii({
             dimensions.cols,
             "\xD7",
             dimensions.rows
-          ] })
-          !isPlaying && isReady && /* @__PURE__ */ (0, import_jsx_runtime.jsx)("div", { className: "absolute inset-0 flex items-center justify-center bg-black/50", children: /* @__PURE__ */ (0, import_jsx_runtime.jsx)("div", { className: "text-white text-lg", children: "\u25B6 Press Space to Play" }) })
+          ] })
         ]
       })
     )
   ] });
 }
+var VideoToAscii_default = Video2Ascii;
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   ASCII_CHARSETS,
-
+  Video2Ascii
 });
 //# sourceMappingURL=index.js.map