yta-video-opengl 0.0.22__py3-none-any.whl → 0.0.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33) hide show
  1. yta_video_opengl/editor.py +333 -0
  2. yta_video_opengl/nodes/__init__.py +32 -28
  3. yta_video_opengl/nodes/audio/__init__.py +164 -55
  4. yta_video_opengl/nodes/video/__init__.py +27 -1
  5. yta_video_opengl/nodes/video/{opengl.py → opengl/__init__.py} +8 -4
  6. yta_video_opengl/nodes/video/opengl/experimental.py +760 -0
  7. yta_video_opengl/tests.py +236 -358
  8. yta_video_opengl/utils.py +9 -421
  9. {yta_video_opengl-0.0.22.dist-info → yta_video_opengl-0.0.24.dist-info}/METADATA +2 -6
  10. yta_video_opengl-0.0.24.dist-info/RECORD +13 -0
  11. yta_video_opengl/audio.py +0 -219
  12. yta_video_opengl/classes.py +0 -1276
  13. yta_video_opengl/complete/__init__.py +0 -0
  14. yta_video_opengl/complete/frame_combinator.py +0 -204
  15. yta_video_opengl/complete/frame_generator.py +0 -319
  16. yta_video_opengl/complete/frame_wrapper.py +0 -135
  17. yta_video_opengl/complete/timeline.py +0 -571
  18. yta_video_opengl/complete/track/__init__.py +0 -500
  19. yta_video_opengl/complete/track/media/__init__.py +0 -222
  20. yta_video_opengl/complete/track/parts.py +0 -267
  21. yta_video_opengl/complete/track/utils.py +0 -78
  22. yta_video_opengl/media.py +0 -347
  23. yta_video_opengl/reader/__init__.py +0 -710
  24. yta_video_opengl/reader/cache/__init__.py +0 -253
  25. yta_video_opengl/reader/cache/audio.py +0 -195
  26. yta_video_opengl/reader/cache/utils.py +0 -48
  27. yta_video_opengl/reader/cache/video.py +0 -113
  28. yta_video_opengl/t.py +0 -233
  29. yta_video_opengl/video.py +0 -277
  30. yta_video_opengl/writer.py +0 -278
  31. yta_video_opengl-0.0.22.dist-info/RECORD +0 -31
  32. {yta_video_opengl-0.0.22.dist-info → yta_video_opengl-0.0.24.dist-info}/LICENSE +0 -0
  33. {yta_video_opengl-0.0.22.dist-info → yta_video_opengl-0.0.24.dist-info}/WHEEL +0 -0
@@ -0,0 +1,760 @@
1
+ """
2
+ Experimental module. The effects here
3
+ have not been tested completely or the
4
+ result is not as good as it should be
5
+ to be considered a definitive effect.
6
+ """
7
+ from yta_video_opengl.nodes.video.opengl import OpenglNodeBase
8
+ from typing import Union
9
+
10
+ import numpy as np
11
+ import moderngl
12
+ import math
13
+
14
+
15
class BreathingFrame(OpenglNodeBase):
    """
    The frame but as if it was breathing: it
    smoothly zooms in and out around the frame
    center over time.

    The oscillation is customizable through the
    'amount' (relative zoom amplitude) and
    'speed' (angular frequency applied to 't')
    uniforms. The defaults reproduce the
    previously hard-coded behaviour: 5% zoom
    at speed 2.0.
    """

    @property
    def vertex_shader(
        self
    ) -> str:
        return (
            '''
            #version 330
            in vec2 in_vert;
            in vec2 in_texcoord;
            out vec2 v_text;
            void main() {
                gl_Position = vec4(in_vert, 0.0, 1.0);
                v_text = in_texcoord;
            }
            '''
        )

    @property
    def fragment_shader(
        self
    ) -> str:
        return (
            '''
            #version 330
            uniform sampler2D tex;
            uniform float time;
            // Customizable breathing parameters
            uniform float amount;
            uniform float speed;
            in vec2 v_text;
            out vec4 f_color;

            void main() {
                // Dynamic zoom scaled with time
                float scale = 1.0 + amount * sin(time * speed);
                vec2 center = vec2(0.5, 0.5);

                // Recalculate coords according to center
                vec2 uv = (v_text - center) / scale + center;

                // Clamp to avoid artifacts
                uv = clamp(uv, 0.0, 1.0);

                f_color = texture(tex, uv);
            }
            '''
        )

    def __init__(
        self,
        size,
        first_frame,
        context = None,
        amount: float = 0.05,
        speed: float = 2.0
    ):
        """
        Initialize the node.

        :param amount: Relative zoom amplitude
            (0.05 means the frame scales up to 5%).
        :param speed: Angular frequency of the sine
            that drives the breathing, applied to 't'.
        """
        super().__init__(size, first_frame, context)

        # These are constant for the whole clip, so
        # they are uploaded once here (same pattern
        # as RotatingInCenterFrame).
        self.uniforms.set('amount', amount)
        self.uniforms.set('speed', speed)

    def process(
        self,
        input: Union[moderngl.Texture, 'VideoFrame', 'np.ndarray'],
        t: float = 0.0,
    ) -> moderngl.Texture:
        """
        Apply the shader to the 'input', that
        must be a frame or a texture, and return
        the new resulting texture.

        We use and return textures to maintain
        the process in GPU and optimize it.
        """
        # 'time' drives the breathing oscillation
        self.uniforms.set('time', t)

        return super().process(input)
82
+
83
class HandheldFrame(OpenglNodeBase):
    """
    The frame but as if it was being recorded by
    someone holding a camera, that is not 100%
    stable: a small rotation, translation and
    zoom jitter is applied per frame.
    """

    @property
    def vertex_shader(
        self
    ) -> str:
        return (
            '''
            #version 330
            in vec2 in_vert;
            in vec2 in_texcoord;
            out vec2 v_text;

            uniform mat3 transform;

            void main() {
                vec3 pos = vec3(in_vert, 1.0);
                pos = transform * pos;
                gl_Position = vec4(pos.xy, 0.0, 1.0);
                v_text = in_texcoord;
            }
            '''
        )

    @property
    def fragment_shader(
        self
    ) -> str:
        return (
            '''
            #version 330
            uniform sampler2D tex;
            in vec2 v_text;
            out vec4 f_color;

            void main() {
                f_color = texture(tex, v_text);
            }
            '''
        )

    def _handheld_matrix(
        self,
        t
    ):
        """
        Build the 3x3 shake transform (rotation,
        translation and zoom) for the time 't',
        in row-major numpy layout.
        """
        # More noticeable rotation (was 0.02)
        angle = self._smooth_noise(t, freq=0.8, scale=0.05)

        # Bigger translations (was 0.015)
        tx = self._smooth_noise(t, freq=1.1, scale=0.04)
        ty = self._smooth_noise(t, freq=1.4, scale=0.04)

        # More aggressive zoom (was 0.02)
        zoom = 1.0 + self._smooth_noise(t, freq=0.5, scale=0.06)

        cos_a, sin_a = math.cos(angle), math.sin(angle)

        return np.array([
            [ cos_a * zoom, -sin_a * zoom, tx],
            [ sin_a * zoom,  cos_a * zoom, ty],
            [ 0.0,           0.0,          1.0]
        ], dtype="f4")

    def _smooth_noise(
        self,
        t,
        freq = 1.5,
        scale = 1.0
    ):
        """
        Small noise by using sin and cos mixed.
        """
        return (
            math.sin(t * freq) +
            0.5 * math.cos(t * freq * 0.5 + 1.7) +
            0.25 * math.sin(t * freq * 0.25 + 2.5)
        ) * scale

    def process(
        self,
        input: Union[moderngl.Texture, 'VideoFrame', 'np.ndarray'],
        t: float = 0.0,
    ) -> moderngl.Texture:
        """
        Apply the shader to the 'input', that
        must be a frame or a texture, and return
        the new resulting texture.

        We use and return textures to maintain
        the process in GPU and optimize it.
        """
        # OpenGL expects column-major data, so the
        # row-major numpy matrix must be transposed
        # before uploading (same as OrbitingFrame
        # does with its 'mvp'); without the
        # transpose the tx/ty translation column is
        # read as the bottom row and the pan shake
        # is silently lost.
        self.uniforms.set_mat('transform', self._handheld_matrix(t).T.tobytes())

        return super().process(input)
182
+
183
class OrbitingFrame(OpenglNodeBase):
    """
    The frame but orbiting around the camera,
    carousel-style: the quad travels on an
    ellipse in the XZ plane in front of the
    camera while (optionally) turning to face
    the orbit center.
    """

    @property
    def vertex_shader(
        self
    ) -> str:
        return (
            '''
            #version 330

            in vec2 in_vert;
            in vec2 in_texcoord;

            out vec2 v_uv;

            uniform mat4 mvp; // Model-View-Projection matrix

            void main() {
                v_uv = in_texcoord;
                // El quad está en XY, lo pasamos a XYZ con z=0
                vec4 pos = vec4(in_vert, 0.0, 1.0);
                gl_Position = mvp * pos;
            }
            '''
        )

    @property
    def fragment_shader(
        self
    ) -> str:
        return (
            '''
            #version 330

            uniform sampler2D tex;
            in vec2 v_uv;
            out vec4 f_color;

            void main() {
                f_color = texture(tex, v_uv);
            }
            '''
        )

    def _perspective(
        self,
        fov_y_rad,
        aspect,
        near,
        far
    ):
        """
        Build a row-major 4x4 perspective projection
        matrix from a vertical field of view (radians),
        aspect ratio and near/far clip distances.
        """
        f = 1.0 / np.tan(fov_y_rad / 2.0)
        m = np.zeros((4,4), dtype='f4')
        m[0,0] = f / aspect
        m[1,1] = f
        m[2,2] = (far + near) / (near - far)
        m[2,3] = (2 * far * near) / (near - far)
        m[3,2] = -1.0

        return m

    def _look_at(
        self,
        eye,
        target,
        up = (0, 1, 0)
    ):
        """
        Build a row-major 4x4 view matrix for a camera
        at 'eye' looking towards 'target'.

        NOTE(review): currently unused in this class
        ('_carousel_mvp' uses an identity view instead).
        """
        eye = np.array(eye, dtype='f4')
        target = np.array(target, dtype='f4')
        up = np.array(up, dtype='f4')

        f = target - eye
        f = f / np.linalg.norm(f)
        s = np.cross(f, up)
        s = s / np.linalg.norm(s)
        u = np.cross(s, f)

        m = np.eye(4, dtype='f4')
        m[0,0:3] = s
        m[1,0:3] = u
        m[2,0:3] = -f
        m[0,3] = -np.dot(s, eye)
        m[1,3] = -np.dot(u, eye)
        m[2,3] = np.dot(f, eye)

        return m

    def _translate(
        self,
        x,
        y,
        z
    ):
        """
        Build a row-major 4x4 translation matrix.
        """
        m = np.eye(4, dtype='f4')
        m[0,3] = x
        m[1,3] = y
        m[2,3] = z

        return m

    def _rotate_y(
        self,
        angle
    ):
        """
        Build a row-major 4x4 rotation matrix around
        the Y axis ('angle' in radians).
        """
        c, s = np.cos(angle), np.sin(angle)
        m = np.eye(4, dtype='f4')
        m[0,0], m[0,2] = c, s
        m[2,0], m[2,2] = -s, c

        return m

    def _scale_uniform(
        self,
        k
    ):
        """
        Build a 4x4 uniform scale matrix (factor 'k'
        on the three axes).
        """
        m = np.eye(4, dtype='f4')
        m[0,0] = m[1,1] = m[2,2] = k

        return m

    def _carousel_mvp(
        self,
        t,
        *,
        aspect,
        fov_deg = 60.0,
        radius = 4.0,
        center_z = -6.0,
        speed = 1.0,
        face_center_strength = 1.0,
        extra_scale = 1.0
    ):
        """
        Build the row-major Model-View-Projection matrix
        for the carousel at time 't'.

        t: time in seconds
        aspect: framebuffer width/height
        radius: orbit radius in the XZ plane
        center_z: shifts the whole carousel towards -Z so it sits in front of the camera
        speed: angular speed
        face_center_strength: 1.0 = panel faces the center; 0.0 = it does not turn with the orbit
        extra_scale: extra uniform scale applied to the panel
        """

        # Projection and view (camera at the origin looking towards -Z)
        proj = self._perspective(np.radians(fov_deg), aspect, 0.1, 100.0)
        view = np.eye(4, dtype='f4') # or look_at((0,0,0), (0,0,-1))

        # Orbit angle (offset chosen so the panel "enters" from the left)
        theta = speed * t - np.pi * 0.5

        # Orbit in XZ with the center shifted to center_z.
        # The Z half-axis is flattened (factor 0.2), so the
        # path is an ellipse rather than a circle:
        # x = radius * np.cos(theta)
        # z = radius * np.sin(theta) + center_z
        x = radius * np.cos(theta)
        z = (radius * 0.2) * np.sin(theta) + center_z

        # Yaw so the panel points at the center (0,0,center_z).
        # The vector from panel -> center is (-x, 0, center_z - z)
        yaw_to_center = np.arctan2(-x, (center_z - z)) # atan2(X, Z)
        yaw = face_center_strength * yaw_to_center

        model = self._translate(x, 0.0, z) @ self._rotate_y(yaw) @ self._scale_uniform(extra_scale)

        # IMPORTANT! OpenGL expects column-major data: we transpose when writing
        mvp = proj @ view @ model

        return mvp

    def process(
        self,
        input: Union[moderngl.Texture, 'VideoFrame', 'np.ndarray'],
        t: float = 0.0,
    ) -> moderngl.Texture:
        """
        Apply the shader to the 'input', that
        must be a frame or a texture, and return
        the new resulting texture.

        We use and return textures to maintain
        the process in GPU and optimize it.
        """
        # NOTE(review): assumes self.size is (width, height) — confirm in base class
        aspect = self.size[0] / self.size[1]
        mvp = self._carousel_mvp(t, aspect=aspect, radius=4.0, center_z=-4.0, speed=1.2, face_center_strength=1.0, extra_scale = 1.0)

        # Transposed to column-major before uploading (see note above)
        self.uniforms.set_mat('mvp', mvp.T.tobytes())

        return super().process(input)
371
+
372
class RotatingInCenterFrame(OpenglNodeBase):
    """
    The frame spinning in place around its
    vertical (Y) axis at a constant angular
    speed, like a revolving panel.
    """

    @property
    def vertex_shader(
        self
    ) -> str:
        return (
            '''
            #version 330

            in vec2 in_vert;
            in vec2 in_texcoord;
            out vec2 v_uv;

            uniform float time;
            uniform float speed;

            void main() {
                v_uv = in_texcoord;

                // Rotación alrededor del eje Y
                float angle = time * speed; // puedes usar time directamente, o time * speed
                float cosA = cos(angle);
                float sinA = sin(angle);

                // Convertimos el quad a 3D (x, y, z)
                vec3 pos = vec3(in_vert.xy, 0.0);

                // Rotación Y
                mat3 rotY = mat3(
                    cosA, 0.0, sinA,
                    0.0 , 1.0, 0.0,
                    -sinA, 0.0, cosA
                );

                pos = rotY * pos;

                gl_Position = vec4(pos, 1.0);
            }
            '''
        )

    @property
    def fragment_shader(
        self
    ) -> str:
        return (
            '''
            #version 330

            in vec2 v_uv;
            out vec4 f_color;

            uniform sampler2D tex;

            void main() {
                f_color = texture(tex, v_uv);
            }
            '''
        )

    def __init__(
        self,
        size,
        first_frame,
        context = None,
        speed: float = 30
    ):
        """
        Initialize the node.

        :param speed: Angular speed multiplied by 't' in
            the vertex shader (radians per unit of 't',
            so the default 30 is several turns per second
            when 't' is in seconds — verify with callers).
        """
        super().__init__(size, first_frame, context)

        # Constant for the whole clip, uploaded once
        self.uniforms.set('speed', speed)

    def process(
        self,
        input: Union[moderngl.Texture, 'VideoFrame', 'np.ndarray'],
        t: float = 0.0,
    ) -> moderngl.Texture:
        """
        Apply the shader to the 'input', that
        must be a frame or a texture, and return
        the new resulting texture.

        We use and return textures to maintain
        the process in GPU and optimize it.
        """
        # 'time' drives the rotation angle (time * speed)
        self.uniforms.set('time', t)

        return super().process(input)
463
+
464
class StrangeTvFrame(OpenglNodeBase):
    """
    Analog-TV-like effect combining barrel lens
    distortion, radial RGB chromatic aberration,
    directional motion blur, scanlines, flicker,
    vignette and film grain, all tweakable via
    uniforms.
    """

    @property
    def vertex_shader(
        self
    ) -> str:
        return (
            '''
            #version 330
            in vec2 in_vert;
            in vec2 in_texcoord;
            out vec2 v_uv;

            void main() {
                v_uv = in_texcoord;
                gl_Position = vec4(in_vert, 0.0, 1.0);
            }
            '''
        )

    @property
    def fragment_shader(
        self
    ) -> str:
        return (
            '''
            #version 330

            uniform sampler2D tex;
            uniform float time;

            // ---- Parámetros principales (ajústalos en runtime) ----
            uniform float aberr_strength;    // 0..3 (fuerza del RGB split radial)
            uniform float barrel_k;          // -0.5..0.5 (distorsión de lente; positivo = barrel)
            uniform float blur_radius;       // 0..0.02 (radio de motion blur en UV)
            uniform float blur_angle;        // en radianes (dirección del arrastre)
            uniform int   blur_samples;      // 4..24 (taps del blur)
            uniform float vignette_strength; // 0..2
            uniform float grain_amount;      // 0..0.1
            uniform float flicker_amount;    // 0..0.2
            uniform float scanline_amount;   // 0..0.2

            in vec2 v_uv;
            out vec4 f_color;

            // --- helpers ---
            float rand(vec2 co){
                return fract(sin(dot(co, vec2(12.9898,78.233))) * 43758.5453);
            }

            // Barrel distortion (simple, k>0 curva hacia fuera)
            vec2 barrel(vec2 uv, float k){
                // map to [-1,1]
                vec2 p = uv * 2.0 - 1.0;
                float r2 = dot(p, p);
                p *= (1.0 + k * r2);
                // back to [0,1]
                return p * 0.5 + 0.5;
            }

            // Aberración cromática radial
            vec3 sample_chromatic(sampler2D t, vec2 uv, vec2 center, float strength){
                // Offset radial según distancia al centro
                vec2 d = uv - center;
                float r = length(d);
                vec2 dir = (r > 1e-5) ? d / r : vec2(0.0);
                // Cada canal se desplaza un poco distinto
                float s = strength * r * 0.005; // escala fina
                float sr = s * 1.0;
                float sg = s * 0.5;
                float sb = s * -0.5; // azul hacia dentro para contraste

                float rC = texture(t, uv + dir * sr).r;
                float gC = texture(t, uv + dir * sg).g;
                float bC = texture(t, uv + dir * sb).b;
                return vec3(rC, gC, bC);
            }

            void main(){
                vec2 uv = v_uv;
                vec2 center = vec2(0.5, 0.5);

                // Lente (barrel/pincushion)
                uv = barrel(uv, barrel_k);

                // Early out si nos salimos mucho (fade de bordes)
                vec2 uv_clamped = clamp(uv, 0.0, 1.0);
                float edge = smoothstep(0.0, 0.02, 1.0 - max(max(-uv.x, uv.x-1.0), max(-uv.y, uv.y-1.0)));

                // Dirección del motion blur
                vec2 dir = vec2(cos(blur_angle), sin(blur_angle));
                // Pequeña variación temporal para que “respire”
                float jitter = (sin(time * 13.0) * 0.5 + 0.5) * 0.4 + 0.6;

                // Acumulación de blur con pesos
                vec3 acc = vec3(0.0);
                float wsum = 0.0;

                int N = max(1, blur_samples);
                for(int i = 0; i < 64; ++i){ // hard cap de seguridad
                    if(i >= N) break;
                    // t de -1..1 distribuye muestras a ambos lados
                    float fi = float(i);
                    float t = (fi / float(N - 1)) * 2.0 - 1.0;

                    // curva de pesos (gauss approx)
                    float w = exp(-t*t * 2.5);
                    // offset base
                    vec2 ofs = dir * t * blur_radius * jitter;

                    // micro-jitter por muestra para romper banding
                    ofs += vec2(rand(uv + fi)*0.0005, rand(uv + fi + 3.14)*0.0005) * blur_radius;

                    // muestreo con aberración cromática
                    vec3 c = sample_chromatic(tex, uv + ofs, center, aberr_strength);

                    acc += c * w;
                    wsum += w;
                }
                vec3 col = acc / max(wsum, 1e-6);

                // Scanlines + flicker
                float scan = 1.0 - scanline_amount * (0.5 + 0.5 * sin((uv.y + time*1.7)*3.14159*480.0));
                float flick = 1.0 + flicker_amount * (sin(time*60.0 + uv.x*10.0) * 0.5 + 0.5);
                col *= scan * flick;

                // Vignette (radial)
                float r = distance(uv, center);
                float vig = 1.0 - smoothstep(0.7, 1.0, r * (1.0 + 0.5*vignette_strength));
                col *= mix(1.0, vig, vignette_strength);

                // Grano
                float g = (rand(uv * (time*37.0 + 1.0)) - 0.5) * 2.0 * grain_amount;
                col += g;

                // Fade de bordes por clamp/warp
                col *= edge;

                f_color = vec4(col, 1.0);
            }
            '''
        )

    def __init__(
        self,
        size,
        first_frame,
        context = None,
        aberr_strength = 1.5,
        barrel_k = 0.08,
        blur_radius = 0.006,
        blur_angle = 0.0, # (0 = horizontal, 1.57 ≈ vertical)
        blur_samples = 12,
        vignette_strength = 0.8,
        grain_amount = 0.02,
        flicker_amount = 0.05,
        scanline_amount = 0.05
    ):
        """
        Initialize the node. Ranges below come from the
        shader's own parameter comments.

        :param aberr_strength: Radial RGB split strength (0..3).
        :param barrel_k: Lens distortion (-0.5..0.5); positive = barrel.
        :param blur_radius: Motion blur radius in UV space (0..0.02).
        :param blur_angle: Blur direction in radians.
        :param blur_samples: Number of blur taps (4..24; shader hard-caps at 64).
        :param vignette_strength: Vignette intensity (0..2).
        :param grain_amount: Film grain amplitude (0..0.1).
        :param flicker_amount: Brightness flicker amplitude (0..0.2).
        :param scanline_amount: Scanline darkening amplitude (0..0.2).
        """
        super().__init__(size, first_frame, context)

        # All of these are constant per clip; only
        # 'time' changes per frame (see 'process')
        self.uniforms.set('aberr_strength', aberr_strength)
        self.uniforms.set('barrel_k', barrel_k)
        self.uniforms.set('blur_radius', blur_radius)
        self.uniforms.set('blur_angle', blur_angle)
        self.uniforms.set('blur_samples', blur_samples)
        self.uniforms.set('vignette_strength', vignette_strength)
        self.uniforms.set('grain_amount', grain_amount)
        self.uniforms.set('flicker_amount', flicker_amount)
        self.uniforms.set('scanline_amount', scanline_amount)

    def process(
        self,
        input: Union[moderngl.Texture, 'VideoFrame', 'np.ndarray'],
        t: float = 0.0,
    ) -> moderngl.Texture:
        """
        Apply the shader to the 'input', that
        must be a frame or a texture, and return
        the new resulting texture.

        We use and return textures to maintain
        the process in GPU and optimize it.
        """
        # 'time' animates jitter, scanlines, flicker and grain
        self.uniforms.set('time', t)

        return super().process(input)
653
+
654
class GlitchRgbFrame(OpenglNodeBase):
    """
    Glitch effect: a sinusoidal wave distortion
    plus a small random shake, with the R, G and
    B channels sampled at separated offsets for
    an RGB-split look.
    """

    @property
    def vertex_shader(
        self
    ) -> str:
        return (
            '''
            #version 330

            // ----------- Vertex Shader -----------
            in vec2 in_vert;
            in vec2 in_texcoord;

            out vec2 v_uv;

            void main() {
                v_uv = in_texcoord;
                gl_Position = vec4(in_vert, 0.0, 1.0);
            }
            '''
        )

    @property
    def fragment_shader(
        self
    ) -> str:
        return (
            '''
            #version 330

            // ----------- Fragment Shader -----------
            uniform sampler2D tex;
            uniform float time;

            // Intensidades del efecto
            uniform float amp;        // amplitud de distorsión
            uniform float freq;       // frecuencia de la onda
            uniform float glitchAmp;  // fuerza del glitch
            uniform float glitchSpeed;

            in vec2 v_uv;
            out vec4 f_color;

            void main() {
                // Distorsión sinusoidal en Y
                float wave = sin(v_uv.x * freq + time * 2.0) * amp;

                // Pequeño desplazamiento aleatorio (shake)
                float shakeX = (fract(sin(time * 12.9898) * 43758.5453) - 0.5) * 0.01;
                float shakeY = (fract(sin(time * 78.233) * 12345.6789) - 0.5) * 0.01;

                // Coordenadas base con distorsión
                vec2 uv = vec2(v_uv.x + shakeX, v_uv.y + wave + shakeY);

                // Glitch con separación RGB
                float glitch = sin(time * glitchSpeed) * glitchAmp;
                vec2 uv_r = uv + vec2(glitch, 0.0);
                vec2 uv_g = uv + vec2(-glitch * 0.5, glitch * 0.5);
                vec2 uv_b = uv + vec2(0.0, -glitch);

                // Muestreo canales desplazados
                float r = texture(tex, uv_r).r;
                float g = texture(tex, uv_g).g;
                float b = texture(tex, uv_b).b;

                f_color = vec4(r, g, b, 1.0);
            }
            '''
        )

    def __init__(
        self,
        size,
        first_frame,
        context = None,
        amp = 0.02,
        freq = 25.0,
        glitchAmp = 0.02,
        glitchSpeed = 30.0
    ):
        """
        Initialize the node.

        :param amp: Amplitude of the vertical sine
            distortion, in UV units.
        :param freq: Spatial frequency of the sine wave
            along the X axis.
        :param glitchAmp: Strength of the RGB channel
            separation, in UV units.
        :param glitchSpeed: Oscillation speed of the
            RGB-split glitch (multiplies 'time').
        """
        super().__init__(size, first_frame, context)

        # Constant per clip; only 'time' changes per frame
        self.uniforms.set('amp', amp)
        self.uniforms.set('freq', freq)
        self.uniforms.set('glitchAmp', glitchAmp)
        self.uniforms.set('glitchSpeed', glitchSpeed)

    def process(
        self,
        input: Union[moderngl.Texture, 'VideoFrame', 'np.ndarray'],
        t: float = 0.0,
    ) -> moderngl.Texture:
        """
        Apply the shader to the 'input', that
        must be a frame or a texture, and return
        the new resulting texture.

        We use and return textures to maintain
        the process in GPU and optimize it.
        """
        # 'time' drives the wave, the shake and the glitch
        self.uniforms.set('time', t)

        return super().process(input)