yta-video-opengl 0.0.5__py3-none-any.whl → 0.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1275 @@
1
+ """
2
+ TODO: Please, rename, refactor and move.
3
+
4
+ OpenGL doesn't know how to draw a quad
5
+ or any other complex shape. The basic
6
+ primitives OpenGL can handle are triangles,
7
+ so we use different triangles to build
8
+ our shapes (normally a quad).
9
+ """
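+ # A minimal sketch of the idea above (an assumed layout, for
+ # illustration only): the actual geometry is built by
+ # 'get_fullscreen_quad_vao' in yta_video_opengl.utils and may differ.
+ # A fullscreen quad expressed as a triangle strip, interleaving
+ # position (x, y) and texture coordinates (u, v):
+ #
+ # quad = np.array([
+ #     -1.0, -1.0, 0.0, 0.0,   # bottom-left
+ #      1.0, -1.0, 1.0, 0.0,   # bottom-right
+ #     -1.0,  1.0, 0.0, 1.0,   # top-left
+ #      1.0,  1.0, 1.0, 1.0,   # top-right
+ # ], dtype = 'f4')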
10
+ from yta_validation.parameter import ParameterValidator
11
+ from yta_validation import PythonValidator
12
+ from yta_video_opengl.utils import frame_to_texture, get_fullscreen_quad_vao
13
+ from abc import ABC, abstractmethod
14
+ from typing import Any, Union
15
+
16
+ import av
17
+ import moderngl
18
+ import numpy as np
19
+
20
+
21
+ class _Uniforms:
22
+ """
23
+ Class to wrap the functionality related to
24
+ handling the opengl program uniforms.
25
+ """
26
+
27
+ @property
28
+ def uniforms(
29
+ self
30
+ ) -> dict:
31
+ """
32
+ The uniforms in the program, as a dict, in
33
+         the format `{key: value}`.
34
+ """
35
+ return {
36
+ key: self.program[key].value
37
+ for key in self.program
38
+ if PythonValidator.is_instance_of(self.program[key], moderngl.Uniform)
39
+ }
40
+
41
+ def __init__(
42
+ self,
43
+ program: moderngl.Program
44
+ ):
45
+ self.program: moderngl.Program = program
46
+ """
47
+ The program instance this handler class
48
+ belongs to.
49
+ """
50
+
51
+ def get(
52
+ self,
53
+ name: str
54
+     ) -> Union[Any, None]:
55
+ """
56
+ Get the value of the uniform with the
57
+ given 'name'.
58
+ """
59
+ return self.uniforms.get(name, None)
60
+
61
+     # TODO: I need to refactor these methods to
62
+     # maybe accept **kwargs, or to auto-detect
63
+     # the type and set the uniform the way it
64
+     # must be done
65
+ def set(
66
+ self,
67
+ name: str,
68
+ value
69
+ ) -> '_Uniforms':
70
+ """
71
+         Set the provided 'value' on a simple
72
+         (scalar) uniform with the given 'name'.
73
+         Here are some examples of uniforms we can set
74
+ with this method:
75
+ - `uniform float name;`
76
+
77
+ TODO: Add more examples
78
+ """
79
+ if name in self.program:
80
+ self.program[name].value = value
81
+
82
+ return self
83
+
84
+ def set_vec(
85
+ self,
86
+ name: str,
87
+ values
88
+ ) -> '_Uniforms':
89
+ """
90
+         Set the provided 'values' on a vector-type
91
+         uniform with the given 'name'. Here are
92
+         some examples of uniforms we can set
93
+         with this method:
94
+ - `uniform vec2 name;`
95
+
96
+ TODO: Is this example ok? I didn't use it yet
97
+ """
98
+ if name in self.program:
99
+ self.program[name].write(np.array(values, dtype = 'f4').tobytes())
100
+
101
+ return self
102
+
103
+ def set_mat(
104
+ self,
105
+ name: str,
106
+ value
107
+ ) -> '_Uniforms':
108
+ """
109
+ Set the provided 'value' to a `matN` type
110
+ uniform with the given 'name'. The 'value'
111
+         must be an NxN matrix (e.g. a numpy array)
112
+ transformed to bytes ('.tobytes()').
113
+
114
+ This uniform must be defined in the vertex
115
+ like this:
116
+ - `uniform matN name;`
117
+
118
+ TODO: Maybe we can accept a NxN numpy
119
+ array and do the .tobytes() by ourselves...
120
+ """
121
+ if name in self.program:
122
+ self.program[name].write(value)
123
+
124
+ return self
125
+
126
+ def print(
127
+ self
128
+ ) -> '_Uniforms':
129
+ """
130
+ Print the defined uniforms in console.
131
+ """
132
+ for key, value in self.uniforms.items():
133
+             print(f'"{key}": {str(value)}')
+
+         return self
134
+
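+ # A hedged usage sketch (illustrative only, not executed here):
+ # assuming a compiled 'program' whose shaders declare uniforms such as
+ # 'time' (float), 'resolution' (vec2) and 'transform' (mat3), the
+ # wrapper above could be used roughly like this:
+ #
+ # uniforms = _Uniforms(program)
+ # uniforms.set('time', 0.5)
+ # uniforms.set_vec('resolution', (1920.0, 1080.0))
+ # uniforms.set_mat('transform', np.eye(3, dtype = 'f4').tobytes())
+ # uniforms.print()
+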
135
+ class BaseNode(ABC):
136
+ """
137
+ The basic class of a node to manipulate frames
138
+ as opengl textures. This node will process the
139
+     frame as an input texture and will also
140
+     generate a texture as the output.
141
+
142
+     Nodes can be chained: the output of one
143
+     node can be used as the input of another.
144
+ """
145
+
146
+ @property
147
+ @abstractmethod
148
+ def vertex_shader(
149
+ self
150
+ ) -> str:
151
+ """
152
+ The code of the vertex shader.
153
+ """
154
+ pass
155
+
156
+ @property
157
+ @abstractmethod
158
+ def fragment_shader(
159
+ self
160
+ ) -> str:
161
+ """
162
+ The code of the fragment shader.
163
+ """
164
+ pass
165
+
166
+ def __init__(
167
+ self,
168
+ context: moderngl.Context,
169
+ size: tuple[int, int],
170
+ **kwargs
171
+ ):
172
+ ParameterValidator.validate_mandatory_instance_of('context', context, moderngl.Context)
173
+ # TODO: Validate size
174
+
175
+ self.context: moderngl.Context = context
176
+ """
177
+ The context of the program.
178
+ """
179
+ self.size: tuple[int, int] = size
180
+ """
181
+ The size we want to use for the frame buffer
182
+ in a (width, height) format.
183
+ """
184
+ # Compile shaders within the program
185
+ self.program: moderngl.Program = self.context.program(
186
+ vertex_shader = self.vertex_shader,
187
+ fragment_shader = self.fragment_shader
188
+ )
189
+
190
+ # Create the fullscreen quad
191
+ self.quad = get_fullscreen_quad_vao(
192
+ context = self.context,
193
+ program = self.program
194
+ )
195
+
196
+ # Create the output fbo
197
+ self.output_tex = self.context.texture(self.size, 4)
198
+ self.output_tex.filter = (moderngl.LINEAR, moderngl.LINEAR)
199
+ self.fbo = self.context.framebuffer(color_attachments = [self.output_tex])
200
+
201
+ self.uniforms: _Uniforms = _Uniforms(self.program)
202
+ """
203
+ Shortcut to the uniforms functionality.
204
+ """
205
+ # Auto set uniforms dynamically if existing
206
+ for key, value in kwargs.items():
207
+ self.uniforms.set(key, value)
208
+
209
+ def process(
210
+ self,
211
+ input: Union[moderngl.Texture, 'VideoFrame', 'np.ndarray']
212
+ ) -> moderngl.Texture:
213
+ """
214
+         Apply the shader to the 'input', which
215
+         must be a frame or a texture, and return
216
+         the new resulting texture.
217
+
218
+         We use and return textures to keep the
219
+         processing on the GPU and optimize it.
220
+ """
221
+ # TODO: Maybe we can accept a VideoFrame
222
+ # or a numpy array and transform it here
223
+ # into a texture, ready to be used:
224
+ # frame_to_texture(
225
+ # # TODO: Do not use Pillow
226
+ # frame = np.array(Image.open("input.jpg").convert("RGBA")),
227
+ # context = self.context,
228
+ # numpy_format = 'rgba'
229
+ # )
230
+ if PythonValidator.is_instance_of(input, ['VideoFrame', 'ndarray']):
231
+ # TODO: What about the numpy format (?)
232
+ input = frame_to_texture(input, self.context)
233
+
234
+ self.fbo.use()
235
+ self.context.clear(0.0, 0.0, 0.0, 0.0)
236
+
237
+ input.use(location = 0)
238
+
239
+ if 'texture' in self.program:
240
+ self.program['texture'] = 0
241
+
242
+ self.quad.render()
243
+
244
+ return self.output_tex
245
+
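+ # A hedged chaining sketch (illustrative only): assuming 'node_a' and
+ # 'node_b' are BaseNode subclasses created with the same context and
+ # size, the output texture of one node can be fed into the next one:
+ #
+ # texture = node_a.process(frame)
+ # texture = node_b.process(texture)
+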
246
+ class WavingNode(BaseNode):
247
+ """
248
+     Just an example node to show that we can
249
+     pass custom parameters (set as uniforms)
250
+     to make it work.
251
+ """
252
+
253
+ @property
254
+ def vertex_shader(
255
+ self
256
+ ) -> str:
257
+ return (
258
+ '''
259
+ #version 330
260
+ in vec2 in_vert;
261
+ in vec2 in_texcoord;
262
+ out vec2 v_uv;
263
+ void main() {
264
+ v_uv = in_texcoord;
265
+ gl_Position = vec4(in_vert, 0.0, 1.0);
266
+ }
267
+ '''
268
+ )
269
+
270
+ @property
271
+ def fragment_shader(
272
+ self
273
+ ) -> str:
274
+ return (
275
+ '''
276
+ #version 330
277
+ uniform sampler2D tex;
278
+ uniform float time;
279
+ uniform float amplitude;
280
+ uniform float frequency;
281
+ uniform float speed;
282
+ in vec2 v_uv;
283
+ out vec4 f_color;
284
+ void main() {
285
+ float wave = sin(v_uv.x * frequency + time * speed) * amplitude;
286
+ vec2 uv = vec2(v_uv.x, v_uv.y + wave);
287
+ f_color = texture(tex, uv);
288
+ }
289
+ '''
290
+ )
291
+
292
+ def __init__(
293
+ self,
294
+ context: moderngl.Context,
295
+ size: tuple[int, int],
296
+ amplitude: float = 0.05,
297
+ frequency: float = 10.0,
298
+ speed: float = 2.0
299
+ ):
300
+ super().__init__(
301
+ context = context,
302
+ size = size,
303
+ amplitude = amplitude,
304
+ frequency = frequency,
305
+ speed = speed
306
+ )
307
+
308
+     # This is just an example and we are not
309
+     # actually using the parameters yet, but we
310
+     # could set those specific uniforms to be
311
+     # consumed by the shader code
312
+ def process(
313
+ self,
314
+ input: Union[moderngl.Texture, 'VideoFrame', 'np.ndarray'],
315
+ t: float = 0.0,
316
+ ) -> moderngl.Texture:
317
+ """
318
+         Apply the shader to the 'input', which
319
+         must be a frame or a texture, and return
320
+         the new resulting texture.
321
+
322
+         We use and return textures to keep the
323
+         processing on the GPU and optimize it.
324
+ """
325
+ self.uniforms.set('time', t)
326
+
327
+ return super().process(input)
328
+
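+ # A hedged usage sketch (illustrative only): assuming a standalone
+ # moderngl context, a (width, height) size and caller-side values
+ # 'frame', 'frame_index' and 'fps' (all hypothetical here), a
+ # WavingNode could be driven per frame with an increasing 't' so the
+ # 'time' uniform animates the wave:
+ #
+ # context = moderngl.create_context(standalone = True)
+ # node = WavingNode(context, (1920, 1080), amplitude = 0.03)
+ # texture = node.process(frame, t = frame_index / fps)
+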
329
+
330
+
331
+ """
332
+ TODO: I should try to use the Node classes
333
+ to manipulate the frames because this is how
334
+ DaVinci Resolve and other editors work.
335
+ """
336
+
337
+
338
+ class FrameShaderBase(ABC):
339
+ """
340
+ Class to be inherited by any of our own
341
+ custom opengl program classes.
342
+
343
+ This shader base class must be used by all
344
+     the classes that modify the frames
345
+ one by one.
346
+ """
347
+
348
+ @property
349
+ @abstractmethod
350
+ def vertex_shader(
351
+ self
352
+ ) -> str:
353
+ """
354
+ Source code of the vertex shader.
355
+ """
356
+ pass
357
+
358
+ @property
359
+ @abstractmethod
360
+ def fragment_shader(
361
+ self
362
+ ) -> str:
363
+ """
364
+ Source code of the fragment shader.
365
+ """
366
+ pass
367
+
368
+ def __init__(
369
+ self,
370
+ size: tuple[int, int],
371
+ first_frame: Union['VideoFrame', 'np.ndarray'],
372
+ context: Union[moderngl.Context, None] = None,
373
+ ):
374
+ context = (
375
+ moderngl.create_context(standalone = True)
376
+ if context is None else
377
+ context
378
+ )
379
+
380
+ self.size: tuple[int, int] = size
381
+ """
382
+ The size we want to use for the frame buffer
383
+ in a (width, height) format.
384
+ """
385
+ self.first_frame: Union['VideoFrame', 'np.ndarray'] = first_frame
386
+ """
387
+ The first frame of the video in which we will
388
+ apply the effect. Needed to build the texture.
389
+ """
390
+ self.context: moderngl.Context = context
391
+ """
392
+ The context of the program.
393
+ """
394
+ self.program: moderngl.Program = None
395
+ """
396
+ The opengl program.
397
+ """
398
+ self.fbo: moderngl.Framebuffer = None
399
+ """
400
+ The frame buffer object.
401
+ """
402
+ self.uniforms: _Uniforms = None
403
+ """
404
+ Shortcut to the uniforms functionality.
405
+ """
406
+
407
+ self._initialize_program()
408
+
409
+ def _initialize_program(
410
+ self
411
+ ):
412
+ """
413
+         This method exists so the effects can
414
+         override their '__init__' method and
415
+         provide parameters that will be set as
416
+         uniforms.
417
+ """
418
+ # Compile shaders within the program
419
+ self.program: moderngl.Program = self.context.program(
420
+ vertex_shader = self.vertex_shader,
421
+ fragment_shader = self.fragment_shader
422
+ )
423
+
424
+ # Create frame buffer
425
+ self.fbo = self.context.simple_framebuffer(self.size)
426
+ # Create quad vertex array
427
+ self.vao: moderngl.VertexArray = get_fullscreen_quad_vao(self.context, self.program)
428
+ self.uniforms: _Uniforms = _Uniforms(self.program)
429
+
430
+ # TODO: How do I manage these textures (?)
431
+ self.textures = {}
432
+
433
+ # TODO: Should we do this here (?)
434
+ texture: moderngl.Texture = frame_to_texture(self.first_frame, self.context)
435
+ texture.build_mipmaps()
436
+
437
+     # TODO: I'm not using this method, but it sounds
438
+     # interesting for splitting the 'process_frame'
439
+     # method into different mini actions
440
+ def load_texture(
441
+ self,
442
+ image: np.ndarray,
443
+ uniform_name: str,
444
+ texture_unit = 0
445
+ ):
446
+ """
447
+ Load a texture with the given 'image' and set
448
+ it to the uniform with the given 'uniform_name'.
449
+
450
+ TODO: Understand better the 'texture_unit'
451
+ """
452
+ # This is to receive a path (str) to an image
453
+ #img = Image.open(path).transpose(Image.FLIP_TOP_BOTTOM).convert("RGBA")
454
+ image = np.flipud(image)
455
+ tex = self.context.texture((image.shape[1], image.shape[0]), 4, image.tobytes())
456
+ tex.use(texture_unit)
457
+ self.textures[uniform_name] = tex
458
+ self.uniforms.set(uniform_name, texture_unit)
459
+
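+     # A hedged usage sketch (illustrative only): assuming 'overlay' is
+     # an RGBA uint8 numpy array (hypothetical) and the fragment shader
+     # declares 'uniform sampler2D overlay_tex;', this helper could be
+     # called as:
+     #
+     # self.load_texture(overlay, 'overlay_tex', texture_unit = 1)
+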
460
+ @abstractmethod
461
+ def _prepare_frame(
462
+ self,
463
+ t: float
464
+ ):
465
+ """
466
+         Set the uniforms we need to process that
467
+         specific frame, running the code that
468
+         calculates those uniforms.
469
+ """
470
+ pass
471
+
472
+ def process_frame(
473
+ self,
474
+ frame: Union['VideoFrame', np.ndarray],
475
+ t: float,
476
+ numpy_format: str = 'rgb24'
477
+ ) -> 'VideoFrame':
478
+         # TODO: This method accepts 'np.ndarray' to
479
+         # prepare it for frames coming from a source
480
+         # other than reading a video here (frames read
481
+         # here will be processed as VideoFrame). Check the
482
+         # sizes and [0], [1] indexes.
483
+ ParameterValidator.validate_mandatory_instance_of('frame', frame, ['VideoFrame', 'ndarray'])
484
+
485
+         # For now I call this here because I don't need
486
+         # to send anything specific when calculating the
487
+         # frame...
488
+ self._prepare_frame(t)
489
+
490
+ # Set frame as a texture
491
+ texture = frame_to_texture(frame, self.context, numpy_format)
492
+ # TODO: Why 0 (?)
493
+ #texture.use(0)
494
+ texture.use()
495
+
496
+ # # TODO: Check this
497
+ # if 'u_texture' in self.program:
498
+ # self.program['u_texture'].value = 0
499
+
500
+         # Clear the frame buffer to a whole black frame
501
+ self.context.clear(0.0, 0.0, 0.0)
502
+ # TODO: No 'self.fbo.use()' here (?)
503
+ self.fbo.use()
504
+ self.vao.render(moderngl.TRIANGLE_STRIP)
505
+
506
+ # Read output of fbo
507
+ output = np.flipud(
508
+ np.frombuffer(
509
+ self.fbo.read(components = 3, alignment = 1),
510
+ dtype = np.uint8
511
+ ).reshape((texture.size[1], texture.size[0], 3))
512
+ #).reshape((self.size[1], self.size[0], 3))
513
+ )
514
+
515
+         # We want a VideoFrame instance because
516
+         # we can send it directly to the mux to
517
+         # write
518
+ output: 'VideoFrame' = av.VideoFrame.from_ndarray(output, format = numpy_format)
519
+
520
+ return output
521
+
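+ # A hedged usage sketch (illustrative only): assuming frames decoded
+ # elsewhere (for example with PyAV) and hypothetical caller-side
+ # values 'first_frame', 'frames' and 'fps', a FrameShaderBase subclass
+ # such as the WavingFrame below is meant to be driven frame by frame:
+ #
+ # effect = WavingFrame(size = (1920, 1080), first_frame = first_frame)
+ # for index, frame in enumerate(frames):
+ #     new_frame = effect.process_frame(frame, t = index / fps)
+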
522
+ # Example classes below
523
+ class WavingFrame(FrameShaderBase):
524
+ """
525
+ The frame but waving as a flag.
526
+ """
527
+
528
+ @property
529
+ def vertex_shader(
530
+ self
531
+ ) -> str:
532
+ return (
533
+ '''
534
+ #version 330
535
+ in vec2 in_vert;
536
+ in vec2 in_texcoord;
537
+ out vec2 v_uv;
538
+ void main() {
539
+ v_uv = in_texcoord;
540
+ gl_Position = vec4(in_vert, 0.0, 1.0);
541
+ }
542
+ '''
543
+ )
544
+
545
+ @property
546
+ def fragment_shader(
547
+ self
548
+ ) -> str:
549
+ return (
550
+ '''
551
+ #version 330
552
+ uniform sampler2D tex;
553
+ uniform float time;
554
+ uniform float amp;
555
+ uniform float freq;
556
+ uniform float speed;
557
+ in vec2 v_uv;
558
+ out vec4 f_color;
559
+ void main() {
560
+ float wave = sin(v_uv.x * freq + time * speed) * amp;
561
+ vec2 uv = vec2(v_uv.x, v_uv.y + wave);
562
+ f_color = texture(tex, uv);
563
+ }
564
+ '''
565
+ )
566
+
567
+ def __init__(
568
+ self,
569
+ size,
570
+ first_frame,
571
+ context = None,
572
+ amplitude: float = 0.05,
573
+ frequency: float = 10.0,
574
+ speed: float = 2.0
575
+ ):
576
+ super().__init__(size, first_frame, context)
577
+
578
+         # TODO: Use an automatic way of detecting the
579
+ # parameters that are not 'self', 'size',
580
+ # 'first_frame' nor 'context' and set those
581
+ # as uniforms automatically
582
+
583
+ self.uniforms.set('amp', amplitude)
584
+ self.uniforms.set('freq', frequency)
585
+ self.uniforms.set('speed', speed)
586
+
587
+ def _prepare_frame(
588
+ self,
589
+ t: float
590
+ ) -> 'WavingFrame':
591
+ """
592
+ Precalculate all the things we need to process
593
+ a frame, like the uniforms, etc.
594
+ """
595
+ self.uniforms.set('time', t)
596
+
597
+ return self
598
+
599
+ class BreathingFrame(FrameShaderBase):
600
+ """
601
+ The frame but as if it was breathing.
602
+ """
603
+
604
+ @property
605
+ def vertex_shader(
606
+ self
607
+ ) -> str:
608
+ return (
609
+ '''
610
+ #version 330
611
+ in vec2 in_vert;
612
+ in vec2 in_texcoord;
613
+ out vec2 v_text;
614
+ void main() {
615
+ gl_Position = vec4(in_vert, 0.0, 1.0);
616
+ v_text = in_texcoord;
617
+ }
618
+ '''
619
+ )
620
+
621
+ @property
622
+ def fragment_shader(
623
+ self
624
+ ) -> str:
625
+ return (
626
+ '''
627
+ #version 330
628
+ uniform sampler2D tex;
629
+ uniform float time;
630
+ in vec2 v_text;
631
+ out vec4 f_color;
632
+ // Use uniforms to be customizable
633
+
634
+ void main() {
635
+ // Dynamic zoom scaled with t
636
+ float scale = 1.0 + 0.05 * sin(time * 2.0); // 5% de zoom
637
+ vec2 center = vec2(0.5, 0.5);
638
+
639
+ // Recalculate coords according to center
640
+ vec2 uv = (v_text - center) / scale + center;
641
+
642
+ // Clamp to avoid artifacts
643
+ uv = clamp(uv, 0.0, 1.0);
644
+
645
+ f_color = texture(tex, uv);
646
+ }
647
+ '''
648
+ )
649
+
650
+ def _prepare_frame(
651
+ self,
652
+ t: float
653
+ ) -> 'BreathingFrame':
654
+         # TODO: Use an automatic way of detecting the
655
+ # parameters that are not 'self', 'size',
656
+ # 'first_frame' nor 'context' and set those
657
+ # as uniforms automatically
658
+
659
+ self.uniforms.set('time', t)
660
+
661
+ return self
662
+
663
+ class HandheldFrame(FrameShaderBase):
664
+ """
665
+     The frame but as if it was being recorded
666
+     by someone holding a camera who is not
667
+     100% stable.
668
+ """
669
+
670
+ @property
671
+ def vertex_shader(
672
+ self
673
+ ) -> str:
674
+ return (
675
+ '''
676
+ #version 330
677
+ in vec2 in_vert;
678
+ in vec2 in_texcoord;
679
+ out vec2 v_text;
680
+
681
+ uniform mat3 transform;
682
+
683
+ void main() {
684
+ vec3 pos = vec3(in_vert, 1.0);
685
+ pos = transform * pos;
686
+ gl_Position = vec4(pos.xy, 0.0, 1.0);
687
+ v_text = in_texcoord;
688
+ }
689
+ '''
690
+ )
691
+
692
+ @property
693
+ def fragment_shader(
694
+ self
695
+ ) -> str:
696
+ return (
697
+ '''
698
+ #version 330
699
+ uniform sampler2D tex;
700
+ in vec2 v_text;
701
+ out vec4 f_color;
702
+
703
+ void main() {
704
+ f_color = texture(tex, v_text);
705
+ }
706
+ '''
707
+ )
708
+
709
+ def _prepare_frame(
710
+ self,
711
+ t: float
712
+ ) -> 'HandheldFrame':
713
+ import math
714
+ def handheld_matrix_exaggerated(t):
715
+             # More noticeable rotation
716
+             angle = smooth_noise(t, freq=0.8, scale=0.05) # previously 0.02
717
+
718
+             # Larger translations
719
+             tx = smooth_noise(t, freq=1.1, scale=0.04) # previously 0.015
720
+             ty = smooth_noise(t, freq=1.4, scale=0.04)
721
+
722
+             # More aggressive zoom
723
+             zoom = 1.0 + smooth_noise(t, freq=0.5, scale=0.06) # previously 0.02
724
+
725
+ cos_a, sin_a = math.cos(angle), math.sin(angle)
726
+
727
+ return np.array([
728
+ [ cos_a * zoom, -sin_a * zoom, tx],
729
+ [ sin_a * zoom, cos_a * zoom, ty],
730
+ [ 0.0, 0.0, 1.0]
731
+ ], dtype="f4")
732
+
733
+ def smooth_noise(t, freq=1.5, scale=1.0):
734
+             """Small organic noise mixing sines and cosines"""
735
+ return (
736
+ math.sin(t * freq) +
737
+ 0.5 * math.cos(t * freq * 0.5 + 1.7) +
738
+ 0.25 * math.sin(t * freq * 0.25 + 2.5)
739
+ ) * scale
740
+
741
+ def handheld_matrix(t):
742
+             # Slight rotation (in radians)
743
+ angle = smooth_noise(t, freq=0.8, scale=0.02)
744
+
745
+             # Horizontal/vertical translation
746
+ tx = smooth_noise(t, freq=1.1, scale=0.015)
747
+ ty = smooth_noise(t, freq=1.4, scale=0.015)
748
+
749
+             # Zoom (scale)
750
+ zoom = 1.0 + smooth_noise(t, freq=0.5, scale=0.02)
751
+
752
+ cos_a, sin_a = math.cos(angle), math.sin(angle)
753
+
754
+             # Transformation matrix: Zoom * Rotation + Translation
755
+ return np.array([
756
+ [ cos_a * zoom, -sin_a * zoom, tx],
757
+ [ sin_a * zoom, cos_a * zoom, ty],
758
+ [ 0.0, 0.0, 1.0]
759
+ ], dtype = "f4")
760
+
761
+ self.uniforms.set_mat('transform', handheld_matrix_exaggerated(t).tobytes())
762
+
763
+ return self
764
+
765
+ class OrbitingFrame(FrameShaderBase):
766
+ """
767
+ The frame but orbiting around the camera.
768
+ """
769
+
770
+ @property
771
+ def vertex_shader(
772
+ self
773
+ ) -> str:
774
+ return (
775
+ '''
776
+ #version 330
777
+
778
+ in vec2 in_vert;
779
+ in vec2 in_texcoord;
780
+
781
+ out vec2 v_uv;
782
+
783
+ uniform mat4 mvp; // Model-View-Projection matrix
784
+
785
+ void main() {
786
+ v_uv = in_texcoord;
787
+                 // The quad is in XY, we extend it to XYZ with z=0
788
+ vec4 pos = vec4(in_vert, 0.0, 1.0);
789
+ gl_Position = mvp * pos;
790
+ }
791
+ '''
792
+ )
793
+
794
+ @property
795
+ def fragment_shader(
796
+ self
797
+ ) -> str:
798
+ return (
799
+ '''
800
+ #version 330
801
+
802
+ uniform sampler2D tex;
803
+ in vec2 v_uv;
804
+ out vec4 f_color;
805
+
806
+ void main() {
807
+ f_color = texture(tex, v_uv);
808
+ }
809
+ '''
810
+ )
811
+
812
+ def _prepare_frame(
813
+ self,
814
+ t: float
815
+ ) -> 'OrbitingFrame':
816
+ def perspective(fov_y_rad, aspect, near, far):
817
+ f = 1.0 / np.tan(fov_y_rad / 2.0)
818
+ m = np.zeros((4,4), dtype='f4')
819
+ m[0,0] = f / aspect
820
+ m[1,1] = f
821
+ m[2,2] = (far + near) / (near - far)
822
+ m[2,3] = (2 * far * near) / (near - far)
823
+ m[3,2] = -1.0
824
+ return m
825
+
826
+ def look_at(eye, target, up=(0,1,0)):
827
+ eye = np.array(eye, dtype='f4')
828
+ target = np.array(target, dtype='f4')
829
+ up = np.array(up, dtype='f4')
830
+
831
+ f = target - eye
832
+ f = f / np.linalg.norm(f)
833
+ s = np.cross(f, up)
834
+ s = s / np.linalg.norm(s)
835
+ u = np.cross(s, f)
836
+
837
+ m = np.eye(4, dtype='f4')
838
+ m[0,0:3] = s
839
+ m[1,0:3] = u
840
+ m[2,0:3] = -f
841
+ m[0,3] = -np.dot(s, eye)
842
+ m[1,3] = -np.dot(u, eye)
843
+ m[2,3] = np.dot(f, eye)
844
+ return m
845
+
846
+ def translate(x, y, z):
847
+ m = np.eye(4, dtype='f4')
848
+ m[0,3] = x
849
+ m[1,3] = y
850
+ m[2,3] = z
851
+ return m
852
+
853
+ def rotate_y(angle):
854
+ c, s = np.cos(angle), np.sin(angle)
855
+ m = np.eye(4, dtype='f4')
856
+ m[0,0], m[0,2] = c, s
857
+ m[2,0], m[2,2] = -s, c
858
+ return m
859
+
860
+ def scale_uniform(k):
861
+ m = np.eye(4, dtype='f4')
862
+ m[0,0] = m[1,1] = m[2,2] = k
863
+ return m
864
+
865
+ def carousel_mvp(t, *,
866
+ aspect,
867
+ fov_deg=60.0,
868
+ radius=4.0,
869
+ center_z=-6.0,
870
+ speed=1.0,
871
+ face_center_strength=1.0,
872
+ extra_scale=1.0):
873
+ """
874
+             t: time in seconds
875
+             aspect: width/height of the framebuffer
876
+             radius: radius in XZ
877
+             center_z: shifts the whole carousel towards -Z so it sits in front of the camera
878
+             speed: angular speed
879
+             face_center_strength: 1.0 = the panel faces the center; 0.0 = it does not rotate with the orbit
880
+ """
881
+
882
+             # Projection and view (camera at the origin looking towards -Z)
883
+ proj = perspective(np.radians(fov_deg), aspect, 0.1, 100.0)
884
+             view = np.eye(4, dtype='f4') # or look_at((0,0,0), (0,0,-1))
885
+
886
+             # Orbit angle (choose the offset so it "enters" from the left)
887
+ theta = speed * t - np.pi * 0.5
888
+
889
+             # Orbit in XZ with the center shifted to center_z
890
+ # x = radius * np.cos(theta)
891
+ # z = radius * np.sin(theta) + center_z
892
+ x = radius * np.cos(theta)
893
+ z = (radius * 0.2) * np.sin(theta) + center_z
894
+
895
+             # Yaw so the panel points at the center (0,0,center_z)
896
+             # The vector from panel -> center is (-x, 0, center_z - z)
897
+ yaw_to_center = np.arctan2(-x, (center_z - z)) # atan2(X, Z)
898
+ yaw = face_center_strength * yaw_to_center
899
+
900
+ model = translate(x, 0.0, z) @ rotate_y(yaw) @ scale_uniform(extra_scale)
901
+
902
+             # IMPORTANT! OpenGL expects column-major: we transpose when writing
903
+ mvp = proj @ view @ model
904
+ return mvp
905
+
906
+ aspect = self.size[0] / self.size[1]
907
+ mvp = carousel_mvp(t, aspect=aspect, radius=4.0, center_z=-4.0, speed=1.2, face_center_strength=1.0, extra_scale = 1.0)
908
+
909
+ self.uniforms.set_mat('mvp', mvp.T.tobytes())
910
+
911
+ return self
912
+
913
+ class RotatingInCenterFrame(FrameShaderBase):
914
+ """
915
+     The frame rotating in place around the Y axis.
916
+ """
917
+
918
+ @property
919
+ def vertex_shader(
920
+ self
921
+ ) -> str:
922
+ return (
923
+ '''
924
+ #version 330
925
+
926
+ in vec2 in_vert;
927
+ in vec2 in_texcoord;
928
+ out vec2 v_uv;
929
+
930
+ uniform float time;
931
+ uniform float speed;
932
+
933
+ void main() {
934
+ v_uv = in_texcoord;
935
+
936
+                 // Rotation around the Y axis
937
+                 float angle = time * speed; // you can use time directly, or time * speed
938
+ float cosA = cos(angle);
939
+ float sinA = sin(angle);
940
+
941
+                 // Convert the quad to 3D (x, y, z)
942
+ vec3 pos = vec3(in_vert.xy, 0.0);
943
+
944
+                 // Y rotation
945
+ mat3 rotY = mat3(
946
+ cosA, 0.0, sinA,
947
+ 0.0 , 1.0, 0.0,
948
+ -sinA, 0.0, cosA
949
+ );
950
+
951
+ pos = rotY * pos;
952
+
953
+ gl_Position = vec4(pos, 1.0);
954
+ }
955
+ '''
956
+ )
957
+
958
+ @property
959
+ def fragment_shader(
960
+ self
961
+ ) -> str:
962
+ return (
963
+ '''
964
+ #version 330
965
+
966
+ in vec2 v_uv;
967
+ out vec4 f_color;
968
+
969
+ uniform sampler2D tex;
970
+
971
+ void main() {
972
+ f_color = texture(tex, v_uv);
973
+ }
974
+ '''
975
+ )
976
+
977
+ def __init__(
978
+ self,
979
+ size,
980
+ first_frame,
981
+ context = None,
982
+ speed: float = 30
983
+ ):
984
+ super().__init__(size, first_frame, context)
985
+
986
+ self.uniforms.set('speed', speed)
987
+
988
+ def _prepare_frame(
989
+ self,
990
+ t: float
991
+     ) -> 'RotatingInCenterFrame':
992
+ self.uniforms.set('time', t)
993
+
994
+ return self
995
+
996
+ class StrangeTvFrame(FrameShaderBase):
997
+ """
998
+     Nice effect that simulates a TV screen: lens distortion,
+     chromatic aberration, motion blur, scanlines, vignette,
+     grain and flicker.
999
+ """
1000
+
1001
+ @property
1002
+ def vertex_shader(
1003
+ self
1004
+ ) -> str:
1005
+ return (
1006
+ '''
1007
+ #version 330
1008
+ in vec2 in_vert;
1009
+ in vec2 in_texcoord;
1010
+ out vec2 v_uv;
1011
+
1012
+ void main() {
1013
+ v_uv = in_texcoord;
1014
+ gl_Position = vec4(in_vert, 0.0, 1.0);
1015
+ }
1016
+ '''
1017
+ )
1018
+
1019
+ @property
1020
+ def fragment_shader(
1021
+ self
1022
+ ) -> str:
1023
+ return (
1024
+ '''
1025
+ #version 330
1026
+
1027
+ uniform sampler2D tex;
1028
+ uniform float time;
1029
+
1030
+             // ---- Main parameters (tweak them at runtime) ----
1031
+             uniform float aberr_strength; // 0..3 (strength of the radial RGB split)
1032
+             uniform float barrel_k; // -0.5..0.5 (lens distortion; positive = barrel)
1033
+             uniform float blur_radius; // 0..0.02 (motion blur radius in UV)
1034
+             uniform float blur_angle; // in radians (direction of the smear)
1035
+             uniform int blur_samples; // 4..24 (blur taps)
1036
+ uniform float vignette_strength; // 0..2
1037
+ uniform float grain_amount; // 0..0.1
1038
+ uniform float flicker_amount; // 0..0.2
1039
+ uniform float scanline_amount; // 0..0.2
1040
+
1041
+ in vec2 v_uv;
1042
+ out vec4 f_color;
1043
+
1044
+ // --- helpers ---
1045
+ float rand(vec2 co){
1046
+ return fract(sin(dot(co, vec2(12.9898,78.233))) * 43758.5453);
1047
+ }
1048
+
1049
+             // Barrel distortion (simple, k>0 curves outwards)
1050
+ vec2 barrel(vec2 uv, float k){
1051
+ // map to [-1,1]
1052
+ vec2 p = uv * 2.0 - 1.0;
1053
+ float r2 = dot(p, p);
1054
+ p *= (1.0 + k * r2);
1055
+ // back to [0,1]
1056
+ return p * 0.5 + 0.5;
1057
+ }
1058
+
1059
+             // Radial chromatic aberration
1060
+             vec3 sample_chromatic(sampler2D t, vec2 uv, vec2 center, float strength){
1061
+                 // Radial offset based on the distance to the center
1062
+ vec2 d = uv - center;
1063
+ float r = length(d);
1064
+ vec2 dir = (r > 1e-5) ? d / r : vec2(0.0);
1065
+                 // Each channel is shifted slightly differently
1066
+                 float s = strength * r * 0.005; // fine scale
1067
+ float sr = s * 1.0;
1068
+ float sg = s * 0.5;
1069
+                 float sb = s * -0.5; // blue inwards for contrast
1070
+
1071
+ float rC = texture(t, uv + dir * sr).r;
1072
+ float gC = texture(t, uv + dir * sg).g;
1073
+ float bC = texture(t, uv + dir * sb).b;
1074
+ return vec3(rC, gC, bC);
1075
+ }
1076
+
1077
+ void main(){
1078
+ vec2 uv = v_uv;
1079
+ vec2 center = vec2(0.5, 0.5);
1080
+
1081
+                 // Lens (barrel/pincushion)
1082
+ uv = barrel(uv, barrel_k);
1083
+
1084
+                 // Early out if we go too far outside (edge fade)
1085
+ vec2 uv_clamped = clamp(uv, 0.0, 1.0);
1086
+ float edge = smoothstep(0.0, 0.02, 1.0 - max(max(-uv.x, uv.x-1.0), max(-uv.y, uv.y-1.0)));
1087
+
1088
+                 // Motion blur direction
1089
+                 vec2 dir = vec2(cos(blur_angle), sin(blur_angle));
1090
+                 // Small temporal variation so it "breathes"
1091
+ float jitter = (sin(time * 13.0) * 0.5 + 0.5) * 0.4 + 0.6;
1092
+
1093
+                 // Weighted blur accumulation
1094
+ vec3 acc = vec3(0.0);
1095
+ float wsum = 0.0;
1096
+
1097
+ int N = max(1, blur_samples);
1098
+                 for(int i = 0; i < 64; ++i){ // hard safety cap
1099
+ if(i >= N) break;
1100
+                     // t from -1..1 spreads samples to both sides
1101
+ float fi = float(i);
1102
+ float t = (fi / float(N - 1)) * 2.0 - 1.0;
1103
+
1104
+                     // weight curve (gaussian approximation)
1105
+                     float w = exp(-t*t * 2.5);
1106
+                     // base offset
1107
+ vec2 ofs = dir * t * blur_radius * jitter;
1108
+
1109
+                     // per-sample micro-jitter to break banding
1110
+ ofs += vec2(rand(uv + fi)*0.0005, rand(uv + fi + 3.14)*0.0005) * blur_radius;
1111
+
1112
+                     // sampling with chromatic aberration
1113
+ vec3 c = sample_chromatic(tex, uv + ofs, center, aberr_strength);
1114
+
1115
+ acc += c * w;
1116
+ wsum += w;
1117
+ }
1118
+ vec3 col = acc / max(wsum, 1e-6);
1119
+
1120
+ // Scanlines + flicker
1121
+ float scan = 1.0 - scanline_amount * (0.5 + 0.5 * sin((uv.y + time*1.7)*3.14159*480.0));
1122
+ float flick = 1.0 + flicker_amount * (sin(time*60.0 + uv.x*10.0) * 0.5 + 0.5);
1123
+ col *= scan * flick;
1124
+
1125
+ // Vignette (radial)
1126
+ float r = distance(uv, center);
1127
+ float vig = 1.0 - smoothstep(0.7, 1.0, r * (1.0 + 0.5*vignette_strength));
1128
+ col *= mix(1.0, vig, vignette_strength);
1129
+
1130
+                 // Grain
1131
+ float g = (rand(uv * (time*37.0 + 1.0)) - 0.5) * 2.0 * grain_amount;
1132
+ col += g;
1133
+
1134
+                 // Edge fade from the clamp/warp
1135
+ col *= edge;
1136
+
1137
+ f_color = vec4(col, 1.0);
1138
+ }
1139
+ '''
1140
+ )
1141
+
1142
+ def __init__(
1143
+ self,
1144
+ size,
1145
+ first_frame,
1146
+ context = None,
1147
+ aberr_strength = 1.5,
1148
+ barrel_k = 0.08,
1149
+ blur_radius = 0.006,
1150
+ blur_angle = 0.0, # (0 = horizontal, 1.57 ≈ vertical)
1151
+ blur_samples = 12,
1152
+ vignette_strength = 0.8,
1153
+ grain_amount = 0.02,
1154
+ flicker_amount = 0.05,
1155
+ scanline_amount = 0.05
1156
+ ):
1157
+ super().__init__(size, first_frame, context)
1158
+
1159
+ self.uniforms.set('aberr_strength', aberr_strength)
1160
+ self.uniforms.set('barrel_k', barrel_k)
1161
+ self.uniforms.set('blur_radius', blur_radius)
1162
+ self.uniforms.set('blur_angle', blur_angle)
1163
+ self.uniforms.set('blur_samples', blur_samples)
1164
+ self.uniforms.set('vignette_strength', vignette_strength)
1165
+ self.uniforms.set('grain_amount', grain_amount)
1166
+ self.uniforms.set('flicker_amount', flicker_amount)
1167
+ self.uniforms.set('scanline_amount', scanline_amount)
1168
+
1169
+ def _prepare_frame(
1170
+ self,
1171
+ t: float
1172
+     ) -> 'StrangeTvFrame':
1173
+ self.uniforms.set('time', t)
1174
+
1175
+ return self
1176
+
1177
+ class GlitchRgbFrame(FrameShaderBase):
1178
+ """
1179
+     Glitch effect with a sinusoidal wobble, camera shake
+     and RGB channel separation.
1180
+ """
1181
+
1182
+ @property
1183
+ def vertex_shader(
1184
+ self
1185
+ ) -> str:
1186
+ return (
1187
+ '''
1188
+ #version 330
1189
+
1190
+ // ----------- Vertex Shader -----------
1191
+ in vec2 in_vert;
1192
+ in vec2 in_texcoord;
1193
+
1194
+ out vec2 v_uv;
1195
+
1196
+ void main() {
1197
+ v_uv = in_texcoord;
1198
+ gl_Position = vec4(in_vert, 0.0, 1.0);
1199
+ }
1200
+ '''
1201
+ )
1202
+
1203
+ @property
1204
+ def fragment_shader(
1205
+ self
1206
+ ) -> str:
1207
+ return (
1208
+ '''
1209
+ #version 330
1210
+
1211
+ // ----------- Fragment Shader -----------
1212
+ uniform sampler2D tex;
1213
+ uniform float time;
1214
+
1215
+             // Effect intensities
1216
+             uniform float amp; // distortion amplitude
1217
+             uniform float freq; // wave frequency
1218
+             uniform float glitchAmp; // glitch strength
1219
+ uniform float glitchSpeed;
1220
+
1221
+ in vec2 v_uv;
1222
+ out vec4 f_color;
1223
+
1224
+ void main() {
1225
+                 // Sinusoidal distortion in Y
1226
+ float wave = sin(v_uv.x * freq + time * 2.0) * amp;
1227
+
1228
+                 // Small random displacement (shake)
1229
+ float shakeX = (fract(sin(time * 12.9898) * 43758.5453) - 0.5) * 0.01;
1230
+ float shakeY = (fract(sin(time * 78.233) * 12345.6789) - 0.5) * 0.01;
1231
+
1232
+                 // Base coordinates with the distortion applied
1233
+ vec2 uv = vec2(v_uv.x + shakeX, v_uv.y + wave + shakeY);
1234
+
1235
+                 // Glitch with RGB separation
1236
+ float glitch = sin(time * glitchSpeed) * glitchAmp;
1237
+ vec2 uv_r = uv + vec2(glitch, 0.0);
1238
+ vec2 uv_g = uv + vec2(-glitch * 0.5, glitch * 0.5);
1239
+ vec2 uv_b = uv + vec2(0.0, -glitch);
1240
+
1241
+                 // Sample the shifted channels
1242
+ float r = texture(tex, uv_r).r;
1243
+ float g = texture(tex, uv_g).g;
1244
+ float b = texture(tex, uv_b).b;
1245
+
1246
+ f_color = vec4(r, g, b, 1.0);
1247
+ }
1248
+ '''
1249
+ )
1250
+
1251
+ def __init__(
1252
+ self,
1253
+ size,
1254
+ first_frame,
1255
+ context = None,
1256
+ amp = 0.02,
1257
+ freq = 25.0,
1258
+ glitchAmp = 0.02,
1259
+ glitchSpeed = 30.0
1260
+ ):
1261
+ super().__init__(size, first_frame, context)
1262
+
1263
+ self.uniforms.set('amp', amp)
1264
+ self.uniforms.set('freq', freq)
1265
+ self.uniforms.set('glitchAmp', glitchAmp)
1266
+ self.uniforms.set('glitchSpeed', glitchSpeed)
1267
+
1268
+ def _prepare_frame(
1269
+ self,
1270
+ t: float
1271
+     ) -> 'GlitchRgbFrame':
1272
+ self.uniforms.set('time', t)
1273
+
1274
+ return self
1275
+