yta-editor-nodes-gpu 0.0.1__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,723 @@
1
+ """
2
+ Experimental module. The effects here
3
+ have not been tested completely or the
4
+ result is not as good as it should be
5
+ to be considered a definitive effect.
6
+
7
+ TODO: This implementation is old and
8
+ now we are using other classes, but
9
+ the shader and the variables are the
10
+ same and that is why we keep the code
11
+ here, to implement it later... (GPU)
12
+ """
13
+ from yta_video_opengl.abstract import _OpenGLBase
14
+ from typing import Union
15
+
16
+ import numpy as np
17
+ import moderngl
18
+ import math
19
+
20
+
21
class BreathingFrame(_OpenGLBase):
    """
    Effect that makes the frame look as if it was
    breathing: a subtle, periodic zoom in and out
    around the center of the frame.
    """

    @property
    def fragment_shader(self) -> str:
        # Time-driven zoom (±5%) around the frame center, done in GLSL.
        return (
            '''
            #version 330
            uniform sampler2D tex;
            uniform float time;
            in vec2 v_text;
            out vec4 f_color;
            // Use uniforms to be customizable

            void main() {
                // Dynamic zoom scaled with t
                float scale = 1.0 + 0.05 * sin(time * 2.0); // 5% de zoom
                vec2 center = vec2(0.5, 0.5);

                // Recalculate coords according to center
                vec2 uv = (v_text - center) / scale + center;

                // Clamp to avoid artifacts
                uv = clamp(uv, 0.0, 1.0);

                f_color = texture(tex, uv);
            }
            '''
        )

    def process(
        self,
        input: Union[moderngl.Texture, 'np.ndarray'],
        t: float = 0.0,
    ) -> moderngl.Texture:
        """
        Apply the shader to the 'input', that must be
        a frame or a texture, and return the new
        resulting texture.

        We use and return textures to maintain the
        process in GPU and optimize it.
        """
        # 't' is forwarded as the 'time' uniform the shader reads.
        return super().process(input=input, time=t)
72
+
73
class HandheldFrame(_OpenGLBase):
    """
    The frame but as if it was being recorded by
    someone holding a camera, that is not 100%
    stable.
    """

    @property
    def vertex_shader(self) -> str:
        # Vertex stage applies the 2D affine 'transform' uniform matrix.
        return (
            '''
            #version 330
            in vec2 in_vert;
            in vec2 in_texcoord;
            out vec2 v_text;

            uniform mat3 transform;

            void main() {
                vec3 pos = vec3(in_vert, 1.0);
                pos = transform * pos;
                gl_Position = vec4(pos.xy, 0.0, 1.0);
                v_text = in_texcoord;
            }
            '''
        )

    @property
    def fragment_shader(self) -> str:
        # Plain pass-through sampling: all the motion happens in the vertex stage.
        return (
            '''
            #version 330
            uniform sampler2D tex;
            in vec2 v_text;
            out vec4 f_color;

            void main() {
                f_color = texture(tex, v_text);
            }
            '''
        )

    def _handheld_matrix(self, t):
        """
        Build the 3x3 affine matrix (rotation, translation
        and zoom) that fakes the hand shake at time 't'.
        """
        # More noticeable rotation (previous value was 0.02)
        angle = self._smooth_noise(t, freq=0.8, scale=0.05)

        # Bigger translations (previous value was 0.015)
        tx = self._smooth_noise(t, freq=1.1, scale=0.04)
        ty = self._smooth_noise(t, freq=1.4, scale=0.04)

        # More aggressive zoom (previous value was 0.02)
        zoom = 1.0 + self._smooth_noise(t, freq=0.5, scale=0.06)

        cos_a = math.cos(angle)
        sin_a = math.sin(angle)

        return np.array([
            [cos_a * zoom, -sin_a * zoom, tx],
            [sin_a * zoom, cos_a * zoom, ty],
            [0.0, 0.0, 1.0],
        ], dtype="f4")

    def _smooth_noise(self, t, freq=1.5, scale=1.0):
        """
        Small noise by using sin and cos mixed.
        """
        # Three waves at different frequencies and phases give a
        # smooth, non-repetitive-looking oscillation.
        noise = (
            math.sin(t * freq)
            + 0.5 * math.cos(t * freq * 0.5 + 1.7)
            + 0.25 * math.sin(t * freq * 0.25 + 2.5)
        )
        return noise * scale

    def process(
        self,
        input: Union[moderngl.Texture, np.ndarray],
        t: float = 0.0,
    ) -> moderngl.Texture:
        """
        Apply the shader to the 'input', that must be
        a frame or a texture, and return the new
        resulting texture.

        We use and return textures to maintain the
        process in GPU and optimize it.
        """
        # TODO: It was 'set_mat' previously
        # self._handheld_matrix(t).tobytes()
        return super().process(input=input, transform=self._handheld_matrix(t))
175
+
176
class OrbitingFrame(_OpenGLBase):
    """
    The frame but orbiting around the camera.
    """

    @property
    def vertex_shader(self) -> str:
        # The quad is lifted into 3D and transformed by a full MVP matrix.
        return (
            '''
            #version 330

            in vec2 in_vert;
            in vec2 in_texcoord;

            out vec2 v_uv;

            uniform mat4 mvp; // Model-View-Projection matrix

            void main() {
                v_uv = in_texcoord;
                // El quad está en XY, lo pasamos a XYZ con z=0
                vec4 pos = vec4(in_vert, 0.0, 1.0);
                gl_Position = mvp * pos;
            }
            '''
        )

    @property
    def fragment_shader(self) -> str:
        # Pass-through texture sampling; the orbit is a pure vertex transform.
        return (
            '''
            #version 330

            uniform sampler2D tex;
            in vec2 v_uv;
            out vec4 f_color;

            void main() {
                f_color = texture(tex, v_uv);
            }
            '''
        )

    def _perspective(self, fov_y_rad, aspect, near, far):
        """
        Build a 4x4 perspective projection matrix
        (row-major here; transposed before upload).
        """
        focal = 1.0 / np.tan(fov_y_rad / 2.0)
        matrix = np.zeros((4, 4), dtype='f4')
        matrix[0, 0] = focal / aspect
        matrix[1, 1] = focal
        matrix[2, 2] = (far + near) / (near - far)
        matrix[2, 3] = (2 * far * near) / (near - far)
        matrix[3, 2] = -1.0
        return matrix

    def _look_at(self, eye, target, up=(0, 1, 0)):
        """
        Build a 4x4 view matrix for a camera placed at
        'eye' looking towards 'target'.
        """
        eye = np.array(eye, dtype='f4')
        target = np.array(target, dtype='f4')
        up = np.array(up, dtype='f4')

        # Orthonormal camera basis: forward, side, up.
        forward = target - eye
        forward = forward / np.linalg.norm(forward)
        side = np.cross(forward, up)
        side = side / np.linalg.norm(side)
        cam_up = np.cross(side, forward)

        matrix = np.eye(4, dtype='f4')
        matrix[0, 0:3] = side
        matrix[1, 0:3] = cam_up
        matrix[2, 0:3] = -forward
        matrix[0, 3] = -np.dot(side, eye)
        matrix[1, 3] = -np.dot(cam_up, eye)
        matrix[2, 3] = np.dot(forward, eye)
        return matrix

    def _translate(self, x, y, z):
        """4x4 translation matrix by (x, y, z)."""
        matrix = np.eye(4, dtype='f4')
        matrix[0, 3] = x
        matrix[1, 3] = y
        matrix[2, 3] = z
        return matrix

    def _rotate_y(self, angle):
        """4x4 rotation matrix around the Y axis."""
        c = np.cos(angle)
        s = np.sin(angle)
        matrix = np.eye(4, dtype='f4')
        matrix[0, 0], matrix[0, 2] = c, s
        matrix[2, 0], matrix[2, 2] = -s, c
        return matrix

    def _scale_uniform(self, k):
        """4x4 uniform scale matrix by factor 'k'."""
        matrix = np.eye(4, dtype='f4')
        matrix[0, 0] = matrix[1, 1] = matrix[2, 2] = k
        return matrix

    def _carousel_mvp(
        self,
        t,
        *,
        aspect,
        fov_deg=60.0,
        radius=4.0,
        center_z=-6.0,
        speed=1.0,
        face_center_strength=1.0,
        extra_scale=1.0,
    ):
        """
        Build the MVP matrix for a panel orbiting in
        front of the camera.

        t: time in seconds
        aspect: framebuffer width/height
        radius: orbit radius on the XZ plane
        center_z: shifts the whole carousel towards -Z so it sits in front of the camera
        speed: angular speed
        face_center_strength: 1.0 = panel faces the center; 0.0 = no extra yaw along the orbit
        """
        # Projection and view (camera at origin looking towards -Z).
        proj = self._perspective(np.radians(fov_deg), aspect, 0.1, 100.0)
        view = np.eye(4, dtype='f4')  # or look_at((0,0,0), (0,0,-1))

        # Orbit angle (offset chosen so the panel "enters" from the left).
        theta = speed * t - np.pi * 0.5

        # Orbit on XZ with its center shifted to center_z; the Z radius is
        # flattened to 20% so the path is an ellipse rather than a circle.
        # x = radius * np.cos(theta)
        # z = radius * np.sin(theta) + center_z
        x = radius * np.cos(theta)
        z = (radius * 0.2) * np.sin(theta) + center_z

        # Yaw so the panel points at the center (0, 0, center_z).
        # The panel -> center vector is (-x, 0, center_z - z).
        yaw_to_center = np.arctan2(-x, (center_z - z))  # atan2(X, Z)
        yaw = face_center_strength * yaw_to_center

        model = self._translate(x, 0.0, z) @ self._rotate_y(yaw) @ self._scale_uniform(extra_scale)

        # IMPORTANT: OpenGL expects column-major data, so the caller
        # transposes this matrix when writing the uniform.
        return proj @ view @ model

    def process(
        self,
        input: Union[moderngl.Texture, np.ndarray],
        t: float = 0.0,
    ) -> moderngl.Texture:
        """
        Apply the shader to the 'input', that must be
        a frame or a texture, and return the new
        resulting texture.

        We use and return textures to maintain the
        process in GPU and optimize it.
        """
        aspect = self.size[0] / self.size[1]
        mvp = self._carousel_mvp(
            t,
            aspect=aspect,
            radius=4.0,
            center_z=-4.0,
            speed=1.2,
            face_center_strength=1.0,
            extra_scale=1.0,
        )

        # TODO: It was 'set_mat' previously
        # mvp.T.tobytes()
        return super().process(input=input, mvp=mvp.T)
367
+
368
class RotatingInCenterFrame(_OpenGLBase):
    """
    The frame rotating in place around the vertical
    (Y) axis, centered on the screen.

    The previous docstring ("orbiting around the
    camera") was copy-pasted from OrbitingFrame and
    did not describe this effect: the vertex shader
    only applies a Y rotation, no translation.
    """

    @property
    def vertex_shader(self) -> str:
        # Rotates the quad around the Y axis by 'time * speed' radians.
        return (
            '''
            #version 330

            in vec2 in_vert;
            in vec2 in_texcoord;
            out vec2 v_uv;

            uniform float time;
            uniform float speed;

            void main() {
                v_uv = in_texcoord;

                // Rotación alrededor del eje Y
                float angle = time * speed; // puedes usar time directamente, o time * speed
                float cosA = cos(angle);
                float sinA = sin(angle);

                // Convertimos el quad a 3D (x, y, z)
                vec3 pos = vec3(in_vert.xy, 0.0);

                // Rotación Y
                mat3 rotY = mat3(
                    cosA, 0.0, sinA,
                    0.0 , 1.0, 0.0,
                    -sinA, 0.0, cosA
                );

                pos = rotY * pos;

                gl_Position = vec4(pos, 1.0);
            }
            '''
        )

    @property
    def fragment_shader(self) -> str:
        # Pass-through texture sampling; rotation happens in the vertex stage.
        return (
            '''
            #version 330

            in vec2 v_uv;
            out vec4 f_color;

            uniform sampler2D tex;

            void main() {
                f_color = texture(tex, v_uv);
            }
            '''
        )

    def __init__(
        self,
        size,
        opengl_context = None,
        speed: float = 30
    ):
        # 'speed' is forwarded to the base class so it becomes the
        # 'speed' uniform (angular velocity, in radians per time unit,
        # since the shader computes angle = time * speed).
        super().__init__(
            opengl_context = opengl_context,
            size = size,
            speed = speed,
        )

    def process(
        self,
        input: Union[moderngl.Texture, np.ndarray],
        t: float = 0.0,
    ) -> moderngl.Texture:
        """
        Apply the shader to the 'input', that must be
        a frame or a texture, and return the new
        resulting texture.

        We use and return textures to maintain the
        process in GPU and optimize it.
        """
        return super().process(
            input = input,
            time = t
        )
461
+
462
class StrangeTvFrame(_OpenGLBase):
    """
    Nice effect like a tv screen or something...
    """

    @property
    def fragment_shader(self) -> str:
        # Composite "old TV" look: lens distortion, chromatic aberration,
        # directional motion blur, scanlines, flicker, vignette and grain.
        # All knobs are uniforms set once in __init__.
        # NOTE(review): this shader reads 'v_uv' while BreathingFrame's
        # reads 'v_text' — both rely on the base-class vertex shader, so
        # the varying name must match it; confirm against _OpenGLBase.
        return (
            '''
            #version 330

            uniform sampler2D tex;
            uniform float time;

            // ---- Parámetros principales (ajústalos en runtime) ----
            uniform float aberr_strength;    // 0..3 (fuerza del RGB split radial)
            uniform float barrel_k;          // -0.5..0.5 (distorsión de lente; positivo = barrel)
            uniform float blur_radius;       // 0..0.02 (radio de motion blur en UV)
            uniform float blur_angle;        // en radianes (dirección del arrastre)
            uniform int   blur_samples;      // 4..24 (taps del blur)
            uniform float vignette_strength; // 0..2
            uniform float grain_amount;      // 0..0.1
            uniform float flicker_amount;    // 0..0.2
            uniform float scanline_amount;   // 0..0.2

            in vec2 v_uv;
            out vec4 f_color;

            // --- helpers ---
            float rand(vec2 co){
                return fract(sin(dot(co, vec2(12.9898,78.233))) * 43758.5453);
            }

            // Barrel distortion (simple, k>0 curva hacia fuera)
            vec2 barrel(vec2 uv, float k){
                // map to [-1,1]
                vec2 p = uv * 2.0 - 1.0;
                float r2 = dot(p, p);
                p *= (1.0 + k * r2);
                // back to [0,1]
                return p * 0.5 + 0.5;
            }

            // Aberración cromática radial
            vec3 sample_chromatic(sampler2D t, vec2 uv, vec2 center, float strength){
                // Offset radial según distancia al centro
                vec2 d = uv - center;
                float r = length(d);
                vec2 dir = (r > 1e-5) ? d / r : vec2(0.0);
                // Cada canal se desplaza un poco distinto
                float s = strength * r * 0.005; // escala fina
                float sr = s * 1.0;
                float sg = s * 0.5;
                float sb = s * -0.5; // azul hacia dentro para contraste

                float rC = texture(t, uv + dir * sr).r;
                float gC = texture(t, uv + dir * sg).g;
                float bC = texture(t, uv + dir * sb).b;
                return vec3(rC, gC, bC);
            }

            void main(){
                vec2 uv = v_uv;
                vec2 center = vec2(0.5, 0.5);

                // Lente (barrel/pincushion)
                uv = barrel(uv, barrel_k);

                // Early out si nos salimos mucho (fade de bordes)
                vec2 uv_clamped = clamp(uv, 0.0, 1.0);
                float edge = smoothstep(0.0, 0.02, 1.0 - max(max(-uv.x, uv.x-1.0), max(-uv.y, uv.y-1.0)));

                // Dirección del motion blur
                vec2 dir = vec2(cos(blur_angle), sin(blur_angle));
                // Pequeña variación temporal para que “respire”
                float jitter = (sin(time * 13.0) * 0.5 + 0.5) * 0.4 + 0.6;

                // Acumulación de blur con pesos
                vec3 acc = vec3(0.0);
                float wsum = 0.0;

                int N = max(1, blur_samples);
                for(int i = 0; i < 64; ++i){ // hard cap de seguridad
                    if(i >= N) break;
                    // t de -1..1 distribuye muestras a ambos lados
                    float fi = float(i);
                    float t = (fi / float(N - 1)) * 2.0 - 1.0;

                    // curva de pesos (gauss approx)
                    float w = exp(-t*t * 2.5);
                    // offset base
                    vec2 ofs = dir * t * blur_radius * jitter;

                    // micro-jitter por muestra para romper banding
                    ofs += vec2(rand(uv + fi)*0.0005, rand(uv + fi + 3.14)*0.0005) * blur_radius;

                    // muestreo con aberración cromática
                    vec3 c = sample_chromatic(tex, uv + ofs, center, aberr_strength);

                    acc += c * w;
                    wsum += w;
                }
                vec3 col = acc / max(wsum, 1e-6);

                // Scanlines + flicker
                float scan = 1.0 - scanline_amount * (0.5 + 0.5 * sin((uv.y + time*1.7)*3.14159*480.0));
                float flick = 1.0 + flicker_amount * (sin(time*60.0 + uv.x*10.0) * 0.5 + 0.5);
                col *= scan * flick;

                // Vignette (radial)
                float r = distance(uv, center);
                float vig = 1.0 - smoothstep(0.7, 1.0, r * (1.0 + 0.5*vignette_strength));
                col *= mix(1.0, vig, vignette_strength);

                // Grano
                float g = (rand(uv * (time*37.0 + 1.0)) - 0.5) * 2.0 * grain_amount;
                col += g;

                // Fade de bordes por clamp/warp
                col *= edge;

                f_color = vec4(col, 1.0);
            }
            '''
        )

    def __init__(
        self,
        size,
        opengl_context=None,
        aberr_strength: float = 1.5,
        barrel_k: float = 0.08,
        blur_radius: float = 0.006,
        blur_angle: float = 0.0,  # (0 = horizontal, 1.57 ≈ vertical)
        blur_samples: int = 12,
        vignette_strength: float = 0.8,
        grain_amount: float = 0.02,
        flicker_amount: float = 0.05,
        scanline_amount: float = 0.05,
    ):
        # Every keyword is forwarded to the base class so it is exposed
        # to the shader as the uniform of the same name.
        super().__init__(
            opengl_context=opengl_context,
            size=size,
            aberr_strength=aberr_strength,
            barrel_k=barrel_k,
            blur_radius=blur_radius,
            blur_angle=blur_angle,
            blur_samples=blur_samples,
            vignette_strength=vignette_strength,
            grain_amount=grain_amount,
            flicker_amount=flicker_amount,
            scanline_amount=scanline_amount,
        )

    def process(
        self,
        input: Union[moderngl.Texture, np.ndarray],
        t: float = 0.0,
    ) -> moderngl.Texture:
        """
        Apply the shader to the 'input', that must be
        a frame or a texture, and return the new
        resulting texture.

        We use and return textures to maintain the
        process in GPU and optimize it.
        """
        return super().process(input=input, time=t)
635
+
636
class GlitchRgbFrame(_OpenGLBase):
    """
    Digital-glitch effect: a sinusoidal wave
    distortion plus a random camera shake and an
    RGB channel split that oscillates over time.

    The previous docstring ("like a tv screen")
    was copy-pasted from StrangeTvFrame and did
    not describe this effect.
    """

    @property
    def fragment_shader(self) -> str:
        # Wave + shake + RGB-split glitch, all driven by the 'time'
        # uniform and the intensity uniforms set once in __init__.
        return (
            '''
            #version 330

            // ----------- Fragment Shader -----------
            uniform sampler2D tex;
            uniform float time;

            // Intensidades del efecto
            uniform float amp;        // amplitud de distorsión
            uniform float freq;       // frecuencia de la onda
            uniform float glitchAmp;  // fuerza del glitch
            uniform float glitchSpeed;

            in vec2 v_uv;
            out vec4 f_color;

            void main() {
                // Distorsión sinusoidal en Y
                float wave = sin(v_uv.x * freq + time * 2.0) * amp;

                // Pequeño desplazamiento aleatorio (shake)
                float shakeX = (fract(sin(time * 12.9898) * 43758.5453) - 0.5) * 0.01;
                float shakeY = (fract(sin(time * 78.233) * 12345.6789) - 0.5) * 0.01;

                // Coordenadas base con distorsión
                vec2 uv = vec2(v_uv.x + shakeX, v_uv.y + wave + shakeY);

                // Glitch con separación RGB
                float glitch = sin(time * glitchSpeed) * glitchAmp;
                vec2 uv_r = uv + vec2(glitch, 0.0);
                vec2 uv_g = uv + vec2(-glitch * 0.5, glitch * 0.5);
                vec2 uv_b = uv + vec2(0.0, -glitch);

                // Muestreo canales desplazados
                float r = texture(tex, uv_r).r;
                float g = texture(tex, uv_g).g;
                float b = texture(tex, uv_b).b;

                f_color = vec4(r, g, b, 1.0);
            }
            '''
        )

    def __init__(
        self,
        size,
        opengl_context = None,
        amplitude: float = 0.02,
        frequency: float = 25.0,
        glitch_amplitude: float = 0.02,
        glitch_speed: float = 30.0
    ):
        # The snake_case parameters are mapped onto the camelCase
        # uniform names the shader declares (amp, freq, glitchAmp,
        # glitchSpeed) and forwarded to the base class.
        super().__init__(
            opengl_context = opengl_context,
            size = size,
            amp = amplitude,
            freq = frequency,
            glitchAmp = glitch_amplitude,
            glitchSpeed = glitch_speed
        )

    def process(
        self,
        input: Union[moderngl.Texture, np.ndarray],
        t: float = 0.0,
    ) -> moderngl.Texture:
        """
        Apply the shader to the 'input', that must be
        a frame or a texture, and return the new
        resulting texture.

        We use and return textures to maintain the
        process in GPU and optimize it.
        """
        return super().process(
            input = input,
            time = t
        )