yta-editor-nodes-gpu 0.0.1__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,694 @@
+ """
+ TODO: This module doesn't use 't' but 'progress',
+ so it is not a child of 'processor.video'; maybe
+ we should move it to 'processor.transitions'
+ instead of 'processor.video.transitions' (?)
+ """
+ from yta_video_opengl.abstract import _OpenGLBase
+ from yta_validation.parameter import ParameterValidator
+ from typing import Union
+
+ import numpy as np
+ import moderngl
+
+
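The TODO above distinguishes a raw timestamp 't' from the normalized 'progress' these processors expect. As a quick illustration (not part of the package; the names transition_start and transition_duration are hypothetical), a caller could derive 'progress' from a time inside the transition window like this:

def time_to_progress(t: float, transition_start: float, transition_duration: float) -> float:
    # Map an absolute time 't' (seconds) to the normalized 'progress'
    # in [0.0, 1.0] that the transition processors below expect.
    progress = (t - transition_start) / transition_duration
    # Clamp so frames before/after the window map to full A / full B
    return min(max(progress, 0.0), 1.0)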
+ class _TransitionProcessorGPU(_OpenGLBase):
+     """
+     *Abstract class*
+
+     *For internal use only*
+
+     A transition between the frames of 2 videos.
+
+     This transition is made with GPU (OpenGL).
+     """
+
+     @property
+     def fragment_shader(
+         self
+     ) -> str:
+         """
+         The code of the fragment shader.
+         """
+         return (
+             '''
+             #version 330
+             uniform sampler2D first_texture;
+             uniform sampler2D second_texture;
+             uniform float progress; // 0.0 → full A, 1.0 → full B
+             in vec2 v_uv;
+             out vec4 output_color;
+
+             void main() {
+                 // Horizontal version (right to left)
+                 vec2 uv_first = v_uv + vec2(-progress, 0.0);
+                 vec2 uv_second = v_uv + vec2(1.0 - progress, 0.0);
+
+                 vec4 color_first = texture(first_texture, uv_first);
+                 vec4 color_second = texture(second_texture, uv_second);
+
+                 if (uv_first.x < 0.0) {
+                     output_color = color_second;
+                 } else if (uv_second.x > 1.0) {
+                     output_color = color_first;
+                 } else {
+                     // A and B frames are shown at the same time
+                     output_color = mix(color_first, color_second, progress);
+                 }
+             }
+             '''
+         )
+
+     def __init__(
+         self,
+         opengl_context: Union[moderngl.Context, None],
+         output_size: tuple[int, int],
+         **kwargs
+     ):
+         super().__init__(
+             opengl_context = opengl_context,
+             output_size = output_size,
+             **kwargs
+         )
+
+     def _prepare_input_textures(
+         self
+     ) -> '_OpenGLBase':
+         """
+         *For internal use only*
+
+         *This method should be overwritten*
+
+         Set the input texture variables and the handlers
+         we need to manage them. This method has to be
+         called only once, just to set the slot for each
+         of the textures we will use (which are registered
+         as textures in the shader).
+         """
+         self.textures.add('first_texture', 0)
+         self.textures.add('second_texture', 1)
+
+         return self
+
+     def process(
+         self,
+         first_input: Union[moderngl.Texture, np.ndarray],
+         second_input: Union[moderngl.Texture, np.ndarray],
+         progress: float,
+         output_size: Union[tuple[int, int], None] = None,
+         **kwargs
+     ) -> moderngl.Texture:
+         """
+         Validate the parameters, set the textures map,
+         process it and return the result according to
+         the `progress` provided.
+
+         You can provide any additional parameter in the
+         **kwargs, but be careful, because this could
+         overwrite other uniforms that were previously set.
+
+         We accept and return textures to keep the
+         processing on the GPU and optimize it.
+         """
+         ParameterValidator.validate_mandatory_instance_of('first_input', first_input, [moderngl.Texture, np.ndarray])
+         ParameterValidator.validate_mandatory_instance_of('second_input', second_input, [moderngl.Texture, np.ndarray])
+         ParameterValidator.validate_mandatory_positive_float('progress', progress, do_include_zero = True)
+
+         textures_map = {
+             'first_texture': first_input,
+             'second_texture': second_input
+         }
+
+         return self._process_common(
+             textures_map = textures_map,
+             output_size = output_size,
+             progress = progress,
+             **kwargs
+         )
+
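To make the calling pattern concrete, here is a minimal, hypothetical usage sketch (not taken from the package). It assumes that passing opengl_context = None lets _OpenGLBase create its own standalone context, that input frames are RGBA uint8 NumPy arrays matching output_size, and it uses the CrossfadeTransitionProcessorGPU subclass defined further below; the CPU read-back via Texture.read() is optional.

import numpy as np

# Hypothetical per-frame loop; the processor is assumed to build its own
# OpenGL context when None is passed (the type hint allows it).
processor = CrossfadeTransitionProcessorGPU(
    opengl_context = None,
    output_size = (1920, 1080)
)

frame_a = np.zeros((1080, 1920, 4), dtype = np.uint8)       # stand-in frames
frame_b = np.full((1080, 1920, 4), 255, dtype = np.uint8)

number_of_frames = 30
for i in range(number_of_frames):
    progress = i / (number_of_frames - 1)   # 0.0 → full A, 1.0 → full B
    texture = processor.process(frame_a, frame_b, progress)
    # Optional CPU read-back; the texture can instead be passed on to the
    # next GPU stage to avoid the transfer.
    rgba = np.frombuffer(texture.read(), dtype = np.uint8)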
+ # Specific implementations here below:
+ class SlideTransitionProcessorGPU(_TransitionProcessorGPU):
+     """
+     A transition between the frames of 2 videos, sliding
+     from right to left.
+
+     This transition is made with GPU (OpenGL).
+     """
+
+     # TODO: I know this is the same as in the base class,
+     # but I want it like that
+     @property
+     def fragment_shader(
+         self
+     ) -> str:
+         """
+         The code of the fragment shader.
+         """
+         return (
+             '''
+             #version 330
+             uniform sampler2D first_texture;
+             uniform sampler2D second_texture;
+             uniform float progress; // 0.0 → full A, 1.0 → full B
+             in vec2 v_uv;
+             out vec4 output_color;
+
+             void main() {
+                 // Horizontal version (slide to right)
+                 //vec2 uv_first = v_uv + vec2(-progress, 0.0);
+                 //vec2 uv_second = v_uv + vec2(1.0 - progress, 0.0);
+
+                 // Horizontal version (slide to left)
+                 vec2 uv_first = v_uv + vec2(progress, 0.0);
+                 vec2 uv_second = v_uv + vec2(-1.0 + progress, 0.0);
+
+                 vec4 color_first = texture(first_texture, uv_first);
+                 vec4 color_second = texture(second_texture, uv_second);
+
+                 // Horizontal version (slide to right)
+                 //if (uv_first.x < 0.0) {
+                 //    output_color = color_second;
+                 //} else if (uv_second.x > 1.0) {
+                 //    output_color = color_first;
+                 //} else {
+                 //    // A and B frames are shown at the same time
+                 //    output_color = mix(color_first, color_second, progress);
+                 //}
+
+                 // Horizontal version (slide to left)
+                 if (uv_first.x > 1.0) {
+                     output_color = color_second;
+                 } else if (uv_second.x < 0.0) {
+                     output_color = color_first;
+                 } else {
+                     output_color = mix(color_first, color_second, progress);
+                 }
+             }
+             '''
+         )
+
+ class CrossfadeTransitionProcessorGPU(_TransitionProcessorGPU):
+     """
+     A transition between the frames of 2 videos,
+     transforming the first one into the second one.
+
+     This transition is made with GPU (OpenGL).
+     """
+
+     @property
+     def fragment_shader(
+         self
+     ) -> str:
+         return (
+             """
+             #version 330
+             uniform sampler2D first_texture;
+             uniform sampler2D second_texture;
+             uniform float progress; // 0 = full A, 1 = full B
+             in vec2 v_uv;
+             out vec4 output_color;
+             void main() {
+                 vec4 color_first = texture(first_texture, v_uv);
+                 vec4 color_second = texture(second_texture, v_uv);
+                 output_color = mix(color_first, color_second, progress);
+             }
+             """
+         )
+
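For reference, the GLSL mix(a, b, progress) above is plain linear interpolation. A CPU-side NumPy equivalent (illustrative only, not part of the package) that can be used to sanity-check the GPU output:

import numpy as np

def crossfade_cpu(frame_a: np.ndarray, frame_b: np.ndarray, progress: float) -> np.ndarray:
    # CPU reference of mix(color_first, color_second, progress):
    # result = a * (1 - progress) + b * progress, per pixel and channel.
    blended = frame_a.astype(np.float32) * (1.0 - progress) + frame_b.astype(np.float32) * progress
    return blended.astype(np.uint8)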
+ class DistortedCrossfadeTransitionProcessorGPU(_TransitionProcessorGPU):
+     """
+     A transition between the frames of 2 videos,
+     transforming the first one into the second one
+     with a distortion in between.
+
+     This transition is made with GPU (OpenGL).
+     """
+
+     @property
+     def fragment_shader(
+         self
+     ) -> str:
+         return (
+             """
+             #version 330
+             uniform sampler2D first_texture;
+             uniform sampler2D second_texture;
+             uniform float progress; // 0.0 -> A, 1.0 -> B
+             uniform float intensity; // Distortion control
+             in vec2 v_uv;
+             out vec4 output_color;
+
+             const int passes = 6;
+
+             void main() {
+                 vec4 c1 = vec4(0.0);
+                 vec4 c2 = vec4(0.0);
+
+                 float disp = intensity * (0.5 - distance(0.5, progress));
+                 for (int xi=0; xi<passes; xi++) {
+                     float x = float(xi) / float(passes) - 0.5;
+                     for (int yi=0; yi<passes; yi++) {
+                         float y = float(yi) / float(passes) - 0.5;
+                         vec2 v = vec2(x, y);
+                         float d = disp;
+                         c1 += texture(first_texture, v_uv + d * v);
+                         c2 += texture(second_texture, v_uv + d * v);
+                     }
+                 }
+                 c1 /= float(passes * passes);
+                 c2 /= float(passes * passes);
+                 output_color = mix(c1, c2, progress);
+             }
+             """
+         )
+
+     def __init__(
+         self,
+         opengl_context: Union[moderngl.Context, None],
+         # TODO: Review this
+         output_size: tuple[int, int] = (1920, 1080),
+         intensity: float = 1.0,
+         **kwargs
+     ):
+         super().__init__(
+             opengl_context = opengl_context,
+             output_size = output_size,
+             intensity = intensity,
+             **kwargs
+         )
+
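A quick worked note on the displacement term above (illustrative, not from the package): distance(0.5, progress) is abs(progress - 0.5), so the blur-like sampling offset is zero at both ends of the transition and peaks at 0.5 * intensity halfway through:

def distortion_displacement(progress: float, intensity: float = 1.0) -> float:
    # Mirrors 'disp = intensity * (0.5 - distance(0.5, progress))' in the shader
    return intensity * (0.5 - abs(progress - 0.5))

# distortion_displacement(0.0) == 0.0   -> no distortion on pure A
# distortion_displacement(0.5) == 0.5   -> maximum distortion mid-transition
# distortion_displacement(1.0) == 0.0   -> no distortion on pure B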
+ class AlphaPediaMaskTransitionProcessorGPU(_TransitionProcessorGPU):
+     """
+     A transition made by using a custom mask to
+     join the 2 videos. This mask is specifically
+     obtained from the AlphaPediaYT channel, to which
+     we upload specific masking videos.
+
+     Both videos are placed occupying the whole
+     scene and are overlapped by using the transition
+     video mask, without moving the frames across
+     the screen like other classes do (like the
+     FallingBars).
+     """
+
+     # TODO: I think I don't need a 'progress', but can just
+     # mix both frames as much as the alpha (or white
+     # presence) tells
+     @property
+     def fragment_shader(
+         self
+     ) -> str:
+         return (
+             """
+             #version 330
+
+             uniform sampler2D first_texture;
+             uniform sampler2D second_texture;
+             uniform sampler2D mask_texture;
+
+             uniform float progress; // 0.0 → full A, 1.0 → full B
+             uniform bool use_alpha_channel; // True to use the alpha channel
+             //uniform float contrast; // Optional contrast to magnify the result
+
+             in vec2 v_uv;
+             out vec4 output_color;
+
+             void main() {
+                 vec4 first_color = texture(first_texture, v_uv);
+                 vec4 second_color = texture(second_texture, v_uv);
+                 vec4 mask_color = texture(mask_texture, v_uv);
+
+                 // Mask alpha or red?
+                 float mask_value = use_alpha_channel ? mask_color.a : mask_color.r;
+
+                 // Optional contrast
+                 //mask_value = clamp((mask_value - 0.5) * contrast + 0.5, 0.0, 1.0);
+                 mask_value = clamp((mask_value - 0.5) + 0.5, 0.0, 1.0);
+
+                 float t = smoothstep(0.0, 1.0, mask_value + progress - 0.5);
+
+                 output_color = mix(first_color, second_color, t);
+             }
+             """
+         )
+
+     def _prepare_input_textures(
+         self
+     ) -> None:
+         """
+         *For internal use only*
+
+         Set the input texture variables and the handlers
+         we need to manage them.
+         """
+         self.textures.add('first_texture', 0)
+         self.textures.add('second_texture', 1)
+         self.textures.add('mask_texture', 2)
+
+     def process(
+         self,
+         input_a: Union[moderngl.Texture, 'np.ndarray'],
+         input_b: Union[moderngl.Texture, 'np.ndarray'],
+         input_mask: Union[moderngl.Texture, 'np.ndarray'],
+         progress: float,
+         output_size: Union[tuple[int, int], None] = None,
+         **kwargs
+     ) -> moderngl.Texture:
+         """
+         Apply the shader to the given inputs, which
+         must be frames or textures, and return the
+         new resulting texture.
+
+         You can provide any additional parameter in the
+         **kwargs, but be careful, because this could
+         overwrite other uniforms that were previously set.
+
+         We accept and return textures to keep the
+         processing on the GPU and optimize it.
+         """
+         ParameterValidator.validate_mandatory_instance_of('input_a', input_a, [moderngl.Texture, np.ndarray])
+         ParameterValidator.validate_mandatory_instance_of('input_b', input_b, [moderngl.Texture, np.ndarray])
+         ParameterValidator.validate_mandatory_instance_of('input_mask', input_mask, [moderngl.Texture, np.ndarray])
+         ParameterValidator.validate_mandatory_positive_float('progress', progress, do_include_zero = True)
+
+         textures_map = {
+             'first_texture': input_a,
+             'second_texture': input_b,
+             'mask_texture': input_mask
+         }
+
+         # TODO: There is a 'use_alpha_channel' uniform to use
+         # the alpha instead of the red channel of the mask frame,
+         # but the red one is working for our AlphaPedia videos, so...
+
+         kwargs = {
+             **kwargs,
+             'progress': progress
+         }
+
+         return self._process_common(
+             textures_map = textures_map,
+             output_size = output_size,
+             **kwargs
+         )
+
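The call is again per frame, now with a third input for the mask. A hypothetical sketch (not from the package), assuming the mask frame is an RGBA uint8 array whose red channel carries the black/white mask, matching the red-channel path mentioned in the TODO inside process():

import numpy as np

# Hypothetical call; frame_a, frame_b and mask_frame would come from the
# two videos and the AlphaPedia masking video, respectively.
processor = AlphaPediaMaskTransitionProcessorGPU(
    opengl_context = None,
    output_size = (1920, 1080)
)

frame_a = np.zeros((1080, 1920, 4), dtype = np.uint8)
frame_b = np.full((1080, 1920, 4), 255, dtype = np.uint8)
mask_frame = np.zeros((1080, 1920, 4), dtype = np.uint8)  # red channel drives the blend

texture = processor.process(frame_a, frame_b, mask_frame, progress = 0.5)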
+ class CircleOpeningTransitionProcessorGPU(_TransitionProcessorGPU):
+     """
+     A transition between the frames of 2 videos in
+     which the frames are mixed by generating a circle
+     that grows from the center until it covers the
+     whole screen.
+
+     This transition is made with GPU (OpenGL).
+     """
+
+     @property
+     def fragment_shader(
+         self
+     ) -> str:
+         return (
+             """
+             #version 330
+             #define UNIQUE_ID_{id(self)}
+             uniform sampler2D first_texture;
+             uniform sampler2D second_texture;
+             uniform float progress; // 0.0 → full A, 1.0 → full B
+             uniform float border_smoothness; // 0.02 is a good value
+
+             in vec2 v_uv;
+             out vec4 output_color;
+
+             void main() {
+                 // Obtain the size automatically from the texture
+                 vec2 output_size = vec2(textureSize(first_texture, 0));
+
+                 vec2 pos = v_uv * output_size;
+                 vec2 center = output_size * 0.5;
+
+                 // Distance from center
+                 float dist = distance(pos, center);
+
+                 // Radius of current circle
+                 float max_radius = length(center);
+                 float radius = progress * max_radius;
+
+                 vec4 first_color = texture(first_texture, v_uv);
+                 vec4 second_color = texture(second_texture, v_uv);
+
+                 // With smooth circle
+                 // TODO: Make this customizable
+                 float mask = 1.0 - smoothstep(radius - border_smoothness * max_radius, radius + border_smoothness * max_radius, dist);
+                 output_color = mix(first_color, second_color, mask);
+             }
+             """
+         )
+
+     def __init__(
+         self,
+         opengl_context: Union[moderngl.Context, None],
+         # TODO: Review this
+         output_size: tuple[int, int] = (1920, 1080),
+         border_smoothness: float = 0.02,
+         **kwargs
+     ):
+         super().__init__(
+             opengl_context = opengl_context,
+             # TODO: Maybe 'output_size' has to be the texture size
+             # by default
+             output_size = output_size,
+             border_smoothness = border_smoothness,
+             **kwargs
+         )
+
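The mask math above is easy to prototype on the CPU. A NumPy sketch (illustrative only, not part of the package) of the same opening-circle mask, where values near 1.0 reveal the second frame inside the growing circle:

import numpy as np

def circle_opening_mask(width: int, height: int, progress: float, border_smoothness: float = 0.02) -> np.ndarray:
    # CPU sketch of the shader: 1.0 inside the growing circle (second
    # frame visible), 0.0 outside (first frame still visible).
    ys, xs = np.mgrid[0:height, 0:width]
    center_x, center_y = width * 0.5, height * 0.5
    dist = np.hypot(xs - center_x, ys - center_y)
    max_radius = np.hypot(center_x, center_y)
    radius = progress * max_radius
    # smoothstep(edge0, edge1, dist)
    edge0 = radius - border_smoothness * max_radius
    edge1 = radius + border_smoothness * max_radius
    t = np.clip((dist - edge0) / (edge1 - edge0), 0.0, 1.0)
    return 1.0 - (t * t * (3.0 - 2.0 * t))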
+ class CircleClosingTransitionProcessorGPU(_TransitionProcessorGPU):
+     """
+     A transition between the frames of 2 videos in
+     which the frames are mixed by generating a circle
+     that shrinks from covering the whole screen down
+     to the center, where it disappears.
+
+     This transition is made with GPU (OpenGL).
+     """
+
+     @property
+     def fragment_shader(
+         self
+     ) -> str:
+         return (
+             """
+             #version 330
+
+             uniform sampler2D first_texture;
+             uniform sampler2D second_texture;
+             uniform float progress; // 0.0 → full A, 1.0 → full B
+             uniform float border_smoothness; // 0.02 is a good value
+
+             in vec2 v_uv;
+             out vec4 output_color;
+
+             void main() {
+                 // Obtain the size automatically from the texture
+                 vec2 output_size = vec2(textureSize(first_texture, 0));
+
+                 vec2 pos = v_uv * output_size;
+                 vec2 center = output_size * 0.5;
+
+                 // Distance from center
+                 float dist = distance(pos, center);
+
+                 // Radius of current circle
+                 float max_radius = length(center);
+                 float radius = (1.0 - progress) * max_radius;
+
+                 vec4 first_color = texture(first_texture, v_uv);
+                 vec4 second_color = texture(second_texture, v_uv);
+
+                 // With smooth circle
+                 // TODO: Make this customizable
+                 float mask = smoothstep(radius - border_smoothness * max_radius, radius + border_smoothness * max_radius, dist);
+                 output_color = mix(first_color, second_color, mask);
+             }
+             """
+         )
+
+     def __init__(
+         self,
+         opengl_context: Union[moderngl.Context, None],
+         # TODO: Review this
+         output_size: tuple[int, int] = (1920, 1080),
+         border_smoothness: float = 0.02,
+         **kwargs
+     ):
+         super().__init__(
+             opengl_context = opengl_context,
+             # TODO: Maybe 'output_size' has to be the texture size
+             # by default
+             output_size = output_size,
+             border_smoothness = border_smoothness,
+             **kwargs
+         )
+
+ # TODO: This effect does not advance linearly with
+ # 'progress', so you cannot use normal timing
+ class BarsFallingTransitionProcessorGPU(_TransitionProcessorGPU):
+     """
+     A transition between the frames of 2 videos in which
+     a set of bars carrying the first video fall away to
+     let the second one be seen.
+
+     Extracted from here:
+     - https://gl-transitions.com/editor/DoomScreenTransition
+
+     This transition is made with GPU (OpenGL).
+     """
+
+     @property
+     def fragment_shader(
+         self
+     ) -> str:
+         return (
+             """
+             #version 330
+
+             uniform sampler2D first_texture;
+             uniform sampler2D second_texture;
+             uniform float progress; // 0.0 → start, 1.0 → end
+
+             uniform int number_of_bars;
+             uniform float amplitude; // Speed
+             uniform float noise; // Extra noise [0.0, 1.0]
+             uniform float frequency; // Wave frequency
+             uniform float drip_scale; // Falling from center
+
+             in vec2 v_uv;
+             out vec4 output_color;
+
+             // Pseudo-random value from an integer
+             float rand(int num) {
+                 return fract(mod(float(num) * 67123.313, 12.0) * sin(float(num) * 10.3) * cos(float(num)));
+             }
+
+             // Wave for vertical distortion
+             float wave(int num) {
+                 float fn = float(num) * frequency * 0.1 * float(number_of_bars);
+                 return cos(fn * 0.5) * cos(fn * 0.13) * sin((fn + 10.0) * 0.3) / 2.0 + 0.5;
+             }
+
+             // Vertical curve to borders
+             float drip(int num) {
+                 return sin(float(num) / float(number_of_bars - 1) * 3.141592) * drip_scale;
+             }
+
+             // Displacement for a bar
+             float pos(int num) {
+                 float w = wave(num);
+                 float r = rand(num);
+                 float base = (noise == 0.0) ? w : mix(w, r, noise);
+                 return base + ((drip_scale == 0.0) ? 0.0 : drip(num));
+             }
+
+             void main() {
+                 int bar = int(v_uv.x * float(number_of_bars));
+
+                 float scale = 1.0 + pos(bar) * amplitude;
+                 float phase = progress * scale;
+                 float pos_y = v_uv.y;
+
+                 vec2 p;
+                 vec4 color;
+
+                 if (phase + pos_y < 1.0) {
+                     // Frame A is visible
+                     p = vec2(v_uv.x, v_uv.y + mix(0.0, 1.0, phase));
+                     color = texture(first_texture, p);
+                 } else {
+                     // Frame B is visible
+                     color = texture(second_texture, v_uv);
+                 }
+
+                 output_color = color;
+             }
+             """
+         )
+
+     def __init__(
+         self,
+         opengl_context: Union[moderngl.Context, None],
+         # TODO: Review this
+         output_size: tuple[int, int] = (1920, 1080),
+         number_of_bars: int = 30,
+         amplitude: float = 2.0,
+         noise: float = 0.1, # [0.0, 1.0]
+         frequency: float = 0.5,
+         drip_scale: float = 0.5,
+         **kwargs
+     ):
+         super().__init__(
+             opengl_context = opengl_context,
+             output_size = output_size,
+             number_of_bars = number_of_bars,
+             amplitude = amplitude,
+             noise = noise,
+             frequency = frequency,
+             drip_scale = drip_scale,
+             **kwargs
+         )
+
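Regarding the TODO above this class: each bar finishes falling when progress * scale reaches 1.0, and scale differs per bar, so the transition as a whole does not track 'progress' linearly. A small illustrative sketch (not from the package) of the per-bar completion point:

# Illustrative only: with scale = 1.0 + pos(bar) * amplitude, a bar is
# fully gone once progress * scale >= 1.0, i.e. at progress = 1.0 / scale.
def bar_completion_progress(bar_displacement: float, amplitude: float = 2.0) -> float:
    scale = 1.0 + bar_displacement * amplitude
    return 1.0 / scale

# A bar with displacement 1.0 disappears at progress ≈ 0.33, while a bar
# with displacement 0.0 only disappears at progress = 1.0, which is why
# the effect cannot be driven with normal linear timing.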
+
+ """
+ Note for the developer:
+
+ Below there is a shader that allows you to create
+ more slide transitions (vertical, diagonal), but it
+ has to be refactored because the mixing part is not
+ working properly according to the position. The code
+ was written for a horizontal slide and has to be
+ adapted to the other movements.
+
+ Code here below:
+
+ #version 330
+
+ // FRAGMENT SHADER — Slide horizontal
+ uniform sampler2D texA;
+ uniform sampler2D texB;
+ uniform float progress; // 0.0 → full A, 1.0 → full B
+
+ in vec2 frag_uv;
+ out vec4 frag_color;
+
+ void main() {
+     // Horizontal version (right to left)
+     vec2 uvA = frag_uv + vec2(-progress, 0.0);
+     vec2 uvB = frag_uv + vec2(1.0 - progress, 0.0);
+
+     // Horizontal version (left to right)
+     //vec2 uvA = frag_uv + vec2(progress, 0.0);
+     //vec2 uvB = frag_uv + vec2(-1.0 + progress, 0.0);
+
+     // Vertical version (top to bottom)
+     // TODO: We need to adjust the color mixing
+     // to make it fit the type of transition
+     //vec2 uvA = frag_uv + vec2(0.0, -progress);
+     //vec2 uvB = frag_uv + vec2(0.0, 1.0 - progress);
+
+     // Diagonal version (top left to bottom right)
+     //vec2 uvA = frag_uv + vec2(-progress, -progress);
+     //vec2 uvB = frag_uv + vec2(1.0 - progress, 1.0 - progress);
+
+     vec4 colorA = texture(texA, uvA);
+     vec4 colorB = texture(texB, uvB);
+
+     if (uvA.x < 0.0) {
+         frag_color = colorB;
+     } else if (uvB.x > 1.0) {
+         frag_color = colorA;
+     } else {
+         // A and B frames are shown at the same time
+         frag_color = mix(colorA, colorB, progress);
+     }
+ }
+ """
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: yta-editor-nodes-gpu
- Version: 0.0.1
+ Version: 0.0.3
  Summary: Youtube Autonomous Main Editor Nodes GPU module
  License-File: LICENSE
  Author: danialcala94
@@ -8,6 +8,10 @@ Author-email: danielalcalavalera@gmail.com
  Requires-Python: ==3.9
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.9
+ Requires-Dist: moderngl (>=0.0.1,<9.0.0)
+ Requires-Dist: yta_programming (>=0.0.1,<1.0.0)
+ Requires-Dist: yta_validation (>=0.0.1,<1.0.0)
+ Requires-Dist: yta_video_opengl (>=0.0.1,<1.0.0)
  Description-Content-Type: text/markdown
 
  # Youtube Autonomous Main Editor Nodes GPU module